Show More
The requested changes are too big and content was truncated. Show full diff
@@ -0,0 +1,194 b'' | |||||
|
1 | # | |||
|
2 | # This file is autogenerated by pip-compile | |||
|
3 | # To update, run: | |||
|
4 | # | |||
|
5 | # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py3.5.txt contrib/automation/linux-requirements.txt.in | |||
|
6 | # | |||
|
7 | astroid==2.4.2 \ | |||
|
8 | --hash=sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703 \ | |||
|
9 | --hash=sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386 | |||
|
10 | # via pylint | |||
|
11 | docutils==0.17.1 \ | |||
|
12 | --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 \ | |||
|
13 | --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61 | |||
|
14 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
15 | fuzzywuzzy==0.18.0 \ | |||
|
16 | --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \ | |||
|
17 | --hash=sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993 | |||
|
18 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
19 | idna==3.1 \ | |||
|
20 | --hash=sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16 \ | |||
|
21 | --hash=sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1 | |||
|
22 | # via yarl | |||
|
23 | isort==4.3.21 \ | |||
|
24 | --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ | |||
|
25 | --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd | |||
|
26 | # via | |||
|
27 | # -r contrib/automation/linux-requirements.txt.in | |||
|
28 | # pylint | |||
|
29 | lazy-object-proxy==1.4.3 \ | |||
|
30 | --hash=sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d \ | |||
|
31 | --hash=sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449 \ | |||
|
32 | --hash=sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08 \ | |||
|
33 | --hash=sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a \ | |||
|
34 | --hash=sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50 \ | |||
|
35 | --hash=sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd \ | |||
|
36 | --hash=sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239 \ | |||
|
37 | --hash=sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb \ | |||
|
38 | --hash=sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea \ | |||
|
39 | --hash=sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e \ | |||
|
40 | --hash=sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156 \ | |||
|
41 | --hash=sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142 \ | |||
|
42 | --hash=sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442 \ | |||
|
43 | --hash=sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62 \ | |||
|
44 | --hash=sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db \ | |||
|
45 | --hash=sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531 \ | |||
|
46 | --hash=sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383 \ | |||
|
47 | --hash=sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a \ | |||
|
48 | --hash=sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357 \ | |||
|
49 | --hash=sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4 \ | |||
|
50 | --hash=sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0 | |||
|
51 | # via astroid | |||
|
52 | mccabe==0.6.1 \ | |||
|
53 | --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ | |||
|
54 | --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f | |||
|
55 | # via pylint | |||
|
56 | multidict==5.0.2 \ | |||
|
57 | --hash=sha256:060d68ae3e674c913ec41a464916f12c4d7ff17a3a9ebbf37ba7f2c681c2b33e \ | |||
|
58 | --hash=sha256:06f39f0ddc308dab4e5fa282d145f90cd38d7ed75390fc83335636909a9ec191 \ | |||
|
59 | --hash=sha256:17847fede1aafdb7e74e01bb34ab47a1a1ea726e8184c623c45d7e428d2d5d34 \ | |||
|
60 | --hash=sha256:1cd102057b09223b919f9447c669cf2efabeefb42a42ae6233f25ffd7ee31a79 \ | |||
|
61 | --hash=sha256:20cc9b2dd31761990abff7d0e63cd14dbfca4ebb52a77afc917b603473951a38 \ | |||
|
62 | --hash=sha256:2576e30bbec004e863d87216bc34abe24962cc2e964613241a1c01c7681092ab \ | |||
|
63 | --hash=sha256:2ab9cad4c5ef5c41e1123ed1f89f555aabefb9391d4e01fd6182de970b7267ed \ | |||
|
64 | --hash=sha256:359ea00e1b53ceef282232308da9d9a3f60d645868a97f64df19485c7f9ef628 \ | |||
|
65 | --hash=sha256:3e61cc244fd30bd9fdfae13bdd0c5ec65da51a86575ff1191255cae677045ffe \ | |||
|
66 | --hash=sha256:43c7a87d8c31913311a1ab24b138254a0ee89142983b327a2c2eab7a7d10fea9 \ | |||
|
67 | --hash=sha256:4a3f19da871befa53b48dd81ee48542f519beffa13090dc135fffc18d8fe36db \ | |||
|
68 | --hash=sha256:4df708ef412fd9b59b7e6c77857e64c1f6b4c0116b751cb399384ec9a28baa66 \ | |||
|
69 | --hash=sha256:59182e975b8c197d0146a003d0f0d5dc5487ce4899502061d8df585b0f51fba2 \ | |||
|
70 | --hash=sha256:6128d2c0956fd60e39ec7d1c8f79426f0c915d36458df59ddd1f0cff0340305f \ | |||
|
71 | --hash=sha256:6168839491a533fa75f3f5d48acbb829475e6c7d9fa5c6e245153b5f79b986a3 \ | |||
|
72 | --hash=sha256:62abab8088704121297d39c8f47156cb8fab1da731f513e59ba73946b22cf3d0 \ | |||
|
73 | --hash=sha256:653b2bbb0bbf282c37279dd04f429947ac92713049e1efc615f68d4e64b1dbc2 \ | |||
|
74 | --hash=sha256:6566749cd78cb37cbf8e8171b5cd2cbfc03c99f0891de12255cf17a11c07b1a3 \ | |||
|
75 | --hash=sha256:76cbdb22f48de64811f9ce1dd4dee09665f84f32d6a26de249a50c1e90e244e0 \ | |||
|
76 | --hash=sha256:8efcf070d60fd497db771429b1c769a3783e3a0dd96c78c027e676990176adc5 \ | |||
|
77 | --hash=sha256:8fa4549f341a057feec4c3139056ba73e17ed03a506469f447797a51f85081b5 \ | |||
|
78 | --hash=sha256:9380b3f2b00b23a4106ba9dd022df3e6e2e84e1788acdbdd27603b621b3288df \ | |||
|
79 | --hash=sha256:9ed9b280f7778ad6f71826b38a73c2fdca4077817c64bc1102fdada58e75c03c \ | |||
|
80 | --hash=sha256:a7b8b5bd16376c8ac2977748bd978a200326af5145d8d0e7f799e2b355d425b6 \ | |||
|
81 | --hash=sha256:af271c2540d1cd2a137bef8d95a8052230aa1cda26dd3b2c73d858d89993d518 \ | |||
|
82 | --hash=sha256:b561e76c9e21402d9a446cdae13398f9942388b9bff529f32dfa46220af54d00 \ | |||
|
83 | --hash=sha256:b82400ef848bbac6b9035a105ac6acaa1fb3eea0d164e35bbb21619b88e49fed \ | |||
|
84 | --hash=sha256:b98af08d7bb37d3456a22f689819ea793e8d6961b9629322d7728c4039071641 \ | |||
|
85 | --hash=sha256:c58e53e1c73109fdf4b759db9f2939325f510a8a5215135330fe6755921e4886 \ | |||
|
86 | --hash=sha256:cbabfc12b401d074298bfda099c58dfa5348415ae2e4ec841290627cb7cb6b2e \ | |||
|
87 | --hash=sha256:d4a6fb98e9e9be3f7d70fd3e852369c00a027bd5ed0f3e8ade3821bcad257408 \ | |||
|
88 | --hash=sha256:d99da85d6890267292065e654a329e1d2f483a5d2485e347383800e616a8c0b1 \ | |||
|
89 | --hash=sha256:e58db0e0d60029915f7fc95a8683fa815e204f2e1990f1fb46a7778d57ca8c35 \ | |||
|
90 | --hash=sha256:e5bf89fe57f702a046c7ec718fe330ed50efd4bcf74722940db2eb0919cddb1c \ | |||
|
91 | --hash=sha256:f612e8ef8408391a4a3366e3508bab8ef97b063b4918a317cb6e6de4415f01af \ | |||
|
92 | --hash=sha256:f65a2442c113afde52fb09f9a6276bbc31da71add99dc76c3adf6083234e07c6 \ | |||
|
93 | --hash=sha256:fa0503947a99a1be94f799fac89d67a5e20c333e78ddae16e8534b151cdc588a | |||
|
94 | # via yarl | |||
|
95 | pyflakes==2.3.1 \ | |||
|
96 | --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ | |||
|
97 | --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db | |||
|
98 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
99 | pygments==2.9.0 \ | |||
|
100 | --hash=sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f \ | |||
|
101 | --hash=sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e | |||
|
102 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
103 | pylint==2.6.2 \ | |||
|
104 | --hash=sha256:718b74786ea7ed07aa0c58bf572154d4679f960d26e9641cc1de204a30b87fc9 \ | |||
|
105 | --hash=sha256:e71c2e9614a4f06e36498f310027942b0f4f2fde20aebb01655b31edc63b9eaf | |||
|
106 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
107 | python-levenshtein==0.12.2 \ | |||
|
108 | --hash=sha256:dc2395fbd148a1ab31090dd113c366695934b9e85fe5a4b2a032745efd0346f6 | |||
|
109 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
110 | pyyaml==5.3.1 \ | |||
|
111 | --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ | |||
|
112 | --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ | |||
|
113 | --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ | |||
|
114 | --hash=sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e \ | |||
|
115 | --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ | |||
|
116 | --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ | |||
|
117 | --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ | |||
|
118 | --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ | |||
|
119 | --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ | |||
|
120 | --hash=sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a \ | |||
|
121 | --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ | |||
|
122 | --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ | |||
|
123 | --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a | |||
|
124 | # via vcrpy | |||
|
125 | six==1.16.0 \ | |||
|
126 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ | |||
|
127 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 | |||
|
128 | # via | |||
|
129 | # astroid | |||
|
130 | # vcrpy | |||
|
131 | toml==0.10.2 \ | |||
|
132 | --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ | |||
|
133 | --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f | |||
|
134 | # via pylint | |||
|
135 | typed-ast==1.4.3 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \ | |||
|
136 | --hash=sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace \ | |||
|
137 | --hash=sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff \ | |||
|
138 | --hash=sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266 \ | |||
|
139 | --hash=sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528 \ | |||
|
140 | --hash=sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6 \ | |||
|
141 | --hash=sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808 \ | |||
|
142 | --hash=sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4 \ | |||
|
143 | --hash=sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363 \ | |||
|
144 | --hash=sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341 \ | |||
|
145 | --hash=sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04 \ | |||
|
146 | --hash=sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41 \ | |||
|
147 | --hash=sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e \ | |||
|
148 | --hash=sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3 \ | |||
|
149 | --hash=sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899 \ | |||
|
150 | --hash=sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805 \ | |||
|
151 | --hash=sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c \ | |||
|
152 | --hash=sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c \ | |||
|
153 | --hash=sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39 \ | |||
|
154 | --hash=sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a \ | |||
|
155 | --hash=sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3 \ | |||
|
156 | --hash=sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7 \ | |||
|
157 | --hash=sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f \ | |||
|
158 | --hash=sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075 \ | |||
|
159 | --hash=sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0 \ | |||
|
160 | --hash=sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40 \ | |||
|
161 | --hash=sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428 \ | |||
|
162 | --hash=sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927 \ | |||
|
163 | --hash=sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3 \ | |||
|
164 | --hash=sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f \ | |||
|
165 | --hash=sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65 | |||
|
166 | # via | |||
|
167 | # -r contrib/automation/linux-requirements.txt.in | |||
|
168 | # astroid | |||
|
169 | vcrpy==4.1.1 \ | |||
|
170 | --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \ | |||
|
171 | --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 | |||
|
172 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
173 | wrapt==1.12.1 \ | |||
|
174 | --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 | |||
|
175 | # via | |||
|
176 | # astroid | |||
|
177 | # vcrpy | |||
|
178 | yarl==1.3.0 \ | |||
|
179 | --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \ | |||
|
180 | --hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \ | |||
|
181 | --hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \ | |||
|
182 | --hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \ | |||
|
183 | --hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \ | |||
|
184 | --hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \ | |||
|
185 | --hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \ | |||
|
186 | --hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \ | |||
|
187 | --hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \ | |||
|
188 | --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \ | |||
|
189 | --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1 | |||
|
190 | # via vcrpy | |||
|
191 | ||||
|
192 | # WARNING: The following packages were not pinned, but pip requires them to be | |||
|
193 | # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. | |||
|
194 | # setuptools |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100644 |
|
NO CONTENT: new file 100644 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100755 |
|
NO CONTENT: new file 100755 | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: new file 100755 |
|
NO CONTENT: new file 100755 | ||
The requested commit or file is too big and content was truncated. Show full diff |
@@ -1,1325 +1,1335 b'' | |||||
1 | # aws.py - Automation code for Amazon Web Services |
|
1 | # aws.py - Automation code for Amazon Web Services | |
2 | # |
|
2 | # | |
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # no-check-code because Python 3 native. |
|
8 | # no-check-code because Python 3 native. | |
9 |
|
9 | |||
10 | import contextlib |
|
10 | import contextlib | |
11 | import copy |
|
11 | import copy | |
12 | import hashlib |
|
12 | import hashlib | |
13 | import json |
|
13 | import json | |
14 | import os |
|
14 | import os | |
15 | import pathlib |
|
15 | import pathlib | |
16 | import subprocess |
|
16 | import subprocess | |
17 | import time |
|
17 | import time | |
18 |
|
18 | |||
19 | import boto3 |
|
19 | import boto3 | |
20 | import botocore.exceptions |
|
20 | import botocore.exceptions | |
21 |
|
21 | |||
22 | from .linux import BOOTSTRAP_DEBIAN |
|
22 | from .linux import BOOTSTRAP_DEBIAN | |
23 | from .ssh import ( |
|
23 | from .ssh import ( | |
24 | exec_command as ssh_exec_command, |
|
24 | exec_command as ssh_exec_command, | |
25 | wait_for_ssh, |
|
25 | wait_for_ssh, | |
26 | ) |
|
26 | ) | |
27 | from .winrm import ( |
|
27 | from .winrm import ( | |
28 | run_powershell, |
|
28 | run_powershell, | |
29 | wait_for_winrm, |
|
29 | wait_for_winrm, | |
30 | ) |
|
30 | ) | |
31 |
|
31 | |||
32 |
|
32 | |||
33 | SOURCE_ROOT = pathlib.Path( |
|
33 | SOURCE_ROOT = pathlib.Path( | |
34 | os.path.abspath(__file__) |
|
34 | os.path.abspath(__file__) | |
35 | ).parent.parent.parent.parent |
|
35 | ).parent.parent.parent.parent | |
36 |
|
36 | |||
37 | INSTALL_WINDOWS_DEPENDENCIES = ( |
|
37 | INSTALL_WINDOWS_DEPENDENCIES = ( | |
38 | SOURCE_ROOT / 'contrib' / 'install-windows-dependencies.ps1' |
|
38 | SOURCE_ROOT / 'contrib' / 'install-windows-dependencies.ps1' | |
39 | ) |
|
39 | ) | |
40 |
|
40 | |||
41 |
|
41 | |||
42 | INSTANCE_TYPES_WITH_STORAGE = { |
|
42 | INSTANCE_TYPES_WITH_STORAGE = { | |
43 | 'c5d', |
|
43 | 'c5d', | |
44 | 'd2', |
|
44 | 'd2', | |
45 | 'h1', |
|
45 | 'h1', | |
46 | 'i3', |
|
46 | 'i3', | |
47 | 'm5ad', |
|
47 | 'm5ad', | |
48 | 'm5d', |
|
48 | 'm5d', | |
49 | 'r5d', |
|
49 | 'r5d', | |
50 | 'r5ad', |
|
50 | 'r5ad', | |
51 | 'x1', |
|
51 | 'x1', | |
52 | 'z1d', |
|
52 | 'z1d', | |
53 | } |
|
53 | } | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | AMAZON_ACCOUNT_ID = '801119661308' |
|
56 | AMAZON_ACCOUNT_ID = '801119661308' | |
57 | DEBIAN_ACCOUNT_ID = '379101102735' |
|
57 | DEBIAN_ACCOUNT_ID = '379101102735' | |
58 | DEBIAN_ACCOUNT_ID_2 = '136693071363' |
|
58 | DEBIAN_ACCOUNT_ID_2 = '136693071363' | |
59 | UBUNTU_ACCOUNT_ID = '099720109477' |
|
59 | UBUNTU_ACCOUNT_ID = '099720109477' | |
60 |
|
60 | |||
61 |
|
61 | |||
62 | WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-*' |
|
62 | WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-*' | |
63 |
|
63 | |||
64 |
|
64 | |||
65 | KEY_PAIRS = { |
|
65 | KEY_PAIRS = { | |
66 | 'automation', |
|
66 | 'automation', | |
67 | } |
|
67 | } | |
68 |
|
68 | |||
69 |
|
69 | |||
70 | SECURITY_GROUPS = { |
|
70 | SECURITY_GROUPS = { | |
71 | 'linux-dev-1': { |
|
71 | 'linux-dev-1': { | |
72 | 'description': 'Mercurial Linux instances that perform build/test automation', |
|
72 | 'description': 'Mercurial Linux instances that perform build/test automation', | |
73 | 'ingress': [ |
|
73 | 'ingress': [ | |
74 | { |
|
74 | { | |
75 | 'FromPort': 22, |
|
75 | 'FromPort': 22, | |
76 | 'ToPort': 22, |
|
76 | 'ToPort': 22, | |
77 | 'IpProtocol': 'tcp', |
|
77 | 'IpProtocol': 'tcp', | |
78 | 'IpRanges': [ |
|
78 | 'IpRanges': [ | |
79 | { |
|
79 | { | |
80 | 'CidrIp': '0.0.0.0/0', |
|
80 | 'CidrIp': '0.0.0.0/0', | |
81 | 'Description': 'SSH from entire Internet', |
|
81 | 'Description': 'SSH from entire Internet', | |
82 | }, |
|
82 | }, | |
83 | ], |
|
83 | ], | |
84 | }, |
|
84 | }, | |
85 | ], |
|
85 | ], | |
86 | }, |
|
86 | }, | |
87 | 'windows-dev-1': { |
|
87 | 'windows-dev-1': { | |
88 | 'description': 'Mercurial Windows instances that perform build automation', |
|
88 | 'description': 'Mercurial Windows instances that perform build automation', | |
89 | 'ingress': [ |
|
89 | 'ingress': [ | |
90 | { |
|
90 | { | |
91 | 'FromPort': 22, |
|
91 | 'FromPort': 22, | |
92 | 'ToPort': 22, |
|
92 | 'ToPort': 22, | |
93 | 'IpProtocol': 'tcp', |
|
93 | 'IpProtocol': 'tcp', | |
94 | 'IpRanges': [ |
|
94 | 'IpRanges': [ | |
95 | { |
|
95 | { | |
96 | 'CidrIp': '0.0.0.0/0', |
|
96 | 'CidrIp': '0.0.0.0/0', | |
97 | 'Description': 'SSH from entire Internet', |
|
97 | 'Description': 'SSH from entire Internet', | |
98 | }, |
|
98 | }, | |
99 | ], |
|
99 | ], | |
100 | }, |
|
100 | }, | |
101 | { |
|
101 | { | |
102 | 'FromPort': 3389, |
|
102 | 'FromPort': 3389, | |
103 | 'ToPort': 3389, |
|
103 | 'ToPort': 3389, | |
104 | 'IpProtocol': 'tcp', |
|
104 | 'IpProtocol': 'tcp', | |
105 | 'IpRanges': [ |
|
105 | 'IpRanges': [ | |
106 | { |
|
106 | { | |
107 | 'CidrIp': '0.0.0.0/0', |
|
107 | 'CidrIp': '0.0.0.0/0', | |
108 | 'Description': 'RDP from entire Internet', |
|
108 | 'Description': 'RDP from entire Internet', | |
109 | }, |
|
109 | }, | |
110 | ], |
|
110 | ], | |
111 | }, |
|
111 | }, | |
112 | { |
|
112 | { | |
113 | 'FromPort': 5985, |
|
113 | 'FromPort': 5985, | |
114 | 'ToPort': 5986, |
|
114 | 'ToPort': 5986, | |
115 | 'IpProtocol': 'tcp', |
|
115 | 'IpProtocol': 'tcp', | |
116 | 'IpRanges': [ |
|
116 | 'IpRanges': [ | |
117 | { |
|
117 | { | |
118 | 'CidrIp': '0.0.0.0/0', |
|
118 | 'CidrIp': '0.0.0.0/0', | |
119 | 'Description': 'PowerShell Remoting (Windows Remote Management)', |
|
119 | 'Description': 'PowerShell Remoting (Windows Remote Management)', | |
120 | }, |
|
120 | }, | |
121 | ], |
|
121 | ], | |
122 | }, |
|
122 | }, | |
123 | ], |
|
123 | ], | |
124 | }, |
|
124 | }, | |
125 | } |
|
125 | } | |
126 |
|
126 | |||
127 |
|
127 | |||
128 | IAM_ROLES = { |
|
128 | IAM_ROLES = { | |
129 | 'ephemeral-ec2-role-1': { |
|
129 | 'ephemeral-ec2-role-1': { | |
130 | 'description': 'Mercurial temporary EC2 instances', |
|
130 | 'description': 'Mercurial temporary EC2 instances', | |
131 | 'policy_arns': [ |
|
131 | 'policy_arns': [ | |
132 | 'arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforSSM', |
|
132 | 'arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforSSM', | |
133 | ], |
|
133 | ], | |
134 | }, |
|
134 | }, | |
135 | } |
|
135 | } | |
136 |
|
136 | |||
137 |
|
137 | |||
138 | ASSUME_ROLE_POLICY_DOCUMENT = ''' |
|
138 | ASSUME_ROLE_POLICY_DOCUMENT = ''' | |
139 | { |
|
139 | { | |
140 | "Version": "2012-10-17", |
|
140 | "Version": "2012-10-17", | |
141 | "Statement": [ |
|
141 | "Statement": [ | |
142 | { |
|
142 | { | |
143 | "Effect": "Allow", |
|
143 | "Effect": "Allow", | |
144 | "Principal": { |
|
144 | "Principal": { | |
145 | "Service": "ec2.amazonaws.com" |
|
145 | "Service": "ec2.amazonaws.com" | |
146 | }, |
|
146 | }, | |
147 | "Action": "sts:AssumeRole" |
|
147 | "Action": "sts:AssumeRole" | |
148 | } |
|
148 | } | |
149 | ] |
|
149 | ] | |
150 | } |
|
150 | } | |
151 | '''.strip() |
|
151 | '''.strip() | |
152 |
|
152 | |||
153 |
|
153 | |||
154 | IAM_INSTANCE_PROFILES = { |
|
154 | IAM_INSTANCE_PROFILES = { | |
155 | 'ephemeral-ec2-1': { |
|
155 | 'ephemeral-ec2-1': { | |
156 | 'roles': [ |
|
156 | 'roles': [ | |
157 | 'ephemeral-ec2-role-1', |
|
157 | 'ephemeral-ec2-role-1', | |
158 | ], |
|
158 | ], | |
159 | } |
|
159 | } | |
160 | } |
|
160 | } | |
161 |
|
161 | |||
162 |
|
162 | |||
163 | # User Data for Windows EC2 instance. Mainly used to set the password |
|
163 | # User Data for Windows EC2 instance. Mainly used to set the password | |
164 | # and configure WinRM. |
|
164 | # and configure WinRM. | |
165 | # Inspired by the User Data script used by Packer |
|
165 | # Inspired by the User Data script used by Packer | |
166 | # (from https://www.packer.io/intro/getting-started/build-image.html). |
|
166 | # (from https://www.packer.io/intro/getting-started/build-image.html). | |
167 | WINDOWS_USER_DATA = r''' |
|
167 | WINDOWS_USER_DATA = r''' | |
168 | <powershell> |
|
168 | <powershell> | |
169 |
|
169 | |||
170 | # TODO enable this once we figure out what is failing. |
|
170 | # TODO enable this once we figure out what is failing. | |
171 | #$ErrorActionPreference = "stop" |
|
171 | #$ErrorActionPreference = "stop" | |
172 |
|
172 | |||
173 | # Set administrator password |
|
173 | # Set administrator password | |
174 | net user Administrator "%s" |
|
174 | net user Administrator "%s" | |
175 | wmic useraccount where "name='Administrator'" set PasswordExpires=FALSE |
|
175 | wmic useraccount where "name='Administrator'" set PasswordExpires=FALSE | |
176 |
|
176 | |||
177 | # First, make sure WinRM can't be connected to |
|
177 | # First, make sure WinRM can't be connected to | |
178 | netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new enable=yes action=block |
|
178 | netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new enable=yes action=block | |
179 |
|
179 | |||
180 | # Delete any existing WinRM listeners |
|
180 | # Delete any existing WinRM listeners | |
181 | winrm delete winrm/config/listener?Address=*+Transport=HTTP 2>$Null |
|
181 | winrm delete winrm/config/listener?Address=*+Transport=HTTP 2>$Null | |
182 | winrm delete winrm/config/listener?Address=*+Transport=HTTPS 2>$Null |
|
182 | winrm delete winrm/config/listener?Address=*+Transport=HTTPS 2>$Null | |
183 |
|
183 | |||
184 | # Create a new WinRM listener and configure |
|
184 | # Create a new WinRM listener and configure | |
185 | winrm create winrm/config/listener?Address=*+Transport=HTTP |
|
185 | winrm create winrm/config/listener?Address=*+Transport=HTTP | |
186 | winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="0"}' |
|
186 | winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="0"}' | |
187 | winrm set winrm/config '@{MaxTimeoutms="7200000"}' |
|
187 | winrm set winrm/config '@{MaxTimeoutms="7200000"}' | |
188 | winrm set winrm/config/service '@{AllowUnencrypted="true"}' |
|
188 | winrm set winrm/config/service '@{AllowUnencrypted="true"}' | |
189 | winrm set winrm/config/service '@{MaxConcurrentOperationsPerUser="12000"}' |
|
189 | winrm set winrm/config/service '@{MaxConcurrentOperationsPerUser="12000"}' | |
190 | winrm set winrm/config/service/auth '@{Basic="true"}' |
|
190 | winrm set winrm/config/service/auth '@{Basic="true"}' | |
191 | winrm set winrm/config/client/auth '@{Basic="true"}' |
|
191 | winrm set winrm/config/client/auth '@{Basic="true"}' | |
192 |
|
192 | |||
193 | # Configure UAC to allow privilege elevation in remote shells |
|
193 | # Configure UAC to allow privilege elevation in remote shells | |
194 | $Key = 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System' |
|
194 | $Key = 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System' | |
195 | $Setting = 'LocalAccountTokenFilterPolicy' |
|
195 | $Setting = 'LocalAccountTokenFilterPolicy' | |
196 | Set-ItemProperty -Path $Key -Name $Setting -Value 1 -Force |
|
196 | Set-ItemProperty -Path $Key -Name $Setting -Value 1 -Force | |
197 |
|
197 | |||
198 | # Avoid long usernames in the temp directory path because the '~' causes extra quoting in ssh output |
|
198 | # Avoid long usernames in the temp directory path because the '~' causes extra quoting in ssh output | |
199 | [System.Environment]::SetEnvironmentVariable('TMP', 'C:\Temp', [System.EnvironmentVariableTarget]::User) |
|
199 | [System.Environment]::SetEnvironmentVariable('TMP', 'C:\Temp', [System.EnvironmentVariableTarget]::User) | |
200 | [System.Environment]::SetEnvironmentVariable('TEMP', 'C:\Temp', [System.EnvironmentVariableTarget]::User) |
|
200 | [System.Environment]::SetEnvironmentVariable('TEMP', 'C:\Temp', [System.EnvironmentVariableTarget]::User) | |
201 |
|
201 | |||
202 | # Configure and restart the WinRM Service; Enable the required firewall exception |
|
202 | # Configure and restart the WinRM Service; Enable the required firewall exception | |
203 | Stop-Service -Name WinRM |
|
203 | Stop-Service -Name WinRM | |
204 | Set-Service -Name WinRM -StartupType Automatic |
|
204 | Set-Service -Name WinRM -StartupType Automatic | |
205 | netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new action=allow localip=any remoteip=any |
|
205 | netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new action=allow localip=any remoteip=any | |
206 | Start-Service -Name WinRM |
|
206 | Start-Service -Name WinRM | |
207 |
|
207 | |||
208 | # Disable firewall on private network interfaces so prompts don't appear. |
|
208 | # Disable firewall on private network interfaces so prompts don't appear. | |
209 | Set-NetFirewallProfile -Name private -Enabled false |
|
209 | Set-NetFirewallProfile -Name private -Enabled false | |
210 | </powershell> |
|
210 | </powershell> | |
211 | '''.lstrip() |
|
211 | '''.lstrip() | |
212 |
|
212 | |||
213 |
|
213 | |||
# PowerShell script run on a fresh Windows instance to bootstrap it for
# remote management: installs the OpenSSH server (for SSH access) and
# enables the legacy .NET Framework feature many tools still require.
# NOTE: the log message previously said "OpenSSL server" — the command
# actually installs OpenSSH.Server, so the message was corrected.
WINDOWS_BOOTSTRAP_POWERSHELL = '''
Write-Output "installing PowerShell dependencies"
Install-PackageProvider -Name NuGet -MinimumVersion 2.8.5.201 -Force
Set-PSRepository -Name PSGallery -InstallationPolicy Trusted
Install-Module -Name OpenSSHUtils -RequiredVersion 0.0.2.0

Write-Output "installing OpenSSH server"
Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0
# Various tools will attempt to use older versions of .NET. So we enable
# the feature that provides them so it doesn't have to be auto-enabled
# later.
Write-Output "enabling .NET Framework feature"
Install-WindowsFeature -Name Net-Framework-Core
'''
228 |
|
228 | |||
229 |
|
229 | |||
class AWSConnection:
    """Manages the state of a connection with AWS."""

    def __init__(self, automation, region: str, ensure_ec2_state: bool = True):
        self.automation = automation
        self.local_state_path = automation.state_path

        # Every AWS resource we manage is named with this prefix.
        self.prefix = 'hg-'

        session = boto3.session.Session(region_name=region)
        self.session = session
        self.ec2client = session.client('ec2')
        self.ec2resource = session.resource('ec2')
        self.iamclient = session.client('iam')
        self.iamresource = session.resource('iam')
        self.security_groups = {}

        # Optionally reconcile remote EC2/IAM state with our canonical
        # definitions as part of establishing the connection.
        if ensure_ec2_state:
            ensure_key_pairs(automation.state_path, self.ec2resource)
            self.security_groups = ensure_security_groups(self.ec2resource)
            ensure_iam_state(self.iamclient, self.iamresource)

    def key_pair_path_private(self, name):
        """Path to a key pair private key file."""
        return self.local_state_path / 'keys' / ('keypair-%s' % name)

    def key_pair_path_public(self, name):
        """Path to a key pair public key file (``.pub`` sibling)."""
        return self.local_state_path / 'keys' / ('keypair-%s.pub' % name)
257 |
|
257 | |||
258 |
|
258 | |||
def rsa_key_fingerprint(p: pathlib.Path):
    """Compute the colon-delimited SHA-1 fingerprint of an RSA private key.

    The key is converted to DER-encoded PKCS#8 via ``openssl`` and the
    digest of that blob is rendered as hex-digit pairs joined by colons.
    """

    # TODO use rsa package.
    der = subprocess.run(
        [
            'openssl',
            'pkcs8',
            '-in',
            str(p),
            '-nocrypt',
            '-topk8',
            '-outform',
            'DER',
        ],
        capture_output=True,
        check=True,
    ).stdout

    digest = hashlib.sha1(der).hexdigest()
    # Pair up the hex digits: "abcd" -> "ab:cd".
    return ':'.join(digest[i : i + 2] for i in range(0, len(digest), 2))
280 |
|
280 | |||
281 |
|
281 | |||
def ensure_key_pairs(state_path: pathlib.Path, ec2resource, prefix='hg-'):
    """Synchronize SSH key pairs between local state and EC2.

    Keys present on only one side, or whose fingerprints disagree, are
    removed from both sides. Any key named in ``KEY_PAIRS`` that is then
    missing remotely is created and its material written locally.
    """
    remote_existing = {}

    for kpi in ec2resource.key_pairs.all():
        if kpi.name.startswith(prefix):
            remote_existing[kpi.name[len(prefix) :]] = kpi.key_fingerprint

    # Validate that we have these keys locally.
    key_path = state_path / 'keys'
    key_path.mkdir(exist_ok=True, mode=0o700)

    def remove_remote(name):
        # Delete the named key pair from EC2.
        print('deleting key pair %s' % name)
        ec2resource.KeyPair(name).delete()

    def remove_local(name):
        # Delete the local public/private key files for ``name``.
        pub_full = key_path / ('keypair-%s.pub' % name)
        priv_full = key_path / ('keypair-%s' % name)

        print('removing %s' % pub_full)
        pub_full.unlink()
        print('removing %s' % priv_full)
        priv_full.unlink()

    local_existing = {}

    # Inventory local keys by scanning for keypair-*.pub files.
    for f in sorted(os.listdir(key_path)):
        if not f.startswith('keypair-') or not f.endswith('.pub'):
            continue

        name = f[len('keypair-') : -len('.pub')]

        pub_full = key_path / f
        priv_full = key_path / ('keypair-%s' % name)

        with open(pub_full, 'r', encoding='ascii') as fh:
            data = fh.read()

        # Anything that isn't an ssh-rsa public key is corrupt; purge it.
        if not data.startswith('ssh-rsa '):
            print(
                'unexpected format for key pair file: %s; removing' % pub_full
            )
            pub_full.unlink()
            priv_full.unlink()
            continue

        local_existing[name] = rsa_key_fingerprint(priv_full)

    # Reconcile the two inventories: anything one-sided or mismatched
    # is deleted so it can be recreated cleanly below.
    for name in sorted(set(remote_existing) | set(local_existing)):
        if name not in local_existing:
            actual = '%s%s' % (prefix, name)
            print('remote key %s does not exist locally' % name)
            remove_remote(actual)
            del remote_existing[name]

        elif name not in remote_existing:
            print('local key %s does not exist remotely' % name)
            remove_local(name)
            del local_existing[name]

        elif remote_existing[name] != local_existing[name]:
            print(
                'key fingerprint mismatch for %s; '
                'removing from local and remote' % name
            )
            remove_local(name)
            remove_remote('%s%s' % (prefix, name))
            del local_existing[name]
            del remote_existing[name]

    # Create any canonical key pairs that are now absent remotely.
    for name in sorted(KEY_PAIRS - set(remote_existing)):
        actual = '%s%s' % (prefix, name)
        print('creating key pair %s' % actual)

        priv_full = key_path / ('keypair-%s' % name)
        pub_full = key_path / ('keypair-%s.pub' % name)

        kp = ec2resource.create_key_pair(KeyName=actual)

        with priv_full.open('w', encoding='ascii') as fh:
            fh.write(kp.key_material)
            fh.write('\n')

        priv_full.chmod(0o0600)

        # SSH public key can be extracted via `ssh-keygen`.
        with pub_full.open('w', encoding='ascii') as fh:
            subprocess.run(
                ['ssh-keygen', '-y', '-f', str(priv_full)],
                stdout=fh,
                check=True,
            )

        pub_full.chmod(0o0600)
379 |
|
379 | |||
380 |
|
380 | |||
def delete_instance_profile(profile):
    """Detach every role from *profile*, then delete the profile itself.

    IAM requires a profile to be empty of roles before deletion.
    """
    for role in profile.roles:
        print(
            'removing role %s from instance profile %s'
            % (role.name, profile.name)
        )
        profile.remove_role(RoleName=role.name)

    print('deleting instance profile %s' % profile.name)
    profile.delete()
391 |
|
391 | |||
392 |
|
392 | |||
def ensure_iam_state(iamclient, iamresource, prefix='hg-'):
    """Ensure IAM state is in sync with our canonical definition.

    Instance profiles and roles carrying *prefix* but not present in
    ``IAM_INSTANCE_PROFILES`` / ``IAM_ROLES`` are deleted; missing ones
    are created, and each profile's role membership is reconciled.
    """

    remote_profiles = {}

    for profile in iamresource.instance_profiles.all():
        if profile.name.startswith(prefix):
            remote_profiles[profile.name[len(prefix) :]] = profile

    # Purge profiles we no longer define.
    for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)):
        delete_instance_profile(remote_profiles[name])
        del remote_profiles[name]

    remote_roles = {}

    for role in iamresource.roles.all():
        if role.name.startswith(prefix):
            remote_roles[role.name[len(prefix) :]] = role

    # Purge roles we no longer define.
    for name in sorted(set(remote_roles) - set(IAM_ROLES)):
        role = remote_roles[name]

        print('removing role %s' % role.name)
        role.delete()
        del remote_roles[name]

    # We've purged remote state that doesn't belong. Create missing
    # instance profiles and roles.
    for name in sorted(set(IAM_INSTANCE_PROFILES) - set(remote_profiles)):
        actual = '%s%s' % (prefix, name)
        print('creating IAM instance profile %s' % actual)

        profile = iamresource.create_instance_profile(
            InstanceProfileName=actual
        )
        remote_profiles[name] = profile

        # Creation is eventually consistent; block until visible.
        waiter = iamclient.get_waiter('instance_profile_exists')
        waiter.wait(InstanceProfileName=actual)
        print('IAM instance profile %s is available' % actual)

    for name in sorted(set(IAM_ROLES) - set(remote_roles)):
        entry = IAM_ROLES[name]

        actual = '%s%s' % (prefix, name)
        print('creating IAM role %s' % actual)

        role = iamresource.create_role(
            RoleName=actual,
            Description=entry['description'],
            AssumeRolePolicyDocument=ASSUME_ROLE_POLICY_DOCUMENT,
        )

        waiter = iamclient.get_waiter('role_exists')
        waiter.wait(RoleName=actual)
        print('IAM role %s is available' % actual)

        remote_roles[name] = role

        for arn in entry['policy_arns']:
            print('attaching policy %s to %s' % (arn, role.name))
            role.attach_policy(PolicyArn=arn)

    # Now reconcile state of profiles.
    for name, meta in sorted(IAM_INSTANCE_PROFILES.items()):
        profile = remote_profiles[name]
        wanted = {'%s%s' % (prefix, role) for role in meta['roles']}
        have = {role.name for role in profile.roles}

        for role in sorted(have - wanted):
            print('removing role %s from %s' % (role, profile.name))
            profile.remove_role(RoleName=role)

        for role in sorted(wanted - have):
            print('adding role %s to %s' % (role, profile.name))
            profile.add_role(RoleName=role)
469 |
|
469 | |||
470 |
|
470 | |||
def find_image(ec2resource, owner_id, name, reverse_sort_field=None):
    """Find an AMI by its owner ID and name.

    Only ``available`` machine images are considered. When
    *reverse_sort_field* is given, candidates are ordered descending by
    that attribute and the greatest one wins; otherwise the first match
    is returned. Raises ``Exception`` when nothing matches.
    """

    criteria = [
        {
            'Name': 'owner-id',
            'Values': [owner_id],
        },
        {
            'Name': 'state',
            'Values': ['available'],
        },
        {
            'Name': 'image-type',
            'Values': ['machine'],
        },
        {
            'Name': 'name',
            'Values': [name],
        },
    ]

    candidates = ec2resource.images.filter(Filters=criteria)

    if reverse_sort_field:
        candidates = sorted(
            candidates,
            key=lambda image: getattr(image, reverse_sort_field),
            reverse=True,
        )

    # Return the first candidate, if any.
    for image in candidates:
        return image

    raise Exception('unable to find image for %s' % name)
506 |
|
506 | |||
507 |
|
507 | |||
def ensure_security_groups(ec2resource, prefix='hg-'):
    """Ensure all necessary Mercurial security groups are present.

    All security groups are prefixed with ``hg-`` by default. Any security
    groups having this prefix but aren't in our list are deleted.

    Returns a dict mapping unprefixed group name to the boto3 group object.
    """
    existing = {}

    for group in ec2resource.security_groups.all():
        if group.group_name.startswith(prefix):
            existing[group.group_name[len(prefix) :]] = group

    # Drop groups carrying our prefix that we no longer define.
    for name in sorted(set(existing) - set(SECURITY_GROUPS)):
        stale = existing[name]
        print('removing legacy security group: %s' % stale.group_name)
        stale.delete()

    security_groups = {}

    for name, group in sorted(SECURITY_GROUPS.items()):
        # Reuse groups that already exist; create the rest.
        if name in existing:
            security_groups[name] = existing[name]
            continue

        actual = '%s%s' % (prefix, name)
        print('adding security group %s' % actual)

        group_res = ec2resource.create_security_group(
            Description=group['description'],
            GroupName=actual,
        )

        group_res.authorize_ingress(
            IpPermissions=group['ingress'],
        )

        security_groups[name] = group_res

    return security_groups
549 |
|
549 | |||
550 |
|
550 | |||
def terminate_ec2_instances(ec2resource, prefix='hg-'):
    """Terminate all EC2 instances managed by us.

    An instance is "ours" when its ``Name`` tag starts with *prefix*.
    Blocks until every terminated instance is fully gone.
    """
    pending = []

    for instance in ec2resource.instances.all():
        # Instances already terminated need no action.
        if instance.state['Name'] == 'terminated':
            continue

        # ``tags`` can be None for untagged instances.
        for tag in instance.tags or []:
            if tag['Key'] == 'Name' and tag['Value'].startswith(prefix):
                print('terminating %s' % instance.id)
                instance.terminate()
                pending.append(instance)

    # Termination is asynchronous; wait for each to complete.
    for instance in pending:
        instance.wait_until_terminated()
567 |
|
567 | |||
568 |
|
568 | |||
def remove_resources(c, prefix='hg-'):
    """Purge all of our resources in this EC2 region.

    Removes, in order: instances, AMIs (with snapshots), security
    groups, instance profiles, and roles whose names carry *prefix*.
    """
    ec2resource = c.ec2resource
    iamresource = c.iamresource

    terminate_ec2_instances(ec2resource, prefix=prefix)

    for image in ec2resource.images.filter(Owners=['self']):
        if image.name.startswith(prefix):
            remove_ami(ec2resource, image)

    for group in ec2resource.security_groups.all():
        if group.group_name.startswith(prefix):
            print('removing security group %s' % group.group_name)
            group.delete()

    for profile in iamresource.instance_profiles.all():
        if profile.name.startswith(prefix):
            delete_instance_profile(profile)

    for role in iamresource.roles.all():
        if role.name.startswith(prefix):
            # Attached policies must be detached before role deletion.
            for p in role.attached_policies.all():
                print('detaching policy %s from %s' % (p.arn, role.name))
                role.detach_policy(PolicyArn=p.arn)

            print('removing role %s' % role.name)
            role.delete()
597 |
|
597 | |||
598 |
|
598 | |||
def wait_for_ip_addresses(instances):
    """Wait for the public IP addresses of an iterable of instances."""
    for instance in instances:
        # Poll until EC2 has assigned an address, refreshing our view
        # of the instance between attempts.
        while not instance.public_ip_address:
            time.sleep(2)
            instance.reload()

        print(
            'public IP address for %s: %s'
            % (instance.id, instance.public_ip_address)
        )
613 |
|
613 | |||
614 |
|
614 | |||
def remove_ami(ec2resource, image):
    """Remove an AMI and its underlying snapshots.

    Snapshot handles are collected from the block device mappings
    before deregistration, then deleted afterwards.
    """
    snapshots = [
        ec2resource.Snapshot(device['Ebs']['SnapshotId'])
        for device in image.block_device_mappings
        if 'Ebs' in device
    ]

    print('deregistering %s' % image.id)
    image.deregister()

    for snapshot in snapshots:
        print('deleting snapshot %s' % snapshot.id)
        snapshot.delete()
629 |
|
629 | |||
630 |
|
630 | |||
def wait_for_ssm(ssmclient, instances):
    """Wait for SSM to come online for an iterable of instance IDs.

    Polls ``describe_instance_information`` every 2 seconds until the
    number of reporting instances matches the number requested.
    """
    wanted = len(instances)

    while True:
        info = ssmclient.describe_instance_information(
            Filters=[
                {
                    'Key': 'InstanceIds',
                    'Values': [i.id for i in instances],
                },
            ],
        )

        available = len(info['InstanceInformationList'])

        print('%d/%d instances available in SSM' % (available, wanted))

        if available == wanted:
            return

        time.sleep(2)
652 |
|
652 | |||
653 |
|
653 | |||
def run_ssm_command(ssmclient, instances, document_name, parameters):
    """Run a PowerShell script on an EC2 instance via SSM.

    Sends the command to every instance, then polls each invocation
    until it reports ``Success``. Raises ``Exception`` on any terminal
    non-success status.
    """

    send_res = ssmclient.send_command(
        InstanceIds=[i.id for i in instances],
        DocumentName=document_name,
        Parameters=parameters,
        CloudWatchOutputConfig={
            'CloudWatchOutputEnabled': True,
        },
    )

    command_id = send_res['Command']['CommandId']

    for instance in instances:
        while True:
            try:
                invocation = ssmclient.get_command_invocation(
                    CommandId=command_id,
                    InstanceId=instance.id,
                )
            except botocore.exceptions.ClientError as e:
                # The invocation record may not be visible immediately
                # after send_command; retry until it materializes.
                if e.response['Error']['Code'] == 'InvocationDoesNotExist':
                    print('could not find SSM command invocation; waiting')
                    time.sleep(1)
                    continue
                else:
                    raise

            status = invocation['Status']

            if status == 'Success':
                break
            elif status in ('Pending', 'InProgress', 'Delayed'):
                time.sleep(2)
            else:
                raise Exception(
                    'command failed on %s: %s' % (instance.id, status)
                )
691 |
|
691 | |||
692 |
|
692 | |||
693 | @contextlib.contextmanager |
|
693 | @contextlib.contextmanager | |
694 | def temporary_ec2_instances(ec2resource, config): |
|
694 | def temporary_ec2_instances(ec2resource, config): | |
695 | """Create temporary EC2 instances. |
|
695 | """Create temporary EC2 instances. | |
696 |
|
696 | |||
697 | This is a proxy to ``ec2client.run_instances(**config)`` that takes care of |
|
697 | This is a proxy to ``ec2client.run_instances(**config)`` that takes care of | |
698 | managing the lifecycle of the instances. |
|
698 | managing the lifecycle of the instances. | |
699 |
|
699 | |||
700 | When the context manager exits, the instances are terminated. |
|
700 | When the context manager exits, the instances are terminated. | |
701 |
|
701 | |||
702 | The context manager evaluates to the list of data structures |
|
702 | The context manager evaluates to the list of data structures | |
703 | describing each created instance. The instances may not be available |
|
703 | describing each created instance. The instances may not be available | |
704 | for work immediately: it is up to the caller to wait for the instance |
|
704 | for work immediately: it is up to the caller to wait for the instance | |
705 | to start responding. |
|
705 | to start responding. | |
706 | """ |
|
706 | """ | |
707 |
|
707 | |||
708 | ids = None |
|
708 | ids = None | |
709 |
|
709 | |||
710 | try: |
|
710 | try: | |
711 | res = ec2resource.create_instances(**config) |
|
711 | res = ec2resource.create_instances(**config) | |
712 |
|
712 | |||
713 | ids = [i.id for i in res] |
|
713 | ids = [i.id for i in res] | |
714 | print('started instances: %s' % ' '.join(ids)) |
|
714 | print('started instances: %s' % ' '.join(ids)) | |
715 |
|
715 | |||
716 | yield res |
|
716 | yield res | |
717 | finally: |
|
717 | finally: | |
718 | if ids: |
|
718 | if ids: | |
719 | print('terminating instances: %s' % ' '.join(ids)) |
|
719 | print('terminating instances: %s' % ' '.join(ids)) | |
720 | for instance in res: |
|
720 | for instance in res: | |
721 | instance.terminate() |
|
721 | instance.terminate() | |
722 | print('terminated %d instances' % len(ids)) |
|
722 | print('terminated %d instances' % len(ids)) | |
723 |
|
723 | |||
724 |
|
724 | |||
725 | @contextlib.contextmanager |
|
725 | @contextlib.contextmanager | |
726 | def create_temp_windows_ec2_instances( |
|
726 | def create_temp_windows_ec2_instances( | |
727 | c: AWSConnection, config, bootstrap: bool = False |
|
727 | c: AWSConnection, config, bootstrap: bool = False | |
728 | ): |
|
728 | ): | |
729 | """Create temporary Windows EC2 instances. |
|
729 | """Create temporary Windows EC2 instances. | |
730 |
|
730 | |||
731 | This is a higher-level wrapper around ``create_temp_ec2_instances()`` that |
|
731 | This is a higher-level wrapper around ``create_temp_ec2_instances()`` that | |
732 | configures the Windows instance for Windows Remote Management. The emitted |
|
732 | configures the Windows instance for Windows Remote Management. The emitted | |
733 | instances will have a ``winrm_client`` attribute containing a |
|
733 | instances will have a ``winrm_client`` attribute containing a | |
734 | ``pypsrp.client.Client`` instance bound to the instance. |
|
734 | ``pypsrp.client.Client`` instance bound to the instance. | |
735 | """ |
|
735 | """ | |
736 | if 'IamInstanceProfile' in config: |
|
736 | if 'IamInstanceProfile' in config: | |
737 | raise ValueError('IamInstanceProfile cannot be provided in config') |
|
737 | raise ValueError('IamInstanceProfile cannot be provided in config') | |
738 | if 'UserData' in config: |
|
738 | if 'UserData' in config: | |
739 | raise ValueError('UserData cannot be provided in config') |
|
739 | raise ValueError('UserData cannot be provided in config') | |
740 |
|
740 | |||
741 | password = c.automation.default_password() |
|
741 | password = c.automation.default_password() | |
742 |
|
742 | |||
743 | config = copy.deepcopy(config) |
|
743 | config = copy.deepcopy(config) | |
744 | config['IamInstanceProfile'] = { |
|
744 | config['IamInstanceProfile'] = { | |
745 | 'Name': 'hg-ephemeral-ec2-1', |
|
745 | 'Name': 'hg-ephemeral-ec2-1', | |
746 | } |
|
746 | } | |
747 | config.setdefault('TagSpecifications', []).append( |
|
747 | config.setdefault('TagSpecifications', []).append( | |
748 | { |
|
748 | { | |
749 | 'ResourceType': 'instance', |
|
749 | 'ResourceType': 'instance', | |
750 | 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}], |
|
750 | 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}], | |
751 | } |
|
751 | } | |
752 | ) |
|
752 | ) | |
753 |
|
753 | |||
754 | if bootstrap: |
|
754 | if bootstrap: | |
755 | config['UserData'] = WINDOWS_USER_DATA % password |
|
755 | config['UserData'] = WINDOWS_USER_DATA % password | |
756 |
|
756 | |||
757 | with temporary_ec2_instances(c.ec2resource, config) as instances: |
|
757 | with temporary_ec2_instances(c.ec2resource, config) as instances: | |
758 | wait_for_ip_addresses(instances) |
|
758 | wait_for_ip_addresses(instances) | |
759 |
|
759 | |||
760 | print('waiting for Windows Remote Management service...') |
|
760 | print('waiting for Windows Remote Management service...') | |
761 |
|
761 | |||
762 | for instance in instances: |
|
762 | for instance in instances: | |
763 | client = wait_for_winrm( |
|
763 | client = wait_for_winrm( | |
764 | instance.public_ip_address, 'Administrator', password |
|
764 | instance.public_ip_address, 'Administrator', password | |
765 | ) |
|
765 | ) | |
766 | print('established WinRM connection to %s' % instance.id) |
|
766 | print('established WinRM connection to %s' % instance.id) | |
767 | instance.winrm_client = client |
|
767 | instance.winrm_client = client | |
768 |
|
768 | |||
769 | yield instances |
|
769 | yield instances | |
770 |
|
770 | |||
771 |
|
771 | |||
772 | def resolve_fingerprint(fingerprint): |
|
772 | def resolve_fingerprint(fingerprint): | |
773 | fingerprint = json.dumps(fingerprint, sort_keys=True) |
|
773 | fingerprint = json.dumps(fingerprint, sort_keys=True) | |
774 | return hashlib.sha256(fingerprint.encode('utf-8')).hexdigest() |
|
774 | return hashlib.sha256(fingerprint.encode('utf-8')).hexdigest() | |
775 |
|
775 | |||
776 |
|
776 | |||
777 | def find_and_reconcile_image(ec2resource, name, fingerprint): |
|
777 | def find_and_reconcile_image(ec2resource, name, fingerprint): | |
778 | """Attempt to find an existing EC2 AMI with a name and fingerprint. |
|
778 | """Attempt to find an existing EC2 AMI with a name and fingerprint. | |
779 |
|
779 | |||
780 | If an image with the specified fingerprint is found, it is returned. |
|
780 | If an image with the specified fingerprint is found, it is returned. | |
781 | Otherwise None is returned. |
|
781 | Otherwise None is returned. | |
782 |
|
782 | |||
783 | Existing images for the specified name that don't have the specified |
|
783 | Existing images for the specified name that don't have the specified | |
784 | fingerprint or are missing required metadata or deleted. |
|
784 | fingerprint or are missing required metadata or deleted. | |
785 | """ |
|
785 | """ | |
786 | # Find existing AMIs with this name and delete the ones that are invalid. |
|
786 | # Find existing AMIs with this name and delete the ones that are invalid. | |
787 | # Store a reference to a good image so it can be returned one the |
|
787 | # Store a reference to a good image so it can be returned one the | |
788 | # image state is reconciled. |
|
788 | # image state is reconciled. | |
789 | images = ec2resource.images.filter( |
|
789 | images = ec2resource.images.filter( | |
790 | Filters=[{'Name': 'name', 'Values': [name]}] |
|
790 | Filters=[{'Name': 'name', 'Values': [name]}] | |
791 | ) |
|
791 | ) | |
792 |
|
792 | |||
793 | existing_image = None |
|
793 | existing_image = None | |
794 |
|
794 | |||
795 | for image in images: |
|
795 | for image in images: | |
796 | if image.tags is None: |
|
796 | if image.tags is None: | |
797 | print( |
|
797 | print( | |
798 | 'image %s for %s lacks required tags; removing' |
|
798 | 'image %s for %s lacks required tags; removing' | |
799 | % (image.id, image.name) |
|
799 | % (image.id, image.name) | |
800 | ) |
|
800 | ) | |
801 | remove_ami(ec2resource, image) |
|
801 | remove_ami(ec2resource, image) | |
802 | else: |
|
802 | else: | |
803 | tags = {t['Key']: t['Value'] for t in image.tags} |
|
803 | tags = {t['Key']: t['Value'] for t in image.tags} | |
804 |
|
804 | |||
805 | if tags.get('HGIMAGEFINGERPRINT') == fingerprint: |
|
805 | if tags.get('HGIMAGEFINGERPRINT') == fingerprint: | |
806 | existing_image = image |
|
806 | existing_image = image | |
807 | else: |
|
807 | else: | |
808 | print( |
|
808 | print( | |
809 | 'image %s for %s has wrong fingerprint; removing' |
|
809 | 'image %s for %s has wrong fingerprint; removing' | |
810 | % (image.id, image.name) |
|
810 | % (image.id, image.name) | |
811 | ) |
|
811 | ) | |
812 | remove_ami(ec2resource, image) |
|
812 | remove_ami(ec2resource, image) | |
813 |
|
813 | |||
814 | return existing_image |
|
814 | return existing_image | |
815 |
|
815 | |||
816 |
|
816 | |||
817 | def create_ami_from_instance( |
|
817 | def create_ami_from_instance( | |
818 | ec2client, instance, name, description, fingerprint |
|
818 | ec2client, instance, name, description, fingerprint | |
819 | ): |
|
819 | ): | |
820 | """Create an AMI from a running instance. |
|
820 | """Create an AMI from a running instance. | |
821 |
|
821 | |||
822 | Returns the ``ec2resource.Image`` representing the created AMI. |
|
822 | Returns the ``ec2resource.Image`` representing the created AMI. | |
823 | """ |
|
823 | """ | |
824 | instance.stop() |
|
824 | instance.stop() | |
825 |
|
825 | |||
826 | ec2client.get_waiter('instance_stopped').wait( |
|
826 | ec2client.get_waiter('instance_stopped').wait( | |
827 | InstanceIds=[instance.id], |
|
827 | InstanceIds=[instance.id], | |
828 | WaiterConfig={ |
|
828 | WaiterConfig={ | |
829 | 'Delay': 5, |
|
829 | 'Delay': 5, | |
830 | }, |
|
830 | }, | |
831 | ) |
|
831 | ) | |
832 | print('%s is stopped' % instance.id) |
|
832 | print('%s is stopped' % instance.id) | |
833 |
|
833 | |||
834 | image = instance.create_image( |
|
834 | image = instance.create_image( | |
835 | Name=name, |
|
835 | Name=name, | |
836 | Description=description, |
|
836 | Description=description, | |
837 | ) |
|
837 | ) | |
838 |
|
838 | |||
839 | image.create_tags( |
|
839 | image.create_tags( | |
840 | Tags=[ |
|
840 | Tags=[ | |
841 | { |
|
841 | { | |
842 | 'Key': 'HGIMAGEFINGERPRINT', |
|
842 | 'Key': 'HGIMAGEFINGERPRINT', | |
843 | 'Value': fingerprint, |
|
843 | 'Value': fingerprint, | |
844 | }, |
|
844 | }, | |
845 | ] |
|
845 | ] | |
846 | ) |
|
846 | ) | |
847 |
|
847 | |||
848 | print('waiting for image %s' % image.id) |
|
848 | print('waiting for image %s' % image.id) | |
849 |
|
849 | |||
850 | ec2client.get_waiter('image_available').wait( |
|
850 | ec2client.get_waiter('image_available').wait( | |
851 | ImageIds=[image.id], |
|
851 | ImageIds=[image.id], | |
852 | ) |
|
852 | ) | |
853 |
|
853 | |||
854 | print('image %s available as %s' % (image.id, image.name)) |
|
854 | print('image %s available as %s' % (image.id, image.name)) | |
855 |
|
855 | |||
856 | return image |
|
856 | return image | |
857 |
|
857 | |||
858 |
|
858 | |||
859 | def ensure_linux_dev_ami(c: AWSConnection, distro='debian10', prefix='hg-'): |
|
859 | def ensure_linux_dev_ami(c: AWSConnection, distro='debian10', prefix='hg-'): | |
860 | """Ensures a Linux development AMI is available and up-to-date. |
|
860 | """Ensures a Linux development AMI is available and up-to-date. | |
861 |
|
861 | |||
862 | Returns an ``ec2.Image`` of either an existing AMI or a newly-built one. |
|
862 | Returns an ``ec2.Image`` of either an existing AMI or a newly-built one. | |
863 | """ |
|
863 | """ | |
864 | ec2client = c.ec2client |
|
864 | ec2client = c.ec2client | |
865 | ec2resource = c.ec2resource |
|
865 | ec2resource = c.ec2resource | |
866 |
|
866 | |||
867 | name = '%s%s-%s' % (prefix, 'linux-dev', distro) |
|
867 | name = '%s%s-%s' % (prefix, 'linux-dev', distro) | |
868 |
|
868 | |||
869 | if distro == 'debian9': |
|
869 | if distro == 'debian9': | |
870 | image = find_image( |
|
870 | image = find_image( | |
871 | ec2resource, |
|
871 | ec2resource, | |
872 | DEBIAN_ACCOUNT_ID, |
|
872 | DEBIAN_ACCOUNT_ID, | |
873 | 'debian-stretch-hvm-x86_64-gp2-2019-09-08-17994', |
|
873 | 'debian-stretch-hvm-x86_64-gp2-2019-09-08-17994', | |
874 | ) |
|
874 | ) | |
875 | ssh_username = 'admin' |
|
875 | ssh_username = 'admin' | |
876 | elif distro == 'debian10': |
|
876 | elif distro == 'debian10': | |
877 | image = find_image( |
|
877 | image = find_image( | |
878 | ec2resource, |
|
878 | ec2resource, | |
879 | DEBIAN_ACCOUNT_ID_2, |
|
879 | DEBIAN_ACCOUNT_ID_2, | |
880 | 'debian-10-amd64-20190909-10', |
|
880 | 'debian-10-amd64-20190909-10', | |
881 | ) |
|
881 | ) | |
882 | ssh_username = 'admin' |
|
882 | ssh_username = 'admin' | |
883 | elif distro == 'ubuntu18.04': |
|
883 | elif distro == 'ubuntu18.04': | |
884 | image = find_image( |
|
884 | image = find_image( | |
885 | ec2resource, |
|
885 | ec2resource, | |
886 | UBUNTU_ACCOUNT_ID, |
|
886 | UBUNTU_ACCOUNT_ID, | |
887 | 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190918', |
|
887 | 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190918', | |
888 | ) |
|
888 | ) | |
889 | ssh_username = 'ubuntu' |
|
889 | ssh_username = 'ubuntu' | |
890 | elif distro == 'ubuntu19.04': |
|
890 | elif distro == 'ubuntu19.04': | |
891 | image = find_image( |
|
891 | image = find_image( | |
892 | ec2resource, |
|
892 | ec2resource, | |
893 | UBUNTU_ACCOUNT_ID, |
|
893 | UBUNTU_ACCOUNT_ID, | |
894 | 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190918', |
|
894 | 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190918', | |
895 | ) |
|
895 | ) | |
896 | ssh_username = 'ubuntu' |
|
896 | ssh_username = 'ubuntu' | |
897 | else: |
|
897 | else: | |
898 | raise ValueError('unsupported Linux distro: %s' % distro) |
|
898 | raise ValueError('unsupported Linux distro: %s' % distro) | |
899 |
|
899 | |||
900 | config = { |
|
900 | config = { | |
901 | 'BlockDeviceMappings': [ |
|
901 | 'BlockDeviceMappings': [ | |
902 | { |
|
902 | { | |
903 | 'DeviceName': image.block_device_mappings[0]['DeviceName'], |
|
903 | 'DeviceName': image.block_device_mappings[0]['DeviceName'], | |
904 | 'Ebs': { |
|
904 | 'Ebs': { | |
905 | 'DeleteOnTermination': True, |
|
905 | 'DeleteOnTermination': True, | |
906 | 'VolumeSize': 10, |
|
906 | 'VolumeSize': 10, | |
907 | 'VolumeType': 'gp2', |
|
907 | 'VolumeType': 'gp2', | |
908 | }, |
|
908 | }, | |
909 | }, |
|
909 | }, | |
910 | ], |
|
910 | ], | |
911 | 'EbsOptimized': True, |
|
911 | 'EbsOptimized': True, | |
912 | 'ImageId': image.id, |
|
912 | 'ImageId': image.id, | |
913 | 'InstanceInitiatedShutdownBehavior': 'stop', |
|
913 | 'InstanceInitiatedShutdownBehavior': 'stop', | |
914 | # 8 VCPUs for compiling Python. |
|
914 | # 8 VCPUs for compiling Python. | |
915 | 'InstanceType': 't3.2xlarge', |
|
915 | 'InstanceType': 't3.2xlarge', | |
916 | 'KeyName': '%sautomation' % prefix, |
|
916 | 'KeyName': '%sautomation' % prefix, | |
917 | 'MaxCount': 1, |
|
917 | 'MaxCount': 1, | |
918 | 'MinCount': 1, |
|
918 | 'MinCount': 1, | |
919 | 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id], |
|
919 | 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id], | |
920 | } |
|
920 | } | |
921 |
|
921 | |||
922 | requirements2_path = ( |
|
922 | requirements2_path = ( | |
923 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt' |
|
923 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt' | |
924 | ) |
|
924 | ) | |
925 | requirements3_path = ( |
|
925 | requirements3_path = ( | |
926 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt' |
|
926 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt' | |
927 | ) |
|
927 | ) | |
|
928 | requirements35_path = ( | |||
|
929 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.5.txt' | |||
|
930 | ) | |||
928 | with requirements2_path.open('r', encoding='utf-8') as fh: |
|
931 | with requirements2_path.open('r', encoding='utf-8') as fh: | |
929 | requirements2 = fh.read() |
|
932 | requirements2 = fh.read() | |
930 | with requirements3_path.open('r', encoding='utf-8') as fh: |
|
933 | with requirements3_path.open('r', encoding='utf-8') as fh: | |
931 | requirements3 = fh.read() |
|
934 | requirements3 = fh.read() | |
|
935 | with requirements35_path.open('r', encoding='utf-8') as fh: | |||
|
936 | requirements35 = fh.read() | |||
932 |
|
937 | |||
933 | # Compute a deterministic fingerprint to determine whether image needs to |
|
938 | # Compute a deterministic fingerprint to determine whether image needs to | |
934 | # be regenerated. |
|
939 | # be regenerated. | |
935 | fingerprint = resolve_fingerprint( |
|
940 | fingerprint = resolve_fingerprint( | |
936 | { |
|
941 | { | |
937 | 'instance_config': config, |
|
942 | 'instance_config': config, | |
938 | 'bootstrap_script': BOOTSTRAP_DEBIAN, |
|
943 | 'bootstrap_script': BOOTSTRAP_DEBIAN, | |
939 | 'requirements_py2': requirements2, |
|
944 | 'requirements_py2': requirements2, | |
940 | 'requirements_py3': requirements3, |
|
945 | 'requirements_py3': requirements3, | |
|
946 | 'requirements_py35': requirements35, | |||
941 | } |
|
947 | } | |
942 | ) |
|
948 | ) | |
943 |
|
949 | |||
944 | existing_image = find_and_reconcile_image(ec2resource, name, fingerprint) |
|
950 | existing_image = find_and_reconcile_image(ec2resource, name, fingerprint) | |
945 |
|
951 | |||
946 | if existing_image: |
|
952 | if existing_image: | |
947 | return existing_image |
|
953 | return existing_image | |
948 |
|
954 | |||
949 | print('no suitable %s image found; creating one...' % name) |
|
955 | print('no suitable %s image found; creating one...' % name) | |
950 |
|
956 | |||
951 | with temporary_ec2_instances(ec2resource, config) as instances: |
|
957 | with temporary_ec2_instances(ec2resource, config) as instances: | |
952 | wait_for_ip_addresses(instances) |
|
958 | wait_for_ip_addresses(instances) | |
953 |
|
959 | |||
954 | instance = instances[0] |
|
960 | instance = instances[0] | |
955 |
|
961 | |||
956 | client = wait_for_ssh( |
|
962 | client = wait_for_ssh( | |
957 | instance.public_ip_address, |
|
963 | instance.public_ip_address, | |
958 | 22, |
|
964 | 22, | |
959 | username=ssh_username, |
|
965 | username=ssh_username, | |
960 | key_filename=str(c.key_pair_path_private('automation')), |
|
966 | key_filename=str(c.key_pair_path_private('automation')), | |
961 | ) |
|
967 | ) | |
962 |
|
968 | |||
963 | home = '/home/%s' % ssh_username |
|
969 | home = '/home/%s' % ssh_username | |
964 |
|
970 | |||
965 | with client: |
|
971 | with client: | |
966 | print('connecting to SSH server') |
|
972 | print('connecting to SSH server') | |
967 | sftp = client.open_sftp() |
|
973 | sftp = client.open_sftp() | |
968 |
|
974 | |||
969 | print('uploading bootstrap files') |
|
975 | print('uploading bootstrap files') | |
970 | with sftp.open('%s/bootstrap' % home, 'wb') as fh: |
|
976 | with sftp.open('%s/bootstrap' % home, 'wb') as fh: | |
971 | fh.write(BOOTSTRAP_DEBIAN) |
|
977 | fh.write(BOOTSTRAP_DEBIAN) | |
972 | fh.chmod(0o0700) |
|
978 | fh.chmod(0o0700) | |
973 |
|
979 | |||
974 | with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh: |
|
980 | with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh: | |
975 | fh.write(requirements2) |
|
981 | fh.write(requirements2) | |
976 | fh.chmod(0o0700) |
|
982 | fh.chmod(0o0700) | |
977 |
|
983 | |||
978 | with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh: |
|
984 | with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh: | |
979 | fh.write(requirements3) |
|
985 | fh.write(requirements3) | |
980 | fh.chmod(0o0700) |
|
986 | fh.chmod(0o0700) | |
981 |
|
987 | |||
|
988 | with sftp.open('%s/requirements-py3.5.txt' % home, 'wb') as fh: | |||
|
989 | fh.write(requirements35) | |||
|
990 | fh.chmod(0o0700) | |||
|
991 | ||||
982 | print('executing bootstrap') |
|
992 | print('executing bootstrap') | |
983 | chan, stdin, stdout = ssh_exec_command( |
|
993 | chan, stdin, stdout = ssh_exec_command( | |
984 | client, '%s/bootstrap' % home |
|
994 | client, '%s/bootstrap' % home | |
985 | ) |
|
995 | ) | |
986 | stdin.close() |
|
996 | stdin.close() | |
987 |
|
997 | |||
988 | for line in stdout: |
|
998 | for line in stdout: | |
989 | print(line, end='') |
|
999 | print(line, end='') | |
990 |
|
1000 | |||
991 | res = chan.recv_exit_status() |
|
1001 | res = chan.recv_exit_status() | |
992 | if res: |
|
1002 | if res: | |
993 | raise Exception('non-0 exit from bootstrap: %d' % res) |
|
1003 | raise Exception('non-0 exit from bootstrap: %d' % res) | |
994 |
|
1004 | |||
995 | print( |
|
1005 | print( | |
996 | 'bootstrap completed; stopping %s to create %s' |
|
1006 | 'bootstrap completed; stopping %s to create %s' | |
997 | % (instance.id, name) |
|
1007 | % (instance.id, name) | |
998 | ) |
|
1008 | ) | |
999 |
|
1009 | |||
1000 | return create_ami_from_instance( |
|
1010 | return create_ami_from_instance( | |
1001 | ec2client, |
|
1011 | ec2client, | |
1002 | instance, |
|
1012 | instance, | |
1003 | name, |
|
1013 | name, | |
1004 | 'Mercurial Linux development environment', |
|
1014 | 'Mercurial Linux development environment', | |
1005 | fingerprint, |
|
1015 | fingerprint, | |
1006 | ) |
|
1016 | ) | |
1007 |
|
1017 | |||
1008 |
|
1018 | |||
1009 | @contextlib.contextmanager |
|
1019 | @contextlib.contextmanager | |
1010 | def temporary_linux_dev_instances( |
|
1020 | def temporary_linux_dev_instances( | |
1011 | c: AWSConnection, |
|
1021 | c: AWSConnection, | |
1012 | image, |
|
1022 | image, | |
1013 | instance_type, |
|
1023 | instance_type, | |
1014 | prefix='hg-', |
|
1024 | prefix='hg-', | |
1015 | ensure_extra_volume=False, |
|
1025 | ensure_extra_volume=False, | |
1016 | ): |
|
1026 | ): | |
1017 | """Create temporary Linux development EC2 instances. |
|
1027 | """Create temporary Linux development EC2 instances. | |
1018 |
|
1028 | |||
1019 | Context manager resolves to a list of ``ec2.Instance`` that were created |
|
1029 | Context manager resolves to a list of ``ec2.Instance`` that were created | |
1020 | and are running. |
|
1030 | and are running. | |
1021 |
|
1031 | |||
1022 | ``ensure_extra_volume`` can be set to ``True`` to require that instances |
|
1032 | ``ensure_extra_volume`` can be set to ``True`` to require that instances | |
1023 | have a 2nd storage volume available other than the primary AMI volume. |
|
1033 | have a 2nd storage volume available other than the primary AMI volume. | |
1024 | For instance types with instance storage, this does nothing special. |
|
1034 | For instance types with instance storage, this does nothing special. | |
1025 | But for instance types without instance storage, an additional EBS volume |
|
1035 | But for instance types without instance storage, an additional EBS volume | |
1026 | will be added to the instance. |
|
1036 | will be added to the instance. | |
1027 |
|
1037 | |||
1028 | Instances have an ``ssh_client`` attribute containing a paramiko SSHClient |
|
1038 | Instances have an ``ssh_client`` attribute containing a paramiko SSHClient | |
1029 | instance bound to the instance. |
|
1039 | instance bound to the instance. | |
1030 |
|
1040 | |||
1031 | Instances have an ``ssh_private_key_path`` attributing containing the |
|
1041 | Instances have an ``ssh_private_key_path`` attributing containing the | |
1032 | str path to the SSH private key to connect to the instance. |
|
1042 | str path to the SSH private key to connect to the instance. | |
1033 | """ |
|
1043 | """ | |
1034 |
|
1044 | |||
1035 | block_device_mappings = [ |
|
1045 | block_device_mappings = [ | |
1036 | { |
|
1046 | { | |
1037 | 'DeviceName': image.block_device_mappings[0]['DeviceName'], |
|
1047 | 'DeviceName': image.block_device_mappings[0]['DeviceName'], | |
1038 | 'Ebs': { |
|
1048 | 'Ebs': { | |
1039 | 'DeleteOnTermination': True, |
|
1049 | 'DeleteOnTermination': True, | |
1040 | 'VolumeSize': 12, |
|
1050 | 'VolumeSize': 12, | |
1041 | 'VolumeType': 'gp2', |
|
1051 | 'VolumeType': 'gp2', | |
1042 | }, |
|
1052 | }, | |
1043 | } |
|
1053 | } | |
1044 | ] |
|
1054 | ] | |
1045 |
|
1055 | |||
1046 | # This is not an exhaustive list of instance types having instance storage. |
|
1056 | # This is not an exhaustive list of instance types having instance storage. | |
1047 | # But |
|
1057 | # But | |
1048 | if ensure_extra_volume and not instance_type.startswith( |
|
1058 | if ensure_extra_volume and not instance_type.startswith( | |
1049 | tuple(INSTANCE_TYPES_WITH_STORAGE) |
|
1059 | tuple(INSTANCE_TYPES_WITH_STORAGE) | |
1050 | ): |
|
1060 | ): | |
1051 | main_device = block_device_mappings[0]['DeviceName'] |
|
1061 | main_device = block_device_mappings[0]['DeviceName'] | |
1052 |
|
1062 | |||
1053 | if main_device == 'xvda': |
|
1063 | if main_device == 'xvda': | |
1054 | second_device = 'xvdb' |
|
1064 | second_device = 'xvdb' | |
1055 | elif main_device == '/dev/sda1': |
|
1065 | elif main_device == '/dev/sda1': | |
1056 | second_device = '/dev/sdb' |
|
1066 | second_device = '/dev/sdb' | |
1057 | else: |
|
1067 | else: | |
1058 | raise ValueError( |
|
1068 | raise ValueError( | |
1059 | 'unhandled primary EBS device name: %s' % main_device |
|
1069 | 'unhandled primary EBS device name: %s' % main_device | |
1060 | ) |
|
1070 | ) | |
1061 |
|
1071 | |||
1062 | block_device_mappings.append( |
|
1072 | block_device_mappings.append( | |
1063 | { |
|
1073 | { | |
1064 | 'DeviceName': second_device, |
|
1074 | 'DeviceName': second_device, | |
1065 | 'Ebs': { |
|
1075 | 'Ebs': { | |
1066 | 'DeleteOnTermination': True, |
|
1076 | 'DeleteOnTermination': True, | |
1067 | 'VolumeSize': 8, |
|
1077 | 'VolumeSize': 8, | |
1068 | 'VolumeType': 'gp2', |
|
1078 | 'VolumeType': 'gp2', | |
1069 | }, |
|
1079 | }, | |
1070 | } |
|
1080 | } | |
1071 | ) |
|
1081 | ) | |
1072 |
|
1082 | |||
1073 | config = { |
|
1083 | config = { | |
1074 | 'BlockDeviceMappings': block_device_mappings, |
|
1084 | 'BlockDeviceMappings': block_device_mappings, | |
1075 | 'EbsOptimized': True, |
|
1085 | 'EbsOptimized': True, | |
1076 | 'ImageId': image.id, |
|
1086 | 'ImageId': image.id, | |
1077 | 'InstanceInitiatedShutdownBehavior': 'terminate', |
|
1087 | 'InstanceInitiatedShutdownBehavior': 'terminate', | |
1078 | 'InstanceType': instance_type, |
|
1088 | 'InstanceType': instance_type, | |
1079 | 'KeyName': '%sautomation' % prefix, |
|
1089 | 'KeyName': '%sautomation' % prefix, | |
1080 | 'MaxCount': 1, |
|
1090 | 'MaxCount': 1, | |
1081 | 'MinCount': 1, |
|
1091 | 'MinCount': 1, | |
1082 | 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id], |
|
1092 | 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id], | |
1083 | } |
|
1093 | } | |
1084 |
|
1094 | |||
1085 | with temporary_ec2_instances(c.ec2resource, config) as instances: |
|
1095 | with temporary_ec2_instances(c.ec2resource, config) as instances: | |
1086 | wait_for_ip_addresses(instances) |
|
1096 | wait_for_ip_addresses(instances) | |
1087 |
|
1097 | |||
1088 | ssh_private_key_path = str(c.key_pair_path_private('automation')) |
|
1098 | ssh_private_key_path = str(c.key_pair_path_private('automation')) | |
1089 |
|
1099 | |||
1090 | for instance in instances: |
|
1100 | for instance in instances: | |
1091 | client = wait_for_ssh( |
|
1101 | client = wait_for_ssh( | |
1092 | instance.public_ip_address, |
|
1102 | instance.public_ip_address, | |
1093 | 22, |
|
1103 | 22, | |
1094 | username='hg', |
|
1104 | username='hg', | |
1095 | key_filename=ssh_private_key_path, |
|
1105 | key_filename=ssh_private_key_path, | |
1096 | ) |
|
1106 | ) | |
1097 |
|
1107 | |||
1098 | instance.ssh_client = client |
|
1108 | instance.ssh_client = client | |
1099 | instance.ssh_private_key_path = ssh_private_key_path |
|
1109 | instance.ssh_private_key_path = ssh_private_key_path | |
1100 |
|
1110 | |||
1101 | try: |
|
1111 | try: | |
1102 | yield instances |
|
1112 | yield instances | |
1103 | finally: |
|
1113 | finally: | |
1104 | for instance in instances: |
|
1114 | for instance in instances: | |
1105 | instance.ssh_client.close() |
|
1115 | instance.ssh_client.close() | |
1106 |
|
1116 | |||
1107 |
|
1117 | |||
1108 | def ensure_windows_dev_ami( |
|
1118 | def ensure_windows_dev_ami( | |
1109 | c: AWSConnection, |
|
1119 | c: AWSConnection, | |
1110 | prefix='hg-', |
|
1120 | prefix='hg-', | |
1111 | base_image_name=WINDOWS_BASE_IMAGE_NAME, |
|
1121 | base_image_name=WINDOWS_BASE_IMAGE_NAME, | |
1112 | ): |
|
1122 | ): | |
1113 | """Ensure Windows Development AMI is available and up-to-date. |
|
1123 | """Ensure Windows Development AMI is available and up-to-date. | |
1114 |
|
1124 | |||
1115 | If necessary, a modern AMI will be built by starting a temporary EC2 |
|
1125 | If necessary, a modern AMI will be built by starting a temporary EC2 | |
1116 | instance and bootstrapping it. |
|
1126 | instance and bootstrapping it. | |
1117 |
|
1127 | |||
1118 | Obsolete AMIs will be deleted so there is only a single AMI having the |
|
1128 | Obsolete AMIs will be deleted so there is only a single AMI having the | |
1119 | desired name. |
|
1129 | desired name. | |
1120 |
|
1130 | |||
1121 | Returns an ``ec2.Image`` of either an existing AMI or a newly-built |
|
1131 | Returns an ``ec2.Image`` of either an existing AMI or a newly-built | |
1122 | one. |
|
1132 | one. | |
1123 | """ |
|
1133 | """ | |
1124 | ec2client = c.ec2client |
|
1134 | ec2client = c.ec2client | |
1125 | ec2resource = c.ec2resource |
|
1135 | ec2resource = c.ec2resource | |
1126 | ssmclient = c.session.client('ssm') |
|
1136 | ssmclient = c.session.client('ssm') | |
1127 |
|
1137 | |||
1128 | name = '%s%s' % (prefix, 'windows-dev') |
|
1138 | name = '%s%s' % (prefix, 'windows-dev') | |
1129 |
|
1139 | |||
1130 | image = find_image( |
|
1140 | image = find_image( | |
1131 | ec2resource, |
|
1141 | ec2resource, | |
1132 | AMAZON_ACCOUNT_ID, |
|
1142 | AMAZON_ACCOUNT_ID, | |
1133 | base_image_name, |
|
1143 | base_image_name, | |
1134 | reverse_sort_field="name", |
|
1144 | reverse_sort_field="name", | |
1135 | ) |
|
1145 | ) | |
1136 |
|
1146 | |||
1137 | config = { |
|
1147 | config = { | |
1138 | 'BlockDeviceMappings': [ |
|
1148 | 'BlockDeviceMappings': [ | |
1139 | { |
|
1149 | { | |
1140 | 'DeviceName': '/dev/sda1', |
|
1150 | 'DeviceName': '/dev/sda1', | |
1141 | 'Ebs': { |
|
1151 | 'Ebs': { | |
1142 | 'DeleteOnTermination': True, |
|
1152 | 'DeleteOnTermination': True, | |
1143 | 'VolumeSize': 32, |
|
1153 | 'VolumeSize': 32, | |
1144 | 'VolumeType': 'gp2', |
|
1154 | 'VolumeType': 'gp2', | |
1145 | }, |
|
1155 | }, | |
1146 | } |
|
1156 | } | |
1147 | ], |
|
1157 | ], | |
1148 | 'ImageId': image.id, |
|
1158 | 'ImageId': image.id, | |
1149 | 'InstanceInitiatedShutdownBehavior': 'stop', |
|
1159 | 'InstanceInitiatedShutdownBehavior': 'stop', | |
1150 | 'InstanceType': 't3.medium', |
|
1160 | 'InstanceType': 't3.medium', | |
1151 | 'KeyName': '%sautomation' % prefix, |
|
1161 | 'KeyName': '%sautomation' % prefix, | |
1152 | 'MaxCount': 1, |
|
1162 | 'MaxCount': 1, | |
1153 | 'MinCount': 1, |
|
1163 | 'MinCount': 1, | |
1154 | 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], |
|
1164 | 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], | |
1155 | } |
|
1165 | } | |
1156 |
|
1166 | |||
1157 | commands = [ |
|
1167 | commands = [ | |
1158 | # Need to start the service so sshd_config is generated. |
|
1168 | # Need to start the service so sshd_config is generated. | |
1159 | 'Start-Service sshd', |
|
1169 | 'Start-Service sshd', | |
1160 | 'Write-Output "modifying sshd_config"', |
|
1170 | 'Write-Output "modifying sshd_config"', | |
1161 | r'$content = Get-Content C:\ProgramData\ssh\sshd_config', |
|
1171 | r'$content = Get-Content C:\ProgramData\ssh\sshd_config', | |
1162 | '$content = $content -replace "Match Group administrators","" -replace "AuthorizedKeysFile __PROGRAMDATA__/ssh/administrators_authorized_keys",""', |
|
1172 | '$content = $content -replace "Match Group administrators","" -replace "AuthorizedKeysFile __PROGRAMDATA__/ssh/administrators_authorized_keys",""', | |
1163 | r'$content | Set-Content C:\ProgramData\ssh\sshd_config', |
|
1173 | r'$content | Set-Content C:\ProgramData\ssh\sshd_config', | |
1164 | 'Import-Module OpenSSHUtils', |
|
1174 | 'Import-Module OpenSSHUtils', | |
1165 | r'Repair-SshdConfigPermission C:\ProgramData\ssh\sshd_config -Confirm:$false', |
|
1175 | r'Repair-SshdConfigPermission C:\ProgramData\ssh\sshd_config -Confirm:$false', | |
1166 | 'Restart-Service sshd', |
|
1176 | 'Restart-Service sshd', | |
1167 | 'Write-Output "installing OpenSSL client"', |
|
1177 | 'Write-Output "installing OpenSSL client"', | |
1168 | 'Add-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0', |
|
1178 | 'Add-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0', | |
1169 | 'Set-Service -Name sshd -StartupType "Automatic"', |
|
1179 | 'Set-Service -Name sshd -StartupType "Automatic"', | |
1170 | 'Write-Output "OpenSSH server running"', |
|
1180 | 'Write-Output "OpenSSH server running"', | |
1171 | ] |
|
1181 | ] | |
1172 |
|
1182 | |||
1173 | with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh: |
|
1183 | with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh: | |
1174 | commands.extend(l.rstrip() for l in fh) |
|
1184 | commands.extend(l.rstrip() for l in fh) | |
1175 |
|
1185 | |||
1176 | # Schedule run of EC2Launch on next boot. This ensures that UserData |
|
1186 | # Schedule run of EC2Launch on next boot. This ensures that UserData | |
1177 | # is executed. |
|
1187 | # is executed. | |
1178 | # We disable setComputerName because it forces a reboot. |
|
1188 | # We disable setComputerName because it forces a reboot. | |
1179 | # We set an explicit admin password because this causes UserData to run |
|
1189 | # We set an explicit admin password because this causes UserData to run | |
1180 | # as Administrator instead of System. |
|
1190 | # as Administrator instead of System. | |
1181 | commands.extend( |
|
1191 | commands.extend( | |
1182 | [ |
|
1192 | [ | |
1183 | r'''Set-Content -Path C:\ProgramData\Amazon\EC2-Windows\Launch\Config\LaunchConfig.json ''' |
|
1193 | r'''Set-Content -Path C:\ProgramData\Amazon\EC2-Windows\Launch\Config\LaunchConfig.json ''' | |
1184 | r'''-Value '{"setComputerName": false, "setWallpaper": true, "addDnsSuffixList": true, ''' |
|
1194 | r'''-Value '{"setComputerName": false, "setWallpaper": true, "addDnsSuffixList": true, ''' | |
1185 | r'''"extendBootVolumeSize": true, "handleUserData": true, ''' |
|
1195 | r'''"extendBootVolumeSize": true, "handleUserData": true, ''' | |
1186 | r'''"adminPasswordType": "Specify", "adminPassword": "%s"}' ''' |
|
1196 | r'''"adminPasswordType": "Specify", "adminPassword": "%s"}' ''' | |
1187 | % c.automation.default_password(), |
|
1197 | % c.automation.default_password(), | |
1188 | r'C:\ProgramData\Amazon\EC2-Windows\Launch\Scripts\InitializeInstance.ps1 ' |
|
1198 | r'C:\ProgramData\Amazon\EC2-Windows\Launch\Scripts\InitializeInstance.ps1 ' | |
1189 | r'–Schedule', |
|
1199 | r'–Schedule', | |
1190 | ] |
|
1200 | ] | |
1191 | ) |
|
1201 | ) | |
1192 |
|
1202 | |||
1193 | # Disable Windows Defender when bootstrapping because it just slows |
|
1203 | # Disable Windows Defender when bootstrapping because it just slows | |
1194 | # things down. |
|
1204 | # things down. | |
1195 | commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true') |
|
1205 | commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true') | |
1196 | commands.append('Set-MpPreference -DisableRealtimeMonitoring $false') |
|
1206 | commands.append('Set-MpPreference -DisableRealtimeMonitoring $false') | |
1197 |
|
1207 | |||
1198 | # Compute a deterministic fingerprint to determine whether image needs |
|
1208 | # Compute a deterministic fingerprint to determine whether image needs | |
1199 | # to be regenerated. |
|
1209 | # to be regenerated. | |
1200 | fingerprint = resolve_fingerprint( |
|
1210 | fingerprint = resolve_fingerprint( | |
1201 | { |
|
1211 | { | |
1202 | 'instance_config': config, |
|
1212 | 'instance_config': config, | |
1203 | 'user_data': WINDOWS_USER_DATA, |
|
1213 | 'user_data': WINDOWS_USER_DATA, | |
1204 | 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL, |
|
1214 | 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL, | |
1205 | 'bootstrap_commands': commands, |
|
1215 | 'bootstrap_commands': commands, | |
1206 | 'base_image_name': base_image_name, |
|
1216 | 'base_image_name': base_image_name, | |
1207 | } |
|
1217 | } | |
1208 | ) |
|
1218 | ) | |
1209 |
|
1219 | |||
1210 | existing_image = find_and_reconcile_image(ec2resource, name, fingerprint) |
|
1220 | existing_image = find_and_reconcile_image(ec2resource, name, fingerprint) | |
1211 |
|
1221 | |||
1212 | if existing_image: |
|
1222 | if existing_image: | |
1213 | return existing_image |
|
1223 | return existing_image | |
1214 |
|
1224 | |||
1215 | print('no suitable Windows development image found; creating one...') |
|
1225 | print('no suitable Windows development image found; creating one...') | |
1216 |
|
1226 | |||
1217 | with create_temp_windows_ec2_instances( |
|
1227 | with create_temp_windows_ec2_instances( | |
1218 | c, config, bootstrap=True |
|
1228 | c, config, bootstrap=True | |
1219 | ) as instances: |
|
1229 | ) as instances: | |
1220 | assert len(instances) == 1 |
|
1230 | assert len(instances) == 1 | |
1221 | instance = instances[0] |
|
1231 | instance = instances[0] | |
1222 |
|
1232 | |||
1223 | wait_for_ssm(ssmclient, [instance]) |
|
1233 | wait_for_ssm(ssmclient, [instance]) | |
1224 |
|
1234 | |||
1225 | # On first boot, install various Windows updates. |
|
1235 | # On first boot, install various Windows updates. | |
1226 | # We would ideally use PowerShell Remoting for this. However, there are |
|
1236 | # We would ideally use PowerShell Remoting for this. However, there are | |
1227 | # trust issues that make it difficult to invoke Windows Update |
|
1237 | # trust issues that make it difficult to invoke Windows Update | |
1228 | # remotely. So we use SSM, which has a mechanism for running Windows |
|
1238 | # remotely. So we use SSM, which has a mechanism for running Windows | |
1229 | # Update. |
|
1239 | # Update. | |
1230 | print('installing Windows features...') |
|
1240 | print('installing Windows features...') | |
1231 | run_ssm_command( |
|
1241 | run_ssm_command( | |
1232 | ssmclient, |
|
1242 | ssmclient, | |
1233 | [instance], |
|
1243 | [instance], | |
1234 | 'AWS-RunPowerShellScript', |
|
1244 | 'AWS-RunPowerShellScript', | |
1235 | { |
|
1245 | { | |
1236 | 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'), |
|
1246 | 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'), | |
1237 | }, |
|
1247 | }, | |
1238 | ) |
|
1248 | ) | |
1239 |
|
1249 | |||
1240 | # Reboot so all updates are fully applied. |
|
1250 | # Reboot so all updates are fully applied. | |
1241 | # |
|
1251 | # | |
1242 | # We don't use instance.reboot() here because it is asynchronous and |
|
1252 | # We don't use instance.reboot() here because it is asynchronous and | |
1243 | # we don't know when exactly the instance has rebooted. It could take |
|
1253 | # we don't know when exactly the instance has rebooted. It could take | |
1244 | # a while to stop and we may start trying to interact with the instance |
|
1254 | # a while to stop and we may start trying to interact with the instance | |
1245 | # before it has rebooted. |
|
1255 | # before it has rebooted. | |
1246 | print('rebooting instance %s' % instance.id) |
|
1256 | print('rebooting instance %s' % instance.id) | |
1247 | instance.stop() |
|
1257 | instance.stop() | |
1248 | ec2client.get_waiter('instance_stopped').wait( |
|
1258 | ec2client.get_waiter('instance_stopped').wait( | |
1249 | InstanceIds=[instance.id], |
|
1259 | InstanceIds=[instance.id], | |
1250 | WaiterConfig={ |
|
1260 | WaiterConfig={ | |
1251 | 'Delay': 5, |
|
1261 | 'Delay': 5, | |
1252 | }, |
|
1262 | }, | |
1253 | ) |
|
1263 | ) | |
1254 |
|
1264 | |||
1255 | instance.start() |
|
1265 | instance.start() | |
1256 | wait_for_ip_addresses([instance]) |
|
1266 | wait_for_ip_addresses([instance]) | |
1257 |
|
1267 | |||
1258 | # There is a race condition here between the User Data PS script running |
|
1268 | # There is a race condition here between the User Data PS script running | |
1259 | # and us connecting to WinRM. This can manifest as |
|
1269 | # and us connecting to WinRM. This can manifest as | |
1260 | # "AuthorizationManager check failed" failures during run_powershell(). |
|
1270 | # "AuthorizationManager check failed" failures during run_powershell(). | |
1261 | # TODO figure out a workaround. |
|
1271 | # TODO figure out a workaround. | |
1262 |
|
1272 | |||
1263 | print('waiting for Windows Remote Management to come back...') |
|
1273 | print('waiting for Windows Remote Management to come back...') | |
1264 | client = wait_for_winrm( |
|
1274 | client = wait_for_winrm( | |
1265 | instance.public_ip_address, |
|
1275 | instance.public_ip_address, | |
1266 | 'Administrator', |
|
1276 | 'Administrator', | |
1267 | c.automation.default_password(), |
|
1277 | c.automation.default_password(), | |
1268 | ) |
|
1278 | ) | |
1269 | print('established WinRM connection to %s' % instance.id) |
|
1279 | print('established WinRM connection to %s' % instance.id) | |
1270 | instance.winrm_client = client |
|
1280 | instance.winrm_client = client | |
1271 |
|
1281 | |||
1272 | print('bootstrapping instance...') |
|
1282 | print('bootstrapping instance...') | |
1273 | run_powershell(instance.winrm_client, '\n'.join(commands)) |
|
1283 | run_powershell(instance.winrm_client, '\n'.join(commands)) | |
1274 |
|
1284 | |||
1275 | print('bootstrap completed; stopping %s to create image' % instance.id) |
|
1285 | print('bootstrap completed; stopping %s to create image' % instance.id) | |
1276 | return create_ami_from_instance( |
|
1286 | return create_ami_from_instance( | |
1277 | ec2client, |
|
1287 | ec2client, | |
1278 | instance, |
|
1288 | instance, | |
1279 | name, |
|
1289 | name, | |
1280 | 'Mercurial Windows development environment', |
|
1290 | 'Mercurial Windows development environment', | |
1281 | fingerprint, |
|
1291 | fingerprint, | |
1282 | ) |
|
1292 | ) | |
1283 |
|
1293 | |||
1284 |
|
1294 | |||
1285 | @contextlib.contextmanager |
|
1295 | @contextlib.contextmanager | |
1286 | def temporary_windows_dev_instances( |
|
1296 | def temporary_windows_dev_instances( | |
1287 | c: AWSConnection, |
|
1297 | c: AWSConnection, | |
1288 | image, |
|
1298 | image, | |
1289 | instance_type, |
|
1299 | instance_type, | |
1290 | prefix='hg-', |
|
1300 | prefix='hg-', | |
1291 | disable_antivirus=False, |
|
1301 | disable_antivirus=False, | |
1292 | ): |
|
1302 | ): | |
1293 | """Create a temporary Windows development EC2 instance. |
|
1303 | """Create a temporary Windows development EC2 instance. | |
1294 |
|
1304 | |||
1295 | Context manager resolves to the list of ``EC2.Instance`` that were created. |
|
1305 | Context manager resolves to the list of ``EC2.Instance`` that were created. | |
1296 | """ |
|
1306 | """ | |
1297 | config = { |
|
1307 | config = { | |
1298 | 'BlockDeviceMappings': [ |
|
1308 | 'BlockDeviceMappings': [ | |
1299 | { |
|
1309 | { | |
1300 | 'DeviceName': '/dev/sda1', |
|
1310 | 'DeviceName': '/dev/sda1', | |
1301 | 'Ebs': { |
|
1311 | 'Ebs': { | |
1302 | 'DeleteOnTermination': True, |
|
1312 | 'DeleteOnTermination': True, | |
1303 | 'VolumeSize': 32, |
|
1313 | 'VolumeSize': 32, | |
1304 | 'VolumeType': 'gp2', |
|
1314 | 'VolumeType': 'gp2', | |
1305 | }, |
|
1315 | }, | |
1306 | } |
|
1316 | } | |
1307 | ], |
|
1317 | ], | |
1308 | 'ImageId': image.id, |
|
1318 | 'ImageId': image.id, | |
1309 | 'InstanceInitiatedShutdownBehavior': 'stop', |
|
1319 | 'InstanceInitiatedShutdownBehavior': 'stop', | |
1310 | 'InstanceType': instance_type, |
|
1320 | 'InstanceType': instance_type, | |
1311 | 'KeyName': '%sautomation' % prefix, |
|
1321 | 'KeyName': '%sautomation' % prefix, | |
1312 | 'MaxCount': 1, |
|
1322 | 'MaxCount': 1, | |
1313 | 'MinCount': 1, |
|
1323 | 'MinCount': 1, | |
1314 | 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], |
|
1324 | 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], | |
1315 | } |
|
1325 | } | |
1316 |
|
1326 | |||
1317 | with create_temp_windows_ec2_instances(c, config) as instances: |
|
1327 | with create_temp_windows_ec2_instances(c, config) as instances: | |
1318 | if disable_antivirus: |
|
1328 | if disable_antivirus: | |
1319 | for instance in instances: |
|
1329 | for instance in instances: | |
1320 | run_powershell( |
|
1330 | run_powershell( | |
1321 | instance.winrm_client, |
|
1331 | instance.winrm_client, | |
1322 | 'Set-MpPreference -DisableRealtimeMonitoring $true', |
|
1332 | 'Set-MpPreference -DisableRealtimeMonitoring $true', | |
1323 | ) |
|
1333 | ) | |
1324 |
|
1334 | |||
1325 | yield instances |
|
1335 | yield instances |
@@ -1,597 +1,621 b'' | |||||
1 | # linux.py - Linux specific automation functionality |
|
1 | # linux.py - Linux specific automation functionality | |
2 | # |
|
2 | # | |
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # no-check-code because Python 3 native. |
|
8 | # no-check-code because Python 3 native. | |
9 |
|
9 | |||
10 | import os |
|
10 | import os | |
11 | import pathlib |
|
11 | import pathlib | |
12 | import shlex |
|
12 | import shlex | |
13 | import subprocess |
|
13 | import subprocess | |
14 | import tempfile |
|
14 | import tempfile | |
15 |
|
15 | |||
16 | from .ssh import exec_command |
|
16 | from .ssh import exec_command | |
17 |
|
17 | |||
18 |
|
18 | |||
19 | # Linux distributions that are supported. |
|
19 | # Linux distributions that are supported. | |
20 | DISTROS = { |
|
20 | DISTROS = { | |
21 | 'debian9', |
|
21 | 'debian9', | |
22 | 'debian10', |
|
22 | 'debian10', | |
23 | 'ubuntu18.04', |
|
23 | 'ubuntu18.04', | |
24 | 'ubuntu19.04', |
|
24 | 'ubuntu19.04', | |
25 | } |
|
25 | } | |
26 |
|
26 | |||
27 | INSTALL_PYTHONS = r''' |
|
27 | INSTALL_PYTHONS = r''' | |
28 | PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0" |
|
28 | PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0" | |
29 |
PYENV3_VERSIONS="3.5.10 3.6.1 |
|
29 | PYENV3_VERSIONS="3.5.10 3.6.13 3.7.10 3.8.10 3.9.5 pypy3.5-7.0.0 pypy3.6-7.3.3 pypy3.7-7.3.3" | |
30 |
|
30 | |||
31 | git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv |
|
31 | git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv | |
32 | pushd /hgdev/pyenv |
|
32 | pushd /hgdev/pyenv | |
33 | git checkout 8ac91b4fd678a8c04356f5ec85cfcd565c265e9a |
|
33 | git checkout 328fd42c3a2fbf14ae46dae2021a087fe27ba7e2 | |
34 | popd |
|
34 | popd | |
35 |
|
35 | |||
36 | export PYENV_ROOT="/hgdev/pyenv" |
|
36 | export PYENV_ROOT="/hgdev/pyenv" | |
37 | export PATH="$PYENV_ROOT/bin:$PATH" |
|
37 | export PATH="$PYENV_ROOT/bin:$PATH" | |
38 |
|
38 | |||
39 | # pip 19.2.3. |
|
39 | # pip 19.2.3. | |
40 | PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe |
|
40 | PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe | |
41 | wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py |
|
41 | wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py | |
42 | echo "${PIP_SHA256} get-pip.py" | sha256sum --check - |
|
42 | echo "${PIP_SHA256} get-pip.py" | sha256sum --check - | |
43 |
|
43 | |||
44 | VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2 |
|
44 | VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2 | |
45 | VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz |
|
45 | VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz | |
46 | wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL} |
|
46 | wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL} | |
47 | echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check - |
|
47 | echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check - | |
48 |
|
48 | |||
49 | for v in ${PYENV2_VERSIONS}; do |
|
49 | for v in ${PYENV2_VERSIONS}; do | |
50 | pyenv install -v ${v} |
|
50 | pyenv install -v ${v} | |
51 | ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py |
|
51 | ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py | |
52 | ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL} |
|
52 | ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL} | |
53 | ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt |
|
53 | ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt | |
54 | done |
|
54 | done | |
55 |
|
55 | |||
56 | for v in ${PYENV3_VERSIONS}; do |
|
56 | for v in ${PYENV3_VERSIONS}; do | |
57 | pyenv install -v ${v} |
|
57 | pyenv install -v ${v} | |
58 | ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py |
|
58 | ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py | |
59 | ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt |
|
59 | ||
|
60 | case ${v} in | |||
|
61 | 3.5.*) | |||
|
62 | REQUIREMENTS=requirements-py3.5.txt | |||
|
63 | ;; | |||
|
64 | pypy3.5*) | |||
|
65 | REQUIREMENTS=requirements-py3.5.txt | |||
|
66 | ;; | |||
|
67 | *) | |||
|
68 | REQUIREMENTS=requirements-py3.txt | |||
|
69 | ;; | |||
|
70 | esac | |||
|
71 | ||||
|
72 | ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/${REQUIREMENTS} | |||
60 | done |
|
73 | done | |
61 |
|
74 | |||
62 | pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system |
|
75 | pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system | |
63 | '''.lstrip().replace( |
|
76 | '''.lstrip().replace( | |
64 | '\r\n', '\n' |
|
77 | '\r\n', '\n' | |
65 | ) |
|
78 | ) | |
66 |
|
79 | |||
|
80 | INSTALL_PYOXIDIZER = r''' | |||
|
81 | PYOXIDIZER_VERSION=0.16.0 | |||
|
82 | PYOXIDIZER_SHA256=8875471c270312fbb934007fd30f65f1904cc0f5da6188d61c90ed2129b9f9c1 | |||
|
83 | PYOXIDIZER_URL=https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F${PYOXIDIZER_VERSION}/pyoxidizer-${PYOXIDIZER_VERSION}-linux_x86_64.zip | |||
|
84 | ||||
|
85 | wget -O pyoxidizer.zip --progress dot:mega ${PYOXIDIZER_URL} | |||
|
86 | echo "${PYOXIDIZER_SHA256} pyoxidizer.zip" | sha256sum --check - | |||
|
87 | ||||
|
88 | unzip pyoxidizer.zip | |||
|
89 | chmod +x pyoxidizer | |||
|
90 | sudo mv pyoxidizer /usr/local/bin/pyoxidizer | |||
|
91 | ''' | |||
67 |
|
92 | |||
68 | INSTALL_RUST = r''' |
|
93 | INSTALL_RUST = r''' | |
69 | RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076 |
|
94 | RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076 | |
70 | wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init |
|
95 | wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init | |
71 | echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check - |
|
96 | echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check - | |
72 |
|
97 | |||
73 | chmod +x rustup-init |
|
98 | chmod +x rustup-init | |
74 | sudo -H -u hg -g hg ./rustup-init -y |
|
99 | sudo -H -u hg -g hg ./rustup-init -y | |
75 |
sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1. |
|
100 | sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.41.1 1.52.0 | |
76 | sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy |
|
101 | sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy | |
77 |
|
||||
78 | sudo -H -u hg -g hg /home/hg/.cargo/bin/cargo install --version 0.10.3 pyoxidizer |
|
|||
79 | ''' |
|
102 | ''' | |
80 |
|
103 | |||
81 |
|
104 | |||
82 | BOOTSTRAP_VIRTUALENV = r''' |
|
105 | BOOTSTRAP_VIRTUALENV = r''' | |
83 | /usr/bin/virtualenv /hgdev/venv-bootstrap |
|
106 | /usr/bin/virtualenv /hgdev/venv-bootstrap | |
84 |
|
107 | |||
85 | HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f |
|
108 | HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f | |
86 | HG_TARBALL=mercurial-5.1.1.tar.gz |
|
109 | HG_TARBALL=mercurial-5.1.1.tar.gz | |
87 |
|
110 | |||
88 | wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL} |
|
111 | wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL} | |
89 | echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check - |
|
112 | echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check - | |
90 |
|
113 | |||
91 | /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL} |
|
114 | /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL} | |
92 | '''.lstrip().replace( |
|
115 | '''.lstrip().replace( | |
93 | '\r\n', '\n' |
|
116 | '\r\n', '\n' | |
94 | ) |
|
117 | ) | |
95 |
|
118 | |||
96 |
|
119 | |||
97 | BOOTSTRAP_DEBIAN = ( |
|
120 | BOOTSTRAP_DEBIAN = ( | |
98 | r''' |
|
121 | r''' | |
99 | #!/bin/bash |
|
122 | #!/bin/bash | |
100 |
|
123 | |||
101 | set -ex |
|
124 | set -ex | |
102 |
|
125 | |||
103 | DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'` |
|
126 | DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'` | |
104 | DEBIAN_VERSION=`cat /etc/debian_version` |
|
127 | DEBIAN_VERSION=`cat /etc/debian_version` | |
105 | LSB_RELEASE=`lsb_release -cs` |
|
128 | LSB_RELEASE=`lsb_release -cs` | |
106 |
|
129 | |||
107 | sudo /usr/sbin/groupadd hg |
|
130 | sudo /usr/sbin/groupadd hg | |
108 | sudo /usr/sbin/groupadd docker |
|
131 | sudo /usr/sbin/groupadd docker | |
109 | sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg |
|
132 | sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg | |
110 | sudo mkdir /home/hg/.ssh |
|
133 | sudo mkdir /home/hg/.ssh | |
111 | sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys |
|
134 | sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys | |
112 | sudo chown -R hg:hg /home/hg/.ssh |
|
135 | sudo chown -R hg:hg /home/hg/.ssh | |
113 | sudo chmod 700 /home/hg/.ssh |
|
136 | sudo chmod 700 /home/hg/.ssh | |
114 | sudo chmod 600 /home/hg/.ssh/authorized_keys |
|
137 | sudo chmod 600 /home/hg/.ssh/authorized_keys | |
115 |
|
138 | |||
116 | cat << EOF | sudo tee /etc/sudoers.d/90-hg |
|
139 | cat << EOF | sudo tee /etc/sudoers.d/90-hg | |
117 | hg ALL=(ALL) NOPASSWD:ALL |
|
140 | hg ALL=(ALL) NOPASSWD:ALL | |
118 | EOF |
|
141 | EOF | |
119 |
|
142 | |||
120 | sudo apt-get update |
|
143 | sudo apt-get update | |
121 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade |
|
144 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade | |
122 |
|
145 | |||
123 | # Install packages necessary to set up Docker Apt repo. |
|
146 | # Install packages necessary to set up Docker Apt repo. | |
124 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \ |
|
147 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \ | |
125 | apt-transport-https \ |
|
148 | apt-transport-https \ | |
126 | gnupg |
|
149 | gnupg | |
127 |
|
150 | |||
128 | cat > docker-apt-key << EOF |
|
151 | cat > docker-apt-key << EOF | |
129 | -----BEGIN PGP PUBLIC KEY BLOCK----- |
|
152 | -----BEGIN PGP PUBLIC KEY BLOCK----- | |
130 |
|
153 | |||
131 | mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth |
|
154 | mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth | |
132 | lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh |
|
155 | lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh | |
133 | 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq |
|
156 | 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq | |
134 | L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7 |
|
157 | L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7 | |
135 | UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N |
|
158 | UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N | |
136 | cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht |
|
159 | cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht | |
137 | ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo |
|
160 | ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo | |
138 | vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD |
|
161 | vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD | |
139 | G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ |
|
162 | G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ | |
140 | XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj |
|
163 | XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj | |
141 | q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB |
|
164 | q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB | |
142 | tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3 |
|
165 | tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3 | |
143 | BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO |
|
166 | BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO | |
144 | v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd |
|
167 | v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd | |
145 | tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk |
|
168 | tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk | |
146 | jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m |
|
169 | jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m | |
147 | 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P |
|
170 | 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P | |
148 | XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc |
|
171 | XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc | |
149 | FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8 |
|
172 | FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8 | |
150 | g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm |
|
173 | g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm | |
151 | ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh |
|
174 | ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh | |
152 | 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5 |
|
175 | 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5 | |
153 | G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW |
|
176 | G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW | |
154 | FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB |
|
177 | FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB | |
155 | EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF |
|
178 | EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF | |
156 | M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx |
|
179 | M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx | |
157 | Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu |
|
180 | Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu | |
158 | w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk |
|
181 | w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk | |
159 | z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8 |
|
182 | z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8 | |
160 | eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb |
|
183 | eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb | |
161 | VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa |
|
184 | VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa | |
162 | 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X |
|
185 | 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X | |
163 | zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ |
|
186 | zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ | |
164 | pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7 |
|
187 | pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7 | |
165 | ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ |
|
188 | ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ | |
166 | BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY |
|
189 | BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY | |
167 | 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp |
|
190 | 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp | |
168 | YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI |
|
191 | YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI | |
169 | mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES |
|
192 | mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES | |
170 | KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7 |
|
193 | KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7 | |
171 | JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ |
|
194 | JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ | |
172 | cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0 |
|
195 | cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0 | |
173 | 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5 |
|
196 | 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5 | |
174 | U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z |
|
197 | U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z | |
175 | VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f |
|
198 | VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f | |
176 | irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk |
|
199 | irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk | |
177 | SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz |
|
200 | SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz | |
178 | QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W |
|
201 | QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W | |
179 | 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw |
|
202 | 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw | |
180 | 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe |
|
203 | 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe | |
181 | dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y |
|
204 | dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y | |
182 | Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR |
|
205 | Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR | |
183 | H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh |
|
206 | H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh | |
184 | /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ |
|
207 | /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ | |
185 | M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S |
|
208 | M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S | |
186 | xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O |
|
209 | xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O | |
187 | jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG |
|
210 | jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG | |
188 | YT90qFF93M3v01BbxP+EIY2/9tiIPbrd |
|
211 | YT90qFF93M3v01BbxP+EIY2/9tiIPbrd | |
189 | =0YYh |
|
212 | =0YYh | |
190 | -----END PGP PUBLIC KEY BLOCK----- |
|
213 | -----END PGP PUBLIC KEY BLOCK----- | |
191 | EOF |
|
214 | EOF | |
192 |
|
215 | |||
193 | sudo apt-key add docker-apt-key |
|
216 | sudo apt-key add docker-apt-key | |
194 |
|
217 | |||
195 | if [ "$LSB_RELEASE" = "stretch" ]; then |
|
218 | if [ "$LSB_RELEASE" = "stretch" ]; then | |
196 | cat << EOF | sudo tee -a /etc/apt/sources.list |
|
219 | cat << EOF | sudo tee -a /etc/apt/sources.list | |
197 | # Need backports for clang-format-6.0 |
|
220 | # Need backports for clang-format-6.0 | |
198 | deb http://deb.debian.org/debian stretch-backports main |
|
221 | deb http://deb.debian.org/debian stretch-backports main | |
199 | EOF |
|
222 | EOF | |
200 | fi |
|
223 | fi | |
201 |
|
224 | |||
202 | if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then |
|
225 | if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then | |
203 | cat << EOF | sudo tee -a /etc/apt/sources.list |
|
226 | cat << EOF | sudo tee -a /etc/apt/sources.list | |
204 | # Sources are useful if we want to compile things locally. |
|
227 | # Sources are useful if we want to compile things locally. | |
205 | deb-src http://deb.debian.org/debian $LSB_RELEASE main |
|
228 | deb-src http://deb.debian.org/debian $LSB_RELEASE main | |
206 | deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main |
|
229 | deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main | |
207 | deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main |
|
230 | deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main | |
208 | deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main |
|
231 | deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main | |
209 |
|
232 | |||
210 | deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable |
|
233 | deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable | |
211 | EOF |
|
234 | EOF | |
212 |
|
235 | |||
213 | elif [ "$DISTRO" = "Ubuntu" ]; then |
|
236 | elif [ "$DISTRO" = "Ubuntu" ]; then | |
214 | cat << EOF | sudo tee -a /etc/apt/sources.list |
|
237 | cat << EOF | sudo tee -a /etc/apt/sources.list | |
215 | deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable |
|
238 | deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable | |
216 | EOF |
|
239 | EOF | |
217 |
|
240 | |||
218 | fi |
|
241 | fi | |
219 |
|
242 | |||
220 | sudo apt-get update |
|
243 | sudo apt-get update | |
221 |
|
244 | |||
222 | PACKAGES="\ |
|
245 | PACKAGES="\ | |
223 | awscli \ |
|
246 | awscli \ | |
224 | btrfs-progs \ |
|
247 | btrfs-progs \ | |
225 | build-essential \ |
|
248 | build-essential \ | |
226 | bzr \ |
|
249 | bzr \ | |
227 | clang-format-6.0 \ |
|
250 | clang-format-6.0 \ | |
228 | cvs \ |
|
251 | cvs \ | |
229 | darcs \ |
|
252 | darcs \ | |
230 | debhelper \ |
|
253 | debhelper \ | |
231 | devscripts \ |
|
254 | devscripts \ | |
232 | docker-ce \ |
|
255 | docker-ce \ | |
233 | dpkg-dev \ |
|
256 | dpkg-dev \ | |
234 | dstat \ |
|
257 | dstat \ | |
235 | emacs \ |
|
258 | emacs \ | |
236 | gettext \ |
|
259 | gettext \ | |
237 | git \ |
|
260 | git \ | |
238 | htop \ |
|
261 | htop \ | |
239 | iotop \ |
|
262 | iotop \ | |
240 | jfsutils \ |
|
263 | jfsutils \ | |
241 | libbz2-dev \ |
|
264 | libbz2-dev \ | |
242 | libexpat1-dev \ |
|
265 | libexpat1-dev \ | |
243 | libffi-dev \ |
|
266 | libffi-dev \ | |
244 | libgdbm-dev \ |
|
267 | libgdbm-dev \ | |
245 | liblzma-dev \ |
|
268 | liblzma-dev \ | |
246 | libncurses5-dev \ |
|
269 | libncurses5-dev \ | |
247 | libnss3-dev \ |
|
270 | libnss3-dev \ | |
248 | libreadline-dev \ |
|
271 | libreadline-dev \ | |
249 | libsqlite3-dev \ |
|
272 | libsqlite3-dev \ | |
250 | libssl-dev \ |
|
273 | libssl-dev \ | |
251 | netbase \ |
|
274 | netbase \ | |
252 | ntfs-3g \ |
|
275 | ntfs-3g \ | |
253 | nvme-cli \ |
|
276 | nvme-cli \ | |
254 | pyflakes \ |
|
277 | pyflakes \ | |
255 | pyflakes3 \ |
|
278 | pyflakes3 \ | |
256 | pylint \ |
|
279 | pylint \ | |
257 | pylint3 \ |
|
280 | pylint3 \ | |
258 | python-all-dev \ |
|
281 | python-all-dev \ | |
259 | python-dev \ |
|
282 | python-dev \ | |
260 | python-docutils \ |
|
283 | python-docutils \ | |
261 | python-fuzzywuzzy \ |
|
284 | python-fuzzywuzzy \ | |
262 | python-pygments \ |
|
285 | python-pygments \ | |
263 | python-subversion \ |
|
286 | python-subversion \ | |
264 | python-vcr \ |
|
287 | python-vcr \ | |
265 | python3-boto3 \ |
|
288 | python3-boto3 \ | |
266 | python3-dev \ |
|
289 | python3-dev \ | |
267 | python3-docutils \ |
|
290 | python3-docutils \ | |
268 | python3-fuzzywuzzy \ |
|
291 | python3-fuzzywuzzy \ | |
269 | python3-pygments \ |
|
292 | python3-pygments \ | |
270 | python3-vcr \ |
|
293 | python3-vcr \ | |
271 | python3-venv \ |
|
294 | python3-venv \ | |
272 | rsync \ |
|
295 | rsync \ | |
273 | sqlite3 \ |
|
296 | sqlite3 \ | |
274 | subversion \ |
|
297 | subversion \ | |
275 | tcl-dev \ |
|
298 | tcl-dev \ | |
276 | tk-dev \ |
|
299 | tk-dev \ | |
277 | tla \ |
|
300 | tla \ | |
278 | unzip \ |
|
301 | unzip \ | |
279 | uuid-dev \ |
|
302 | uuid-dev \ | |
280 | vim \ |
|
303 | vim \ | |
281 | virtualenv \ |
|
304 | virtualenv \ | |
282 | wget \ |
|
305 | wget \ | |
283 | xfsprogs \ |
|
306 | xfsprogs \ | |
284 | zip \ |
|
307 | zip \ | |
285 | zlib1g-dev" |
|
308 | zlib1g-dev" | |
286 |
|
309 | |||
287 | if [ "LSB_RELEASE" = "stretch" ]; then |
|
310 | if [ "LSB_RELEASE" = "stretch" ]; then | |
288 | PACKAGES="$PACKAGES linux-perf" |
|
311 | PACKAGES="$PACKAGES linux-perf" | |
289 | elif [ "$DISTRO" = "Ubuntu" ]; then |
|
312 | elif [ "$DISTRO" = "Ubuntu" ]; then | |
290 | PACKAGES="$PACKAGES linux-tools-common" |
|
313 | PACKAGES="$PACKAGES linux-tools-common" | |
291 | fi |
|
314 | fi | |
292 |
|
315 | |||
293 | # Monotone only available in older releases. |
|
316 | # Monotone only available in older releases. | |
294 | if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then |
|
317 | if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then | |
295 | PACKAGES="$PACKAGES monotone" |
|
318 | PACKAGES="$PACKAGES monotone" | |
296 | fi |
|
319 | fi | |
297 |
|
320 | |||
298 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES |
|
321 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES | |
299 |
|
322 | |||
300 | # Create clang-format symlink so test harness finds it. |
|
323 | # Create clang-format symlink so test harness finds it. | |
301 | sudo update-alternatives --install /usr/bin/clang-format clang-format \ |
|
324 | sudo update-alternatives --install /usr/bin/clang-format clang-format \ | |
302 | /usr/bin/clang-format-6.0 1000 |
|
325 | /usr/bin/clang-format-6.0 1000 | |
303 |
|
326 | |||
304 | sudo mkdir /hgdev |
|
327 | sudo mkdir /hgdev | |
305 | # Will be normalized to hg:hg later. |
|
328 | # Will be normalized to hg:hg later. | |
306 | sudo chown `whoami` /hgdev |
|
329 | sudo chown `whoami` /hgdev | |
307 |
|
330 | |||
308 | {install_rust} |
|
331 | {install_rust} | |
|
332 | {install_pyoxidizer} | |||
309 |
|
333 | |||
310 |
cp requirements- |
|
334 | cp requirements-*.txt /hgdev/ | |
311 | cp requirements-py3.txt /hgdev/requirements-py3.txt |
|
|||
312 |
|
335 | |||
313 | # Disable the pip version check because it uses the network and can |
|
336 | # Disable the pip version check because it uses the network and can | |
314 | # be annoying. |
|
337 | # be annoying. | |
315 | cat << EOF | sudo tee -a /etc/pip.conf |
|
338 | cat << EOF | sudo tee -a /etc/pip.conf | |
316 | [global] |
|
339 | [global] | |
317 | disable-pip-version-check = True |
|
340 | disable-pip-version-check = True | |
318 | EOF |
|
341 | EOF | |
319 |
|
342 | |||
320 | {install_pythons} |
|
343 | {install_pythons} | |
321 | {bootstrap_virtualenv} |
|
344 | {bootstrap_virtualenv} | |
322 |
|
345 | |||
323 | /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src |
|
346 | /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src | |
324 |
|
347 | |||
325 | # Mark the repo as non-publishing. |
|
348 | # Mark the repo as non-publishing. | |
326 | cat >> /hgdev/src/.hg/hgrc << EOF |
|
349 | cat >> /hgdev/src/.hg/hgrc << EOF | |
327 | [phases] |
|
350 | [phases] | |
328 | publish = false |
|
351 | publish = false | |
329 | EOF |
|
352 | EOF | |
330 |
|
353 | |||
331 | sudo chown -R hg:hg /hgdev |
|
354 | sudo chown -R hg:hg /hgdev | |
332 | '''.lstrip() |
|
355 | '''.lstrip() | |
333 | .format( |
|
356 | .format( | |
334 | install_rust=INSTALL_RUST, |
|
357 | install_rust=INSTALL_RUST, | |
|
358 | install_pyoxidizer=INSTALL_PYOXIDIZER, | |||
335 | install_pythons=INSTALL_PYTHONS, |
|
359 | install_pythons=INSTALL_PYTHONS, | |
336 | bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV, |
|
360 | bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV, | |
337 | ) |
|
361 | ) | |
338 | .replace('\r\n', '\n') |
|
362 | .replace('\r\n', '\n') | |
339 | ) |
|
363 | ) | |
340 |
|
364 | |||
341 |
|
365 | |||
342 | # Prepares /hgdev for operations. |
|
366 | # Prepares /hgdev for operations. | |
343 | PREPARE_HGDEV = ''' |
|
367 | PREPARE_HGDEV = ''' | |
344 | #!/bin/bash |
|
368 | #!/bin/bash | |
345 |
|
369 | |||
346 | set -e |
|
370 | set -e | |
347 |
|
371 | |||
348 | FS=$1 |
|
372 | FS=$1 | |
349 |
|
373 | |||
350 | ensure_device() { |
|
374 | ensure_device() { | |
351 | if [ -z "${DEVICE}" ]; then |
|
375 | if [ -z "${DEVICE}" ]; then | |
352 | echo "could not find block device to format" |
|
376 | echo "could not find block device to format" | |
353 | exit 1 |
|
377 | exit 1 | |
354 | fi |
|
378 | fi | |
355 | } |
|
379 | } | |
356 |
|
380 | |||
357 | # Determine device to partition for extra filesystem. |
|
381 | # Determine device to partition for extra filesystem. | |
358 | # If only 1 volume is present, it will be the root volume and |
|
382 | # If only 1 volume is present, it will be the root volume and | |
359 | # should be /dev/nvme0. If multiple volumes are present, the |
|
383 | # should be /dev/nvme0. If multiple volumes are present, the | |
360 | # root volume could be nvme0 or nvme1. Use whichever one doesn't have |
|
384 | # root volume could be nvme0 or nvme1. Use whichever one doesn't have | |
361 | # a partition. |
|
385 | # a partition. | |
362 | if [ -e /dev/nvme1n1 ]; then |
|
386 | if [ -e /dev/nvme1n1 ]; then | |
363 | if [ -e /dev/nvme0n1p1 ]; then |
|
387 | if [ -e /dev/nvme0n1p1 ]; then | |
364 | DEVICE=/dev/nvme1n1 |
|
388 | DEVICE=/dev/nvme1n1 | |
365 | else |
|
389 | else | |
366 | DEVICE=/dev/nvme0n1 |
|
390 | DEVICE=/dev/nvme0n1 | |
367 | fi |
|
391 | fi | |
368 | else |
|
392 | else | |
369 | DEVICE= |
|
393 | DEVICE= | |
370 | fi |
|
394 | fi | |
371 |
|
395 | |||
372 | sudo mkdir /hgwork |
|
396 | sudo mkdir /hgwork | |
373 |
|
397 | |||
374 | if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then |
|
398 | if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then | |
375 | ensure_device |
|
399 | ensure_device | |
376 | echo "creating ${FS} filesystem on ${DEVICE}" |
|
400 | echo "creating ${FS} filesystem on ${DEVICE}" | |
377 | fi |
|
401 | fi | |
378 |
|
402 | |||
379 | if [ "${FS}" = "default" ]; then |
|
403 | if [ "${FS}" = "default" ]; then | |
380 | : |
|
404 | : | |
381 |
|
405 | |||
382 | elif [ "${FS}" = "btrfs" ]; then |
|
406 | elif [ "${FS}" = "btrfs" ]; then | |
383 | sudo mkfs.btrfs ${DEVICE} |
|
407 | sudo mkfs.btrfs ${DEVICE} | |
384 | sudo mount ${DEVICE} /hgwork |
|
408 | sudo mount ${DEVICE} /hgwork | |
385 |
|
409 | |||
386 | elif [ "${FS}" = "ext3" ]; then |
|
410 | elif [ "${FS}" = "ext3" ]; then | |
387 | # lazy_journal_init speeds up filesystem creation at the expense of |
|
411 | # lazy_journal_init speeds up filesystem creation at the expense of | |
388 | # integrity if things crash. We are an ephemeral instance, so we don't |
|
412 | # integrity if things crash. We are an ephemeral instance, so we don't | |
389 | # care about integrity. |
|
413 | # care about integrity. | |
390 | sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE} |
|
414 | sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE} | |
391 | sudo mount ${DEVICE} /hgwork |
|
415 | sudo mount ${DEVICE} /hgwork | |
392 |
|
416 | |||
393 | elif [ "${FS}" = "ext4" ]; then |
|
417 | elif [ "${FS}" = "ext4" ]; then | |
394 | sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE} |
|
418 | sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE} | |
395 | sudo mount ${DEVICE} /hgwork |
|
419 | sudo mount ${DEVICE} /hgwork | |
396 |
|
420 | |||
397 | elif [ "${FS}" = "jfs" ]; then |
|
421 | elif [ "${FS}" = "jfs" ]; then | |
398 | sudo mkfs.jfs ${DEVICE} |
|
422 | sudo mkfs.jfs ${DEVICE} | |
399 | sudo mount ${DEVICE} /hgwork |
|
423 | sudo mount ${DEVICE} /hgwork | |
400 |
|
424 | |||
401 | elif [ "${FS}" = "tmpfs" ]; then |
|
425 | elif [ "${FS}" = "tmpfs" ]; then | |
402 | echo "creating tmpfs volume in /hgwork" |
|
426 | echo "creating tmpfs volume in /hgwork" | |
403 | sudo mount -t tmpfs -o size=1024M tmpfs /hgwork |
|
427 | sudo mount -t tmpfs -o size=1024M tmpfs /hgwork | |
404 |
|
428 | |||
405 | elif [ "${FS}" = "xfs" ]; then |
|
429 | elif [ "${FS}" = "xfs" ]; then | |
406 | sudo mkfs.xfs ${DEVICE} |
|
430 | sudo mkfs.xfs ${DEVICE} | |
407 | sudo mount ${DEVICE} /hgwork |
|
431 | sudo mount ${DEVICE} /hgwork | |
408 |
|
432 | |||
409 | else |
|
433 | else | |
410 | echo "unsupported filesystem: ${FS}" |
|
434 | echo "unsupported filesystem: ${FS}" | |
411 | exit 1 |
|
435 | exit 1 | |
412 | fi |
|
436 | fi | |
413 |
|
437 | |||
414 | echo "/hgwork ready" |
|
438 | echo "/hgwork ready" | |
415 |
|
439 | |||
416 | sudo chown hg:hg /hgwork |
|
440 | sudo chown hg:hg /hgwork | |
417 | mkdir /hgwork/tmp |
|
441 | mkdir /hgwork/tmp | |
418 | chown hg:hg /hgwork/tmp |
|
442 | chown hg:hg /hgwork/tmp | |
419 |
|
443 | |||
420 | rsync -a /hgdev/src /hgwork/ |
|
444 | rsync -a /hgdev/src /hgwork/ | |
421 | '''.lstrip().replace( |
|
445 | '''.lstrip().replace( | |
422 | '\r\n', '\n' |
|
446 | '\r\n', '\n' | |
423 | ) |
|
447 | ) | |
424 |
|
448 | |||
425 |
|
449 | |||
426 | HG_UPDATE_CLEAN = ''' |
|
450 | HG_UPDATE_CLEAN = ''' | |
427 | set -ex |
|
451 | set -ex | |
428 |
|
452 | |||
429 | HG=/hgdev/venv-bootstrap/bin/hg |
|
453 | HG=/hgdev/venv-bootstrap/bin/hg | |
430 |
|
454 | |||
431 | cd /hgwork/src |
|
455 | cd /hgwork/src | |
432 | ${HG} --config extensions.purge= purge --all |
|
456 | ${HG} --config extensions.purge= purge --all | |
433 | ${HG} update -C $1 |
|
457 | ${HG} update -C $1 | |
434 | ${HG} log -r . |
|
458 | ${HG} log -r . | |
435 | '''.lstrip().replace( |
|
459 | '''.lstrip().replace( | |
436 | '\r\n', '\n' |
|
460 | '\r\n', '\n' | |
437 | ) |
|
461 | ) | |
438 |
|
462 | |||
439 |
|
463 | |||
440 | def prepare_exec_environment(ssh_client, filesystem='default'): |
|
464 | def prepare_exec_environment(ssh_client, filesystem='default'): | |
441 | """Prepare an EC2 instance to execute things. |
|
465 | """Prepare an EC2 instance to execute things. | |
442 |
|
466 | |||
443 | The AMI has an ``/hgdev`` bootstrapped with various Python installs |
|
467 | The AMI has an ``/hgdev`` bootstrapped with various Python installs | |
444 | and a clone of the Mercurial repo. |
|
468 | and a clone of the Mercurial repo. | |
445 |
|
469 | |||
446 | In EC2, EBS volumes launched from snapshots have wonky performance behavior. |
|
470 | In EC2, EBS volumes launched from snapshots have wonky performance behavior. | |
447 | Notably, blocks have to be copied on first access, which makes volume |
|
471 | Notably, blocks have to be copied on first access, which makes volume | |
448 | I/O extremely slow on fresh volumes. |
|
472 | I/O extremely slow on fresh volumes. | |
449 |
|
473 | |||
450 | Furthermore, we may want to run operations, tests, etc on alternative |
|
474 | Furthermore, we may want to run operations, tests, etc on alternative | |
451 | filesystems so we examine behavior on different filesystems. |
|
475 | filesystems so we examine behavior on different filesystems. | |
452 |
|
476 | |||
453 | This function is used to facilitate executing operations on alternate |
|
477 | This function is used to facilitate executing operations on alternate | |
454 | volumes. |
|
478 | volumes. | |
455 | """ |
|
479 | """ | |
456 | sftp = ssh_client.open_sftp() |
|
480 | sftp = ssh_client.open_sftp() | |
457 |
|
481 | |||
458 | with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh: |
|
482 | with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh: | |
459 | fh.write(PREPARE_HGDEV) |
|
483 | fh.write(PREPARE_HGDEV) | |
460 | fh.chmod(0o0777) |
|
484 | fh.chmod(0o0777) | |
461 |
|
485 | |||
462 | command = 'sudo /hgdev/prepare-hgdev %s' % filesystem |
|
486 | command = 'sudo /hgdev/prepare-hgdev %s' % filesystem | |
463 | chan, stdin, stdout = exec_command(ssh_client, command) |
|
487 | chan, stdin, stdout = exec_command(ssh_client, command) | |
464 | stdin.close() |
|
488 | stdin.close() | |
465 |
|
489 | |||
466 | for line in stdout: |
|
490 | for line in stdout: | |
467 | print(line, end='') |
|
491 | print(line, end='') | |
468 |
|
492 | |||
469 | res = chan.recv_exit_status() |
|
493 | res = chan.recv_exit_status() | |
470 |
|
494 | |||
471 | if res: |
|
495 | if res: | |
472 | raise Exception('non-0 exit code updating working directory; %d' % res) |
|
496 | raise Exception('non-0 exit code updating working directory; %d' % res) | |
473 |
|
497 | |||
474 |
|
498 | |||
475 | def synchronize_hg( |
|
499 | def synchronize_hg( | |
476 | source_path: pathlib.Path, ec2_instance, revision: str = None |
|
500 | source_path: pathlib.Path, ec2_instance, revision: str = None | |
477 | ): |
|
501 | ): | |
478 | """Synchronize a local Mercurial source path to remote EC2 instance.""" |
|
502 | """Synchronize a local Mercurial source path to remote EC2 instance.""" | |
479 |
|
503 | |||
480 | with tempfile.TemporaryDirectory() as temp_dir: |
|
504 | with tempfile.TemporaryDirectory() as temp_dir: | |
481 | temp_dir = pathlib.Path(temp_dir) |
|
505 | temp_dir = pathlib.Path(temp_dir) | |
482 |
|
506 | |||
483 | ssh_dir = temp_dir / '.ssh' |
|
507 | ssh_dir = temp_dir / '.ssh' | |
484 | ssh_dir.mkdir() |
|
508 | ssh_dir.mkdir() | |
485 | ssh_dir.chmod(0o0700) |
|
509 | ssh_dir.chmod(0o0700) | |
486 |
|
510 | |||
487 | public_ip = ec2_instance.public_ip_address |
|
511 | public_ip = ec2_instance.public_ip_address | |
488 |
|
512 | |||
489 | ssh_config = ssh_dir / 'config' |
|
513 | ssh_config = ssh_dir / 'config' | |
490 |
|
514 | |||
491 | with ssh_config.open('w', encoding='utf-8') as fh: |
|
515 | with ssh_config.open('w', encoding='utf-8') as fh: | |
492 | fh.write('Host %s\n' % public_ip) |
|
516 | fh.write('Host %s\n' % public_ip) | |
493 | fh.write(' User hg\n') |
|
517 | fh.write(' User hg\n') | |
494 | fh.write(' StrictHostKeyChecking no\n') |
|
518 | fh.write(' StrictHostKeyChecking no\n') | |
495 | fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts')) |
|
519 | fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts')) | |
496 | fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path) |
|
520 | fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path) | |
497 |
|
521 | |||
498 | if not (source_path / '.hg').is_dir(): |
|
522 | if not (source_path / '.hg').is_dir(): | |
499 | raise Exception( |
|
523 | raise Exception( | |
500 | '%s is not a Mercurial repository; synchronization ' |
|
524 | '%s is not a Mercurial repository; synchronization ' | |
501 | 'not yet supported' % source_path |
|
525 | 'not yet supported' % source_path | |
502 | ) |
|
526 | ) | |
503 |
|
527 | |||
504 | env = dict(os.environ) |
|
528 | env = dict(os.environ) | |
505 | env['HGPLAIN'] = '1' |
|
529 | env['HGPLAIN'] = '1' | |
506 | env['HGENCODING'] = 'utf-8' |
|
530 | env['HGENCODING'] = 'utf-8' | |
507 |
|
531 | |||
508 | hg_bin = source_path / 'hg' |
|
532 | hg_bin = source_path / 'hg' | |
509 |
|
533 | |||
510 | res = subprocess.run( |
|
534 | res = subprocess.run( | |
511 | ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'], |
|
535 | ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'], | |
512 | cwd=str(source_path), |
|
536 | cwd=str(source_path), | |
513 | env=env, |
|
537 | env=env, | |
514 | check=True, |
|
538 | check=True, | |
515 | capture_output=True, |
|
539 | capture_output=True, | |
516 | ) |
|
540 | ) | |
517 |
|
541 | |||
518 | full_revision = res.stdout.decode('ascii') |
|
542 | full_revision = res.stdout.decode('ascii') | |
519 |
|
543 | |||
520 | args = [ |
|
544 | args = [ | |
521 | 'python2.7', |
|
545 | 'python2.7', | |
522 | str(hg_bin), |
|
546 | str(hg_bin), | |
523 | '--config', |
|
547 | '--config', | |
524 | 'ui.ssh=ssh -F %s' % ssh_config, |
|
548 | 'ui.ssh=ssh -F %s' % ssh_config, | |
525 | '--config', |
|
549 | '--config', | |
526 | 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg', |
|
550 | 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg', | |
527 | # Also ensure .hgtags changes are present so auto version |
|
551 | # Also ensure .hgtags changes are present so auto version | |
528 | # calculation works. |
|
552 | # calculation works. | |
529 | 'push', |
|
553 | 'push', | |
530 | '-f', |
|
554 | '-f', | |
531 | '-r', |
|
555 | '-r', | |
532 | full_revision, |
|
556 | full_revision, | |
533 | '-r', |
|
557 | '-r', | |
534 | 'file(.hgtags)', |
|
558 | 'file(.hgtags)', | |
535 | 'ssh://%s//hgwork/src' % public_ip, |
|
559 | 'ssh://%s//hgwork/src' % public_ip, | |
536 | ] |
|
560 | ] | |
537 |
|
561 | |||
538 | res = subprocess.run(args, cwd=str(source_path), env=env) |
|
562 | res = subprocess.run(args, cwd=str(source_path), env=env) | |
539 |
|
563 | |||
540 | # Allow 1 (no-op) to not trigger error. |
|
564 | # Allow 1 (no-op) to not trigger error. | |
541 | if res.returncode not in (0, 1): |
|
565 | if res.returncode not in (0, 1): | |
542 | res.check_returncode() |
|
566 | res.check_returncode() | |
543 |
|
567 | |||
544 | # TODO support synchronizing dirty working directory. |
|
568 | # TODO support synchronizing dirty working directory. | |
545 |
|
569 | |||
546 | sftp = ec2_instance.ssh_client.open_sftp() |
|
570 | sftp = ec2_instance.ssh_client.open_sftp() | |
547 |
|
571 | |||
548 | with sftp.open('/hgdev/hgup', 'wb') as fh: |
|
572 | with sftp.open('/hgdev/hgup', 'wb') as fh: | |
549 | fh.write(HG_UPDATE_CLEAN) |
|
573 | fh.write(HG_UPDATE_CLEAN) | |
550 | fh.chmod(0o0700) |
|
574 | fh.chmod(0o0700) | |
551 |
|
575 | |||
552 | chan, stdin, stdout = exec_command( |
|
576 | chan, stdin, stdout = exec_command( | |
553 | ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision |
|
577 | ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision | |
554 | ) |
|
578 | ) | |
555 | stdin.close() |
|
579 | stdin.close() | |
556 |
|
580 | |||
557 | for line in stdout: |
|
581 | for line in stdout: | |
558 | print(line, end='') |
|
582 | print(line, end='') | |
559 |
|
583 | |||
560 | res = chan.recv_exit_status() |
|
584 | res = chan.recv_exit_status() | |
561 |
|
585 | |||
562 | if res: |
|
586 | if res: | |
563 | raise Exception( |
|
587 | raise Exception( | |
564 | 'non-0 exit code updating working directory; %d' % res |
|
588 | 'non-0 exit code updating working directory; %d' % res | |
565 | ) |
|
589 | ) | |
566 |
|
590 | |||
567 |
|
591 | |||
568 | def run_tests(ssh_client, python_version, test_flags=None): |
|
592 | def run_tests(ssh_client, python_version, test_flags=None): | |
569 | """Run tests on a remote Linux machine via an SSH client.""" |
|
593 | """Run tests on a remote Linux machine via an SSH client.""" | |
570 | test_flags = test_flags or [] |
|
594 | test_flags = test_flags or [] | |
571 |
|
595 | |||
572 | print('running tests') |
|
596 | print('running tests') | |
573 |
|
597 | |||
574 | if python_version == 'system2': |
|
598 | if python_version == 'system2': | |
575 | python = '/usr/bin/python2' |
|
599 | python = '/usr/bin/python2' | |
576 | elif python_version == 'system3': |
|
600 | elif python_version == 'system3': | |
577 | python = '/usr/bin/python3' |
|
601 | python = '/usr/bin/python3' | |
578 | elif python_version.startswith('pypy'): |
|
602 | elif python_version.startswith('pypy'): | |
579 | python = '/hgdev/pyenv/shims/%s' % python_version |
|
603 | python = '/hgdev/pyenv/shims/%s' % python_version | |
580 | else: |
|
604 | else: | |
581 | python = '/hgdev/pyenv/shims/python%s' % python_version |
|
605 | python = '/hgdev/pyenv/shims/python%s' % python_version | |
582 |
|
606 | |||
583 | test_flags = ' '.join(shlex.quote(a) for a in test_flags) |
|
607 | test_flags = ' '.join(shlex.quote(a) for a in test_flags) | |
584 |
|
608 | |||
585 | command = ( |
|
609 | command = ( | |
586 | '/bin/sh -c "export TMPDIR=/hgwork/tmp; ' |
|
610 | '/bin/sh -c "export TMPDIR=/hgwork/tmp; ' | |
587 | 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags) |
|
611 | 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags) | |
588 | ) |
|
612 | ) | |
589 |
|
613 | |||
590 | chan, stdin, stdout = exec_command(ssh_client, command) |
|
614 | chan, stdin, stdout = exec_command(ssh_client, command) | |
591 |
|
615 | |||
592 | stdin.close() |
|
616 | stdin.close() | |
593 |
|
617 | |||
594 | for line in stdout: |
|
618 | for line in stdout: | |
595 | print(line, end='') |
|
619 | print(line, end='') | |
596 |
|
620 | |||
597 | return chan.recv_exit_status() |
|
621 | return chan.recv_exit_status() |
@@ -1,215 +1,306 b'' | |||||
1 | # |
|
1 | # | |
2 | # This file is autogenerated by pip-compile |
|
2 | # This file is autogenerated by pip-compile | |
3 | # To update, run: |
|
3 | # To update, run: | |
4 | # |
|
4 | # | |
5 | # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in |
|
5 | # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in | |
6 | # |
|
6 | # | |
7 | appdirs==1.4.4 \ |
|
7 | appdirs==1.4.4 \ | |
8 | --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ |
|
8 | --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ | |
9 |
--hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 |
|
9 | --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 | |
10 | # via black |
|
10 | # via black | |
11 |
astroid==2. |
|
11 | astroid==2.5.6 \ | |
12 | --hash=sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703 \ |
|
12 | --hash=sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e \ | |
13 | --hash=sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386 \ |
|
13 | --hash=sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975 | |
14 | # via pylint |
|
14 | # via pylint | |
15 |
attrs==2 |
|
15 | attrs==21.1.0 \ | |
16 | --hash=sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594 \ |
|
16 | --hash=sha256:3901be1cb7c2a780f14668691474d9252c070a756be0a9ead98cfeabfa11aeb8 \ | |
17 | --hash=sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc \ |
|
17 | --hash=sha256:8ee1e5f5a1afc5b19bdfae4fdf0c35ed324074bdce3500c939842c8f818645d9 | |
18 | # via black |
|
18 | # via black | |
19 | black==19.10b0 ; python_version >= "3.6" and platform_python_implementation != "PyPy" \ |
|
19 | black==19.10b0 ; python_version >= "3.6" and platform_python_implementation != "PyPy" \ | |
20 | --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \ |
|
20 | --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \ | |
21 |
--hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 |
|
21 | --hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 | |
22 | # via -r contrib/automation/linux-requirements.txt.in |
|
22 | # via -r contrib/automation/linux-requirements.txt.in | |
23 | click==7.1.2 \ |
|
23 | click==7.1.2 \ | |
24 | --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \ |
|
24 | --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \ | |
25 |
--hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc |
|
25 | --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc | |
26 | # via black |
|
26 | # via black | |
27 |
docutils==0.1 |
|
27 | docutils==0.17.1 \ | |
28 | --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \ |
|
28 | --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 \ | |
29 | --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \ |
|
29 | --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61 | |
30 | # via -r contrib/automation/linux-requirements.txt.in |
|
30 | # via -r contrib/automation/linux-requirements.txt.in | |
31 | fuzzywuzzy==0.18.0 \ |
|
31 | fuzzywuzzy==0.18.0 \ | |
32 | --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \ |
|
32 | --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \ | |
33 |
--hash=sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993 |
|
33 | --hash=sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993 | |
34 | # via -r contrib/automation/linux-requirements.txt.in |
|
34 | # via -r contrib/automation/linux-requirements.txt.in | |
35 |
idna== |
|
35 | idna==3.1 \ | |
36 | --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ |
|
36 | --hash=sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16 \ | |
37 | --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \ |
|
37 | --hash=sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1 | |
38 | # via yarl |
|
38 | # via yarl | |
39 | isort==4.3.21 \ |
|
39 | isort==4.3.21 \ | |
40 | --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ |
|
40 | --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ | |
41 |
--hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd |
|
41 | --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd | |
42 | # via -r contrib/automation/linux-requirements.txt.in, pylint |
|
42 | # via | |
43 | lazy-object-proxy==1.4.3 \ |
|
43 | # -r contrib/automation/linux-requirements.txt.in | |
44 | --hash=sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d \ |
|
44 | # pylint | |
45 | --hash=sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449 \ |
|
45 | lazy-object-proxy==1.6.0 \ | |
46 | --hash=sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08 \ |
|
46 | --hash=sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653 \ | |
47 | --hash=sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a \ |
|
47 | --hash=sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61 \ | |
48 | --hash=sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50 \ |
|
48 | --hash=sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2 \ | |
49 | --hash=sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd \ |
|
49 | --hash=sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837 \ | |
50 | --hash=sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239 \ |
|
50 | --hash=sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3 \ | |
51 | --hash=sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb \ |
|
51 | --hash=sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43 \ | |
52 | --hash=sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea \ |
|
52 | --hash=sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726 \ | |
53 | --hash=sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e \ |
|
53 | --hash=sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3 \ | |
54 | --hash=sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156 \ |
|
54 | --hash=sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587 \ | |
55 | --hash=sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142 \ |
|
55 | --hash=sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8 \ | |
56 | --hash=sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442 \ |
|
56 | --hash=sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a \ | |
57 | --hash=sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62 \ |
|
57 | --hash=sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd \ | |
58 | --hash=sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db \ |
|
58 | --hash=sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f \ | |
59 | --hash=sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531 \ |
|
59 | --hash=sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad \ | |
60 | --hash=sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383 \ |
|
60 | --hash=sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4 \ | |
61 | --hash=sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a \ |
|
61 | --hash=sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b \ | |
62 | --hash=sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357 \ |
|
62 | --hash=sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf \ | |
63 | --hash=sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4 \ |
|
63 | --hash=sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981 \ | |
64 | --hash=sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0 \ |
|
64 | --hash=sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741 \ | |
|
65 | --hash=sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e \ | |||
|
66 | --hash=sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93 \ | |||
|
67 | --hash=sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b | |||
65 | # via astroid |
|
68 | # via astroid | |
66 | mccabe==0.6.1 \ |
|
69 | mccabe==0.6.1 \ | |
67 | --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ |
|
70 | --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ | |
68 |
--hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f |
|
71 | --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f | |
69 | # via pylint |
|
72 | # via pylint | |
70 |
multidict== |
|
73 | multidict==5.1.0 \ | |
71 | --hash=sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a \ |
|
74 | --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ | |
72 | --hash=sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000 \ |
|
75 | --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \ | |
73 | --hash=sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2 \ |
|
76 | --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \ | |
74 | --hash=sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507 \ |
|
77 | --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \ | |
75 | --hash=sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5 \ |
|
78 | --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \ | |
76 | --hash=sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7 \ |
|
79 | --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \ | |
77 | --hash=sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d \ |
|
80 | --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \ | |
78 | --hash=sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463 \ |
|
81 | --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \ | |
79 | --hash=sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19 \ |
|
82 | --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \ | |
80 | --hash=sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3 \ |
|
83 | --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \ | |
81 | --hash=sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b \ |
|
84 | --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \ | |
82 | --hash=sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c \ |
|
85 | --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \ | |
83 | --hash=sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87 \ |
|
86 | --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \ | |
84 | --hash=sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7 \ |
|
87 | --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \ | |
85 | --hash=sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430 \ |
|
88 | --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \ | |
86 | --hash=sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255 \ |
|
89 | --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \ | |
87 | --hash=sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d \ |
|
90 | --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \ | |
|
91 | --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \ | |||
|
92 | --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \ | |||
|
93 | --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \ | |||
|
94 | --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \ | |||
|
95 | --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \ | |||
|
96 | --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \ | |||
|
97 | --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \ | |||
|
98 | --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \ | |||
|
99 | --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \ | |||
|
100 | --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \ | |||
|
101 | --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \ | |||
|
102 | --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \ | |||
|
103 | --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \ | |||
|
104 | --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \ | |||
|
105 | --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \ | |||
|
106 | --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \ | |||
|
107 | --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \ | |||
|
108 | --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \ | |||
|
109 | --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ | |||
|
110 | --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 | |||
88 | # via yarl |
|
111 | # via yarl | |
89 |
pathspec==0.8. |
|
112 | pathspec==0.8.1 \ | |
90 | --hash=sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0 \ |
|
113 | --hash=sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd \ | |
91 | --hash=sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061 \ |
|
114 | --hash=sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d | |
92 | # via black |
|
115 | # via black | |
93 |
pyflakes==2. |
|
116 | pyflakes==2.3.1 \ | |
94 | --hash=sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92 \ |
|
117 | --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ | |
95 | --hash=sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8 \ |
|
118 | --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db | |
96 | # via -r contrib/automation/linux-requirements.txt.in |
|
119 | # via -r contrib/automation/linux-requirements.txt.in | |
97 |
pygments==2. |
|
120 | pygments==2.9.0 \ | |
98 | --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \ |
|
121 | --hash=sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f \ | |
99 | --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 \ |
|
122 | --hash=sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e | |
100 | # via -r contrib/automation/linux-requirements.txt.in |
|
123 | # via -r contrib/automation/linux-requirements.txt.in | |
101 |
pylint==2. |
|
124 | pylint==2.8.2 \ | |
102 | --hash=sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210 \ |
|
125 | --hash=sha256:586d8fa9b1891f4b725f587ef267abe2a1bad89d6b184520c7f07a253dd6e217 \ | |
103 | --hash=sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f \ |
|
126 | --hash=sha256:f7e2072654a6b6afdf5e2fb38147d3e2d2d43c89f648637baab63e026481279b | |
|
127 | # via -r contrib/automation/linux-requirements.txt.in | |||
|
128 | python-levenshtein==0.12.2 \ | |||
|
129 | --hash=sha256:dc2395fbd148a1ab31090dd113c366695934b9e85fe5a4b2a032745efd0346f6 | |||
104 | # via -r contrib/automation/linux-requirements.txt.in |
|
130 | # via -r contrib/automation/linux-requirements.txt.in | |
105 | python-levenshtein==0.12.0 \ |
|
131 | pyyaml==5.4.1 \ | |
106 | --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1 \ |
|
132 | --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ | |
107 | # via -r contrib/automation/linux-requirements.txt.in |
|
133 | --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ | |
108 | pyyaml==5.3.1 \ |
|
134 | --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ | |
109 | --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ |
|
135 | --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ | |
110 | --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ |
|
136 | --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ | |
111 | --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ |
|
137 | --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ | |
112 | --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ |
|
138 | --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ | |
113 | --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ |
|
139 | --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ | |
114 | --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ |
|
140 | --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ | |
115 | --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ |
|
141 | --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ | |
116 | --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ |
|
142 | --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ | |
117 | --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ |
|
143 | --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ | |
118 | --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ |
|
144 | --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ | |
119 | --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ |
|
145 | --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ | |
|
146 | --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ | |||
|
147 | --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ | |||
|
148 | --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ | |||
|
149 | --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ | |||
|
150 | --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ | |||
|
151 | --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ | |||
|
152 | --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ | |||
|
153 | --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ | |||
|
154 | --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ | |||
|
155 | --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ | |||
|
156 | --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ | |||
|
157 | --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ | |||
|
158 | --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ | |||
|
159 | --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ | |||
|
160 | --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 | |||
120 | # via vcrpy |
|
161 | # via vcrpy | |
121 |
regex==202 |
|
162 | regex==2021.4.4 \ | |
122 | --hash=sha256:088afc8c63e7bd187a3c70a94b9e50ab3f17e1d3f52a32750b5b77dbe99ef5ef \ |
|
163 | --hash=sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5 \ | |
123 | --hash=sha256:1fe0a41437bbd06063aa184c34804efa886bcc128222e9916310c92cd54c3b4c \ |
|
164 | --hash=sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79 \ | |
124 | --hash=sha256:3d20024a70b97b4f9546696cbf2fd30bae5f42229fbddf8661261b1eaff0deb7 \ |
|
165 | --hash=sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31 \ | |
125 | --hash=sha256:41bb65f54bba392643557e617316d0d899ed5b4946dccee1cb6696152b29844b \ |
|
166 | --hash=sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500 \ | |
126 | --hash=sha256:4318d56bccfe7d43e5addb272406ade7a2274da4b70eb15922a071c58ab0108c \ |
|
167 | --hash=sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11 \ | |
127 | --hash=sha256:4707f3695b34335afdfb09be3802c87fa0bc27030471dbc082f815f23688bc63 \ |
|
168 | --hash=sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14 \ | |
128 | --hash=sha256:49f23ebd5ac073765ecbcf046edc10d63dcab2f4ae2bce160982cb30df0c0302 \ |
|
169 | --hash=sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3 \ | |
129 | --hash=sha256:5533a959a1748a5c042a6da71fe9267a908e21eded7a4f373efd23a2cbdb0ecc \ |
|
170 | --hash=sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439 \ | |
130 | --hash=sha256:5d892a4f1c999834eaa3c32bc9e8b976c5825116cde553928c4c8e7e48ebda67 \ |
|
171 | --hash=sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c \ | |
131 | --hash=sha256:5f18875ac23d9aa2f060838e8b79093e8bb2313dbaaa9f54c6d8e52a5df097be \ |
|
172 | --hash=sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82 \ | |
132 | --hash=sha256:60b0e9e6dc45683e569ec37c55ac20c582973841927a85f2d8a7d20ee80216ab \ |
|
173 | --hash=sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711 \ | |
133 | --hash=sha256:816064fc915796ea1f26966163f6845de5af78923dfcecf6551e095f00983650 \ |
|
174 | --hash=sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093 \ | |
134 | --hash=sha256:84cada8effefe9a9f53f9b0d2ba9b7b6f5edf8d2155f9fdbe34616e06ececf81 \ |
|
175 | --hash=sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a \ | |
135 | --hash=sha256:84e9407db1b2eb368b7ecc283121b5e592c9aaedbe8c78b1a2f1102eb2e21d19 \ |
|
176 | --hash=sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb \ | |
136 | --hash=sha256:8d69cef61fa50c8133382e61fd97439de1ae623fe943578e477e76a9d9471637 \ |
|
177 | --hash=sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8 \ | |
137 | --hash=sha256:9a02d0ae31d35e1ec12a4ea4d4cca990800f66a917d0fb997b20fbc13f5321fc \ |
|
178 | --hash=sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17 \ | |
138 | --hash=sha256:9bc13e0d20b97ffb07821aa3e113f9998e84994fe4d159ffa3d3a9d1b805043b \ |
|
179 | --hash=sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000 \ | |
139 | --hash=sha256:a6f32aea4260dfe0e55dc9733ea162ea38f0ea86aa7d0f77b15beac5bf7b369d \ |
|
180 | --hash=sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d \ | |
140 | --hash=sha256:ae91972f8ac958039920ef6e8769277c084971a142ce2b660691793ae44aae6b \ |
|
181 | --hash=sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480 \ | |
141 | --hash=sha256:c570f6fa14b9c4c8a4924aaad354652366577b4f98213cf76305067144f7b100 \ |
|
182 | --hash=sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc \ | |
142 | --hash=sha256:c9443124c67b1515e4fe0bb0aa18df640965e1030f468a2a5dc2589b26d130ad \ |
|
183 | --hash=sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0 \ | |
143 | --hash=sha256:d23a18037313714fb3bb5a94434d3151ee4300bae631894b1ac08111abeaa4a3 \ |
|
184 | --hash=sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9 \ | |
144 | --hash=sha256:eaf548d117b6737df379fdd53bdde4f08870e66d7ea653e230477f071f861121 \ |
|
185 | --hash=sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765 \ | |
145 | --hash=sha256:ebbe29186a3d9b0c591e71b7393f1ae08c83cb2d8e517d2a822b8f7ec99dfd8b \ |
|
186 | --hash=sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e \ | |
146 | --hash=sha256:eda4771e0ace7f67f58bc5b560e27fb20f32a148cbc993b0c3835970935c2707 \ |
|
187 | --hash=sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a \ | |
147 | --hash=sha256:f1b3afc574a3db3b25c89161059d857bd4909a1269b0b3cb3c904677c8c4a3f7 \ |
|
188 | --hash=sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07 \ | |
148 | --hash=sha256:f2388013e68e750eaa16ccbea62d4130180c26abb1d8e5d584b9baf69672b30f \ |
|
189 | --hash=sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f \ | |
|
190 | --hash=sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac \ | |||
|
191 | --hash=sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7 \ | |||
|
192 | --hash=sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed \ | |||
|
193 | --hash=sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968 \ | |||
|
194 | --hash=sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7 \ | |||
|
195 | --hash=sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2 \ | |||
|
196 | --hash=sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4 \ | |||
|
197 | --hash=sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87 \ | |||
|
198 | --hash=sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8 \ | |||
|
199 | --hash=sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10 \ | |||
|
200 | --hash=sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29 \ | |||
|
201 | --hash=sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605 \ | |||
|
202 | --hash=sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6 \ | |||
|
203 | --hash=sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042 | |||
149 | # via black |
|
204 | # via black | |
150 |
six==1.1 |
|
205 | six==1.16.0 \ | |
151 | --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ |
|
206 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ | |
152 | --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ |
|
207 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 | |
153 |
# via |
|
208 | # via vcrpy | |
154 |
toml==0.10. |
|
209 | toml==0.10.2 \ | |
155 | --hash=sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f \ |
|
210 | --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ | |
156 | --hash=sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88 \ |
|
211 | --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f | |
157 | # via black, pylint |
|
212 | # via | |
158 | typed-ast==1.4.1 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \ |
|
213 | # black | |
159 | --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \ |
|
214 | # pylint | |
160 | --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \ |
|
215 | typed-ast==1.4.3 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \ | |
161 | --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \ |
|
216 | --hash=sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace \ | |
162 | --hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \ |
|
217 | --hash=sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff \ | |
163 | --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \ |
|
218 | --hash=sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266 \ | |
164 | --hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \ |
|
219 | --hash=sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528 \ | |
165 | --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \ |
|
220 | --hash=sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6 \ | |
166 | --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \ |
|
221 | --hash=sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808 \ | |
167 | --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \ |
|
222 | --hash=sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4 \ | |
168 | --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \ |
|
223 | --hash=sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363 \ | |
169 | --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \ |
|
224 | --hash=sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341 \ | |
170 | --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \ |
|
225 | --hash=sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04 \ | |
171 | --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \ |
|
226 | --hash=sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41 \ | |
172 | --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \ |
|
227 | --hash=sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e \ | |
173 | --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \ |
|
228 | --hash=sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3 \ | |
174 | --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \ |
|
229 | --hash=sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899 \ | |
175 | --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \ |
|
230 | --hash=sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805 \ | |
176 | --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \ |
|
231 | --hash=sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c \ | |
177 | --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \ |
|
232 | --hash=sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c \ | |
178 | --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \ |
|
233 | --hash=sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39 \ | |
179 | --hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \ |
|
234 | --hash=sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a \ | |
180 | # via -r contrib/automation/linux-requirements.txt.in, astroid, black |
|
235 | --hash=sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3 \ | |
181 | typing-extensions==3.7.4.3 \ |
|
236 | --hash=sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7 \ | |
182 | --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ |
|
237 | --hash=sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f \ | |
183 | --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ |
|
238 | --hash=sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075 \ | |
184 | --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f \ |
|
239 | --hash=sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0 \ | |
|
240 | --hash=sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40 \ | |||
|
241 | --hash=sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428 \ | |||
|
242 | --hash=sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927 \ | |||
|
243 | --hash=sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3 \ | |||
|
244 | --hash=sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f \ | |||
|
245 | --hash=sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65 | |||
|
246 | # via | |||
|
247 | # -r contrib/automation/linux-requirements.txt.in | |||
|
248 | # astroid | |||
|
249 | # black | |||
|
250 | typing-extensions==3.10.0.0 \ | |||
|
251 | --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ | |||
|
252 | --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ | |||
|
253 | --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 | |||
185 | # via yarl |
|
254 | # via yarl | |
186 |
vcrpy==4.1. |
|
255 | vcrpy==4.1.1 \ | |
187 | --hash=sha256:4138e79eb35981ad391406cbb7227bce7eba8bad788dcf1a89c2e4a8b740debe \ |
|
256 | --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \ | |
188 | --hash=sha256:d833248442bbc560599add895c9ab0ef518676579e8dc72d8b0933bdb3880253 \ |
|
257 | --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 | |
189 | # via -r contrib/automation/linux-requirements.txt.in |
|
258 | # via -r contrib/automation/linux-requirements.txt.in | |
190 | wrapt==1.12.1 \ |
|
259 | wrapt==1.12.1 \ | |
191 |
--hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 |
|
260 | --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 | |
192 | # via astroid, vcrpy |
|
261 | # via | |
193 | yarl==1.6.0 \ |
|
262 | # astroid | |
194 | --hash=sha256:04a54f126a0732af75e5edc9addeaa2113e2ca7c6fce8974a63549a70a25e50e \ |
|
263 | # vcrpy | |
195 | --hash=sha256:3cc860d72ed989f3b1f3abbd6ecf38e412de722fb38b8f1b1a086315cf0d69c5 \ |
|
264 | yarl==1.6.3 \ | |
196 | --hash=sha256:5d84cc36981eb5a8533be79d6c43454c8e6a39ee3118ceaadbd3c029ab2ee580 \ |
|
265 | --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ | |
197 | --hash=sha256:5e447e7f3780f44f890360ea973418025e8c0cdcd7d6a1b221d952600fd945dc \ |
|
266 | --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \ | |
198 | --hash=sha256:61d3ea3c175fe45f1498af868879c6ffeb989d4143ac542163c45538ba5ec21b \ |
|
267 | --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \ | |
199 | --hash=sha256:67c5ea0970da882eaf9efcf65b66792557c526f8e55f752194eff8ec722c75c2 \ |
|
268 | --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \ | |
200 | --hash=sha256:6f6898429ec3c4cfbef12907047136fd7b9e81a6ee9f105b45505e633427330a \ |
|
269 | --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \ | |
201 | --hash=sha256:7ce35944e8e61927a8f4eb78f5bc5d1e6da6d40eadd77e3f79d4e9399e263921 \ |
|
270 | --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \ | |
202 | --hash=sha256:b7c199d2cbaf892ba0f91ed36d12ff41ecd0dde46cbf64ff4bfe997a3ebc925e \ |
|
271 | --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \ | |
203 | --hash=sha256:c15d71a640fb1f8e98a1423f9c64d7f1f6a3a168f803042eaf3a5b5022fde0c1 \ |
|
272 | --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \ | |
204 | --hash=sha256:c22607421f49c0cb6ff3ed593a49b6a99c6ffdeaaa6c944cdda83c2393c8864d \ |
|
273 | --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \ | |
205 | --hash=sha256:c604998ab8115db802cc55cb1b91619b2831a6128a62ca7eea577fc8ea4d3131 \ |
|
274 | --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \ | |
206 | --hash=sha256:d088ea9319e49273f25b1c96a3763bf19a882cff774d1792ae6fba34bd40550a \ |
|
275 | --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \ | |
207 | --hash=sha256:db9eb8307219d7e09b33bcb43287222ef35cbcf1586ba9472b0a4b833666ada1 \ |
|
276 | --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \ | |
208 | --hash=sha256:e31fef4e7b68184545c3d68baec7074532e077bd1906b040ecfba659737df188 \ |
|
277 | --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \ | |
209 | --hash=sha256:e32f0fb443afcfe7f01f95172b66f279938fbc6bdaebe294b0ff6747fb6db020 \ |
|
278 | --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \ | |
210 | --hash=sha256:fcbe419805c9b20db9a51d33b942feddbf6e7fb468cb20686fd7089d4164c12a \ |
|
279 | --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \ | |
|
280 | --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \ | |||
|
281 | --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \ | |||
|
282 | --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \ | |||
|
283 | --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \ | |||
|
284 | --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \ | |||
|
285 | --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \ | |||
|
286 | --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \ | |||
|
287 | --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \ | |||
|
288 | --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \ | |||
|
289 | --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \ | |||
|
290 | --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \ | |||
|
291 | --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \ | |||
|
292 | --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \ | |||
|
293 | --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \ | |||
|
294 | --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \ | |||
|
295 | --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \ | |||
|
296 | --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \ | |||
|
297 | --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \ | |||
|
298 | --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \ | |||
|
299 | --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \ | |||
|
300 | --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ | |||
|
301 | --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 | |||
211 | # via vcrpy |
|
302 | # via vcrpy | |
212 |
|
303 | |||
213 | # WARNING: The following packages were not pinned, but pip requires them to be |
|
304 | # WARNING: The following packages were not pinned, but pip requires them to be | |
214 | # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. |
|
305 | # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. | |
215 | # setuptools |
|
306 | # setuptools |
@@ -1,1109 +1,1131 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 | # |
|
2 | # | |
3 | # check-code - a style and portability checker for Mercurial |
|
3 | # check-code - a style and portability checker for Mercurial | |
4 | # |
|
4 | # | |
5 | # Copyright 2010 Olivia Mackall <olivia@selenic.com> |
|
5 | # Copyright 2010 Olivia Mackall <olivia@selenic.com> | |
6 | # |
|
6 | # | |
7 | # This software may be used and distributed according to the terms of the |
|
7 | # This software may be used and distributed according to the terms of the | |
8 | # GNU General Public License version 2 or any later version. |
|
8 | # GNU General Public License version 2 or any later version. | |
9 |
|
9 | |||
10 | """style and portability checker for Mercurial |
|
10 | """style and portability checker for Mercurial | |
11 |
|
11 | |||
12 | when a rule triggers wrong, do one of the following (prefer one from top): |
|
12 | when a rule triggers wrong, do one of the following (prefer one from top): | |
13 | * do the work-around the rule suggests |
|
13 | * do the work-around the rule suggests | |
14 | * doublecheck that it is a false match |
|
14 | * doublecheck that it is a false match | |
15 | * improve the rule pattern |
|
15 | * improve the rule pattern | |
16 | * add an ignore pattern to the rule (3rd arg) which matches your good line |
|
16 | * add an ignore pattern to the rule (3rd arg) which matches your good line | |
17 | (you can append a short comment and match this, like: #re-raises) |
|
17 | (you can append a short comment and match this, like: #re-raises) | |
18 | * change the pattern to a warning and list the exception in test-check-code-hg |
|
18 | * change the pattern to a warning and list the exception in test-check-code-hg | |
19 | * ONLY use no--check-code for skipping entire files from external sources |
|
19 | * ONLY use no--check-code for skipping entire files from external sources | |
20 | """ |
|
20 | """ | |
21 |
|
21 | |||
22 | from __future__ import absolute_import, print_function |
|
22 | from __future__ import absolute_import, print_function | |
23 | import glob |
|
23 | import glob | |
24 | import keyword |
|
24 | import keyword | |
25 | import optparse |
|
25 | import optparse | |
26 | import os |
|
26 | import os | |
27 | import re |
|
27 | import re | |
28 | import sys |
|
28 | import sys | |
29 |
|
29 | |||
30 | if sys.version_info[0] < 3: |
|
30 | if sys.version_info[0] < 3: | |
31 | opentext = open |
|
31 | opentext = open | |
32 | else: |
|
32 | else: | |
33 |
|
33 | |||
34 | def opentext(f): |
|
34 | def opentext(f): | |
35 | return open(f, encoding='latin1') |
|
35 | return open(f, encoding='latin1') | |
36 |
|
36 | |||
37 |
|
37 | |||
38 | try: |
|
38 | try: | |
39 | xrange |
|
39 | xrange | |
40 | except NameError: |
|
40 | except NameError: | |
41 | xrange = range |
|
41 | xrange = range | |
42 | try: |
|
42 | try: | |
43 | import re2 |
|
43 | import re2 | |
44 | except ImportError: |
|
44 | except ImportError: | |
45 | re2 = None |
|
45 | re2 = None | |
46 |
|
46 | |||
47 | import testparseutil |
|
47 | import testparseutil | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | def compilere(pat, multiline=False): |
|
50 | def compilere(pat, multiline=False): | |
51 | if multiline: |
|
51 | if multiline: | |
52 | pat = '(?m)' + pat |
|
52 | pat = '(?m)' + pat | |
53 | if re2: |
|
53 | if re2: | |
54 | try: |
|
54 | try: | |
55 | return re2.compile(pat) |
|
55 | return re2.compile(pat) | |
56 | except re2.error: |
|
56 | except re2.error: | |
57 | pass |
|
57 | pass | |
58 | return re.compile(pat) |
|
58 | return re.compile(pat) | |
59 |
|
59 | |||
60 |
|
60 | |||
61 | # check "rules depending on implementation of repquote()" in each |
|
61 | # check "rules depending on implementation of repquote()" in each | |
62 | # patterns (especially pypats), before changing around repquote() |
|
62 | # patterns (especially pypats), before changing around repquote() | |
63 | _repquotefixedmap = { |
|
63 | _repquotefixedmap = { | |
64 | ' ': ' ', |
|
64 | ' ': ' ', | |
65 | '\n': '\n', |
|
65 | '\n': '\n', | |
66 | '.': 'p', |
|
66 | '.': 'p', | |
67 | ':': 'q', |
|
67 | ':': 'q', | |
68 | '%': '%', |
|
68 | '%': '%', | |
69 | '\\': 'b', |
|
69 | '\\': 'b', | |
70 | '*': 'A', |
|
70 | '*': 'A', | |
71 | '+': 'P', |
|
71 | '+': 'P', | |
72 | '-': 'M', |
|
72 | '-': 'M', | |
73 | } |
|
73 | } | |
74 |
|
74 | |||
75 |
|
75 | |||
76 | def _repquoteencodechr(i): |
|
76 | def _repquoteencodechr(i): | |
77 | if i > 255: |
|
77 | if i > 255: | |
78 | return 'u' |
|
78 | return 'u' | |
79 | c = chr(i) |
|
79 | c = chr(i) | |
80 | if c in _repquotefixedmap: |
|
80 | if c in _repquotefixedmap: | |
81 | return _repquotefixedmap[c] |
|
81 | return _repquotefixedmap[c] | |
82 | if c.isalpha(): |
|
82 | if c.isalpha(): | |
83 | return 'x' |
|
83 | return 'x' | |
84 | if c.isdigit(): |
|
84 | if c.isdigit(): | |
85 | return 'n' |
|
85 | return 'n' | |
86 | return 'o' |
|
86 | return 'o' | |
87 |
|
87 | |||
88 |
|
88 | |||
89 | _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256)) |
|
89 | _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256)) | |
90 |
|
90 | |||
91 |
|
91 | |||
92 | def repquote(m): |
|
92 | def repquote(m): | |
93 | t = m.group('text') |
|
93 | t = m.group('text') | |
94 | t = t.translate(_repquotett) |
|
94 | t = t.translate(_repquotett) | |
95 | return m.group('quote') + t + m.group('quote') |
|
95 | return m.group('quote') + t + m.group('quote') | |
96 |
|
96 | |||
97 |
|
97 | |||
98 | def reppython(m): |
|
98 | def reppython(m): | |
99 | comment = m.group('comment') |
|
99 | comment = m.group('comment') | |
100 | if comment: |
|
100 | if comment: | |
101 | l = len(comment.rstrip()) |
|
101 | l = len(comment.rstrip()) | |
102 | return "#" * l + comment[l:] |
|
102 | return "#" * l + comment[l:] | |
103 | return repquote(m) |
|
103 | return repquote(m) | |
104 |
|
104 | |||
105 |
|
105 | |||
106 | def repcomment(m): |
|
106 | def repcomment(m): | |
107 | return m.group(1) + "#" * len(m.group(2)) |
|
107 | return m.group(1) + "#" * len(m.group(2)) | |
108 |
|
108 | |||
109 |
|
109 | |||
110 | def repccomment(m): |
|
110 | def repccomment(m): | |
111 | t = re.sub(r"((?<=\n) )|\S", "x", m.group(2)) |
|
111 | t = re.sub(r"((?<=\n) )|\S", "x", m.group(2)) | |
112 | return m.group(1) + t + "*/" |
|
112 | return m.group(1) + t + "*/" | |
113 |
|
113 | |||
114 |
|
114 | |||
115 | def repcallspaces(m): |
|
115 | def repcallspaces(m): | |
116 | t = re.sub(r"\n\s+", "\n", m.group(2)) |
|
116 | t = re.sub(r"\n\s+", "\n", m.group(2)) | |
117 | return m.group(1) + t |
|
117 | return m.group(1) + t | |
118 |
|
118 | |||
119 |
|
119 | |||
120 | def repinclude(m): |
|
120 | def repinclude(m): | |
121 | return m.group(1) + "<foo>" |
|
121 | return m.group(1) + "<foo>" | |
122 |
|
122 | |||
123 |
|
123 | |||
124 | def rephere(m): |
|
124 | def rephere(m): | |
125 | t = re.sub(r"\S", "x", m.group(2)) |
|
125 | t = re.sub(r"\S", "x", m.group(2)) | |
126 | return m.group(1) + t |
|
126 | return m.group(1) + t | |
127 |
|
127 | |||
128 |
|
128 | |||
129 | testpats = [ |
|
129 | testpats = [ | |
130 | [ |
|
130 | [ | |
131 | (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"), |
|
131 | (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"), | |
132 | (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"), |
|
132 | (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"), | |
133 | (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"), |
|
133 | (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"), | |
134 | (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"), |
|
134 | (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"), | |
135 | (r'sed.*-i', "don't use 'sed -i', use a temporary file"), |
|
135 | (r'sed.*-i', "don't use 'sed -i', use a temporary file"), | |
136 | (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"), |
|
136 | (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"), | |
137 | (r'echo -n', "don't use 'echo -n', use printf"), |
|
137 | (r'echo -n', "don't use 'echo -n', use printf"), | |
138 | (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"), |
|
138 | (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"), | |
139 | (r'head -c', "don't use 'head -c', use 'dd'"), |
|
139 | (r'head -c', "don't use 'head -c', use 'dd'"), | |
140 | (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"), |
|
140 | (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"), | |
141 | (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"), |
|
141 | (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"), | |
142 | (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"), |
|
142 | (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"), | |
143 | (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"), |
|
143 | (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"), | |
144 | (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"), |
|
144 | (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"), | |
145 | (r'rm -rf \*', "don't use naked rm -rf, target a directory"), |
|
145 | (r'rm -rf \*', "don't use naked rm -rf, target a directory"), | |
146 | ( |
|
146 | ( | |
147 | r'\[[^\]]+==', |
|
147 | r'\[[^\]]+==', | |
148 | '[ foo == bar ] is a bashism, use [ foo = bar ] instead', |
|
148 | '[ foo == bar ] is a bashism, use [ foo = bar ] instead', | |
149 | ), |
|
149 | ), | |
150 | ( |
|
150 | ( | |
151 | r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w', |
|
151 | r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w', | |
152 | "use egrep for extended grep syntax", |
|
152 | "use egrep for extended grep syntax", | |
153 | ), |
|
153 | ), | |
154 | (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"), |
|
154 | (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"), | |
155 | (r'(?<!!)/bin/', "don't use explicit paths for tools"), |
|
155 | (r'(?<!!)/bin/', "don't use explicit paths for tools"), | |
156 | (r'#!.*/bash', "don't use bash in shebang, use sh"), |
|
156 | (r'#!.*/bash', "don't use bash in shebang, use sh"), | |
157 | (r'[^\n]\Z', "no trailing newline"), |
|
157 | (r'[^\n]\Z', "no trailing newline"), | |
158 | (r'export .*=', "don't export and assign at once"), |
|
158 | (r'export .*=', "don't export and assign at once"), | |
159 | (r'^source\b', "don't use 'source', use '.'"), |
|
159 | (r'^source\b', "don't use 'source', use '.'"), | |
160 | (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"), |
|
160 | (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"), | |
161 | (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"), |
|
161 | (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"), | |
162 | (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"), |
|
162 | (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"), | |
163 | (r'^stop\(\)', "don't use 'stop' as a shell function name"), |
|
163 | (r'^stop\(\)', "don't use 'stop' as a shell function name"), | |
164 | (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"), |
|
164 | (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"), | |
165 | (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"), |
|
165 | (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"), | |
166 | (r'^alias\b.*=', "don't use alias, use a function"), |
|
166 | (r'^alias\b.*=', "don't use alias, use a function"), | |
167 | (r'if\s*!', "don't use '!' to negate exit status"), |
|
167 | (r'if\s*!', "don't use '!' to negate exit status"), | |
168 | (r'/dev/u?random', "don't use entropy, use /dev/zero"), |
|
168 | (r'/dev/u?random', "don't use entropy, use /dev/zero"), | |
169 | (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"), |
|
169 | (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"), | |
170 | ( |
|
170 | ( | |
171 | r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)', |
|
171 | r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)', | |
172 | "put a backslash-escaped newline after sed 'i' command", |
|
172 | "put a backslash-escaped newline after sed 'i' command", | |
173 | ), |
|
173 | ), | |
174 | (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"), |
|
174 | (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"), | |
175 | (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"), |
|
175 | (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"), | |
176 | (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"), |
|
176 | (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"), | |
177 | (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"), |
|
177 | (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"), | |
178 | (r'\butil\.Abort\b', "directly use error.Abort"), |
|
178 | (r'\butil\.Abort\b', "directly use error.Abort"), | |
179 | (r'\|&', "don't use |&, use 2>&1"), |
|
179 | (r'\|&', "don't use |&, use 2>&1"), | |
180 | (r'\w = +\w', "only one space after = allowed"), |
|
180 | (r'\w = +\w', "only one space after = allowed"), | |
181 | ( |
|
181 | ( | |
182 | r'\bsed\b.*[^\\]\\n', |
|
182 | r'\bsed\b.*[^\\]\\n', | |
183 | "don't use 'sed ... \\n', use a \\ and a newline", |
|
183 | "don't use 'sed ... \\n', use a \\ and a newline", | |
184 | ), |
|
184 | ), | |
185 | (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"), |
|
185 | (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"), | |
186 | (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"), |
|
186 | (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"), | |
187 | (r'grep.* -[ABC]', "don't use grep's context flags"), |
|
187 | (r'grep.* -[ABC]', "don't use grep's context flags"), | |
188 | ( |
|
188 | ( | |
189 | r'find.*-printf', |
|
189 | r'find.*-printf', | |
190 | "don't use 'find -printf', it doesn't exist on BSD find(1)", |
|
190 | "don't use 'find -printf', it doesn't exist on BSD find(1)", | |
191 | ), |
|
191 | ), | |
192 | (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"), |
|
192 | (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"), | |
193 | ], |
|
193 | ], | |
194 | # warnings |
|
194 | # warnings | |
195 | [ |
|
195 | [ | |
196 | (r'^function', "don't use 'function', use old style"), |
|
196 | (r'^function', "don't use 'function', use old style"), | |
197 | (r'^diff.*-\w*N', "don't use 'diff -N'"), |
|
197 | (r'^diff.*-\w*N', "don't use 'diff -N'"), | |
198 | (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"), |
|
198 | (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"), | |
199 | (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"), |
|
199 | (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"), | |
200 | (r'kill (`|\$\()', "don't use kill, use killdaemons.py"), |
|
200 | (r'kill (`|\$\()', "don't use kill, use killdaemons.py"), | |
201 | ], |
|
201 | ], | |
202 | ] |
|
202 | ] | |
203 |
|
203 | |||
204 | testfilters = [ |
|
204 | testfilters = [ | |
205 | (r"( *)(#([^!][^\n]*\S)?)", repcomment), |
|
205 | (r"( *)(#([^!][^\n]*\S)?)", repcomment), | |
206 | (r"<<(\S+)((.|\n)*?\n\1)", rephere), |
|
206 | (r"<<(\S+)((.|\n)*?\n\1)", rephere), | |
207 | ] |
|
207 | ] | |
208 |
|
208 | |||
209 | uprefix = r"^ \$ " |
|
209 | uprefix = r"^ \$ " | |
210 | utestpats = [ |
|
210 | utestpats = [ | |
211 | [ |
|
211 | [ | |
212 | (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"), |
|
212 | (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"), | |
213 | ( |
|
213 | ( | |
214 | uprefix + r'.*\|\s*sed[^|>\n]*\n', |
|
214 | uprefix + r'.*\|\s*sed[^|>\n]*\n', | |
215 | "use regex test output patterns instead of sed", |
|
215 | "use regex test output patterns instead of sed", | |
216 | ), |
|
216 | ), | |
217 | (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"), |
|
217 | (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"), | |
218 | (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"), |
|
|||
219 | ( |
|
218 | ( | |
220 | uprefix + r'.*\|\| echo.*(fail|error)', |
|
219 | uprefix + r'.*\|\| echo.*(fail|error)', | |
221 | "explicit exit code checks unnecessary", |
|
220 | "explicit exit code checks unnecessary", | |
222 | ), |
|
221 | ), | |
223 | (uprefix + r'set -e', "don't use set -e"), |
|
222 | (uprefix + r'set -e', "don't use set -e"), | |
224 | (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"), |
|
223 | (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"), | |
225 | ( |
|
224 | ( | |
226 | uprefix + r'.*:\.\S*/', |
|
225 | uprefix + r'.*:\.\S*/', | |
227 | "x:.y in a path does not work on msys, rewrite " |
|
226 | "x:.y in a path does not work on msys, rewrite " | |
228 | "as x://.y, or see `hg log -k msys` for alternatives", |
|
227 | "as x://.y, or see `hg log -k msys` for alternatives", | |
229 | r'-\S+:\.|' '# no-msys', # -Rxxx |
|
228 | r'-\S+:\.|' '# no-msys', # -Rxxx | |
230 | ), # in test-pull.t which is skipped on windows |
|
229 | ), # in test-pull.t which is skipped on windows | |
231 | ( |
|
230 | ( | |
232 | r'^ [^$>].*27\.0\.0\.1', |
|
231 | r'^ [^$>].*27\.0\.0\.1', | |
233 | 'use $LOCALIP not an explicit loopback address', |
|
232 | 'use $LOCALIP not an explicit loopback address', | |
234 | ), |
|
233 | ), | |
235 | ( |
|
234 | ( | |
236 | r'^ (?![>$] ).*\$LOCALIP.*[^)]$', |
|
235 | r'^ (?![>$] ).*\$LOCALIP.*[^)]$', | |
237 | 'mark $LOCALIP output lines with (glob) to help tests in BSD jails', |
|
236 | 'mark $LOCALIP output lines with (glob) to help tests in BSD jails', | |
238 | ), |
|
237 | ), | |
239 | ( |
|
238 | ( | |
240 | r'^ (cat|find): .*: \$ENOENT\$', |
|
239 | r'^ (cat|find): .*: \$ENOENT\$', | |
241 | 'use test -f to test for file existence', |
|
240 | 'use test -f to test for file existence', | |
242 | ), |
|
241 | ), | |
243 | ( |
|
242 | ( | |
244 | r'^ diff -[^ -]*p', |
|
243 | r'^ diff -[^ -]*p', | |
245 | "don't use (external) diff with -p for portability", |
|
244 | "don't use (external) diff with -p for portability", | |
246 | ), |
|
245 | ), | |
247 | (r' readlink ', 'use readlink.py instead of readlink'), |
|
246 | (r' readlink ', 'use readlink.py instead of readlink'), | |
248 | ( |
|
247 | ( | |
249 | r'^ [-+][-+][-+] .* [-+]0000 \(glob\)', |
|
248 | r'^ [-+][-+][-+] .* [-+]0000 \(glob\)', | |
250 | "glob timezone field in diff output for portability", |
|
249 | "glob timezone field in diff output for portability", | |
251 | ), |
|
250 | ), | |
252 | ( |
|
251 | ( | |
253 | r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@', |
|
252 | r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@', | |
254 | "use '@@ -N* +N,n @@ (glob)' style chunk header for portability", |
|
253 | "use '@@ -N* +N,n @@ (glob)' style chunk header for portability", | |
255 | ), |
|
254 | ), | |
256 | ( |
|
255 | ( | |
257 | r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@', |
|
256 | r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@', | |
258 | "use '@@ -N,n +N* @@ (glob)' style chunk header for portability", |
|
257 | "use '@@ -N,n +N* @@ (glob)' style chunk header for portability", | |
259 | ), |
|
258 | ), | |
260 | ( |
|
259 | ( | |
261 | r'^ @@ -[0-9]+ [+][0-9]+ @@', |
|
260 | r'^ @@ -[0-9]+ [+][0-9]+ @@', | |
262 | "use '@@ -N* +N* @@ (glob)' style chunk header for portability", |
|
261 | "use '@@ -N* +N* @@ (glob)' style chunk header for portability", | |
263 | ), |
|
262 | ), | |
264 | ( |
|
263 | ( | |
265 | uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff' |
|
264 | uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff' | |
266 | r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$', |
|
265 | r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$', | |
267 | "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)", |
|
266 | "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)", | |
268 | ), |
|
267 | ), | |
269 | ], |
|
268 | ], | |
270 | # warnings |
|
269 | # warnings | |
271 | [ |
|
270 | [ | |
272 | ( |
|
271 | ( | |
273 | r'^ (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$', |
|
272 | r'^ (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$', | |
274 | "glob match with no glob string (?, *, /, and $LOCALIP)", |
|
273 | "glob match with no glob string (?, *, /, and $LOCALIP)", | |
275 | ), |
|
274 | ), | |
276 | ], |
|
275 | ], | |
277 | ] |
|
276 | ] | |
278 |
|
277 | |||
279 | # transform plain test rules to unified test's |
|
278 | # transform plain test rules to unified test's | |
280 | for i in [0, 1]: |
|
279 | for i in [0, 1]: | |
281 | for tp in testpats[i]: |
|
280 | for tp in testpats[i]: | |
282 | p = tp[0] |
|
281 | p = tp[0] | |
283 | m = tp[1] |
|
282 | m = tp[1] | |
284 | if p.startswith('^'): |
|
283 | if p.startswith('^'): | |
285 | p = "^ [$>] (%s)" % p[1:] |
|
284 | p = "^ [$>] (%s)" % p[1:] | |
286 | else: |
|
285 | else: | |
287 | p = "^ [$>] .*(%s)" % p |
|
286 | p = "^ [$>] .*(%s)" % p | |
288 | utestpats[i].append((p, m) + tp[2:]) |
|
287 | utestpats[i].append((p, m) + tp[2:]) | |
289 |
|
288 | |||
290 | # don't transform the following rules: |
|
289 | # don't transform the following rules: | |
291 | # " > \t" and " \t" should be allowed in unified tests |
|
290 | # " > \t" and " \t" should be allowed in unified tests | |
292 | testpats[0].append((r'^( *)\t', "don't use tabs to indent")) |
|
291 | testpats[0].append((r'^( *)\t', "don't use tabs to indent")) | |
293 | utestpats[0].append((r'^( ?)\t', "don't use tabs to indent")) |
|
292 | utestpats[0].append((r'^( ?)\t', "don't use tabs to indent")) | |
294 |
|
293 | |||
295 | utestfilters = [ |
|
294 | utestfilters = [ | |
296 | (r"<<(\S+)((.|\n)*?\n > \1)", rephere), |
|
295 | (r"<<(\S+)((.|\n)*?\n > \1)", rephere), | |
297 | (r"( +)(#([^!][^\n]*\S)?)", repcomment), |
|
296 | (r"( +)(#([^!][^\n]*\S)?)", repcomment), | |
298 | ] |
|
297 | ] | |
299 |
|
298 | |||
300 | # common patterns to check *.py |
|
299 | # common patterns to check *.py | |
301 | commonpypats = [ |
|
300 | commonpypats = [ | |
302 | [ |
|
301 | [ | |
303 | (r'\\$', 'Use () to wrap long lines in Python, not \\'), |
|
302 | (r'\\$', 'Use () to wrap long lines in Python, not \\'), | |
304 | ( |
|
303 | ( | |
305 | r'^\s*def\s*\w+\s*\(.*,\s*\(', |
|
304 | r'^\s*def\s*\w+\s*\(.*,\s*\(', | |
306 | "tuple parameter unpacking not available in Python 3+", |
|
305 | "tuple parameter unpacking not available in Python 3+", | |
307 | ), |
|
306 | ), | |
308 | ( |
|
307 | ( | |
309 | r'lambda\s*\(.*,.*\)', |
|
308 | r'lambda\s*\(.*,.*\)', | |
310 | "tuple parameter unpacking not available in Python 3+", |
|
309 | "tuple parameter unpacking not available in Python 3+", | |
311 | ), |
|
310 | ), | |
312 | (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"), |
|
311 | (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"), | |
313 | (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"), |
|
312 | (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"), | |
314 | ( |
|
313 | ( | |
315 | r'\bdict\(.*=', |
|
314 | r'\bdict\(.*=', | |
316 | 'dict() is different in Py2 and 3 and is slower than {}', |
|
315 | 'dict() is different in Py2 and 3 and is slower than {}', | |
317 | 'dict-from-generator', |
|
316 | 'dict-from-generator', | |
318 | ), |
|
317 | ), | |
319 | (r'\.has_key\b', "dict.has_key is not available in Python 3+"), |
|
318 | (r'\.has_key\b', "dict.has_key is not available in Python 3+"), | |
320 | (r'\s<>\s', '<> operator is not available in Python 3+, use !='), |
|
319 | (r'\s<>\s', '<> operator is not available in Python 3+, use !='), | |
321 | (r'^\s*\t', "don't use tabs"), |
|
320 | (r'^\s*\t', "don't use tabs"), | |
322 | (r'\S;\s*\n', "semicolon"), |
|
321 | (r'\S;\s*\n', "semicolon"), | |
323 | (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"), |
|
322 | (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"), | |
324 | (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"), |
|
323 | (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"), | |
325 | (r'(\w|\)),\w', "missing whitespace after ,"), |
|
324 | (r'(\w|\)),\w', "missing whitespace after ,"), | |
326 | (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"), |
|
325 | (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"), | |
327 | (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), |
|
326 | (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), | |
328 | ( |
|
327 | ( | |
329 | ( |
|
328 | ( | |
330 | # a line ending with a colon, potentially with trailing comments |
|
329 | # a line ending with a colon, potentially with trailing comments | |
331 | r':([ \t]*#[^\n]*)?\n' |
|
330 | r':([ \t]*#[^\n]*)?\n' | |
332 | # one that is not a pass and not only a comment |
|
331 | # one that is not a pass and not only a comment | |
333 | r'(?P<indent>[ \t]+)[^#][^\n]+\n' |
|
332 | r'(?P<indent>[ \t]+)[^#][^\n]+\n' | |
334 | # more lines at the same indent level |
|
333 | # more lines at the same indent level | |
335 | r'((?P=indent)[^\n]+\n)*' |
|
334 | r'((?P=indent)[^\n]+\n)*' | |
336 | # a pass at the same indent level, which is bogus |
|
335 | # a pass at the same indent level, which is bogus | |
337 | r'(?P=indent)pass[ \t\n#]' |
|
336 | r'(?P=indent)pass[ \t\n#]' | |
338 | ), |
|
337 | ), | |
339 | 'omit superfluous pass', |
|
338 | 'omit superfluous pass', | |
340 | ), |
|
339 | ), | |
341 | (r'[^\n]\Z', "no trailing newline"), |
|
340 | (r'[^\n]\Z', "no trailing newline"), | |
342 | (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), |
|
341 | (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), | |
343 | ( |
|
342 | ( | |
344 | r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ', |
|
343 | r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ', | |
345 | "don't use camelcase in identifiers", |
|
344 | "don't use camelcase in identifiers", | |
346 | r'#.*camelcase-required', |
|
345 | r'#.*camelcase-required', | |
347 | ), |
|
346 | ), | |
348 | ( |
|
347 | ( | |
349 | r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+', |
|
348 | r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+', | |
350 | "linebreak after :", |
|
349 | "linebreak after :", | |
351 | ), |
|
350 | ), | |
352 | ( |
|
351 | ( | |
353 | r'class\s[^( \n]+:', |
|
352 | r'class\s[^( \n]+:', | |
354 | "old-style class, use class foo(object)", |
|
353 | "old-style class, use class foo(object)", | |
355 | r'#.*old-style', |
|
354 | r'#.*old-style', | |
356 | ), |
|
355 | ), | |
357 | ( |
|
356 | ( | |
358 | r'class\s[^( \n]+\(\):', |
|
357 | r'class\s[^( \n]+\(\):', | |
359 | "class foo() creates old style object, use class foo(object)", |
|
358 | "class foo() creates old style object, use class foo(object)", | |
360 | r'#.*old-style', |
|
359 | r'#.*old-style', | |
361 | ), |
|
360 | ), | |
362 | ( |
|
361 | ( | |
363 | r'\b(%s)\(' |
|
362 | r'\b(%s)\(' | |
364 | % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')), |
|
363 | % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')), | |
365 | "Python keyword is not a function", |
|
364 | "Python keyword is not a function", | |
366 | ), |
|
365 | ), | |
367 | # (r'class\s[A-Z][^\(]*\((?!Exception)', |
|
366 | # (r'class\s[A-Z][^\(]*\((?!Exception)', | |
368 | # "don't capitalize non-exception classes"), |
|
367 | # "don't capitalize non-exception classes"), | |
369 | # (r'in range\(', "use xrange"), |
|
368 | # (r'in range\(', "use xrange"), | |
370 | # (r'^\s*print\s+', "avoid using print in core and extensions"), |
|
369 | # (r'^\s*print\s+', "avoid using print in core and extensions"), | |
371 | (r'[\x80-\xff]', "non-ASCII character literal"), |
|
370 | (r'[\x80-\xff]', "non-ASCII character literal"), | |
372 | (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"), |
|
371 | (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"), | |
373 | ( |
|
372 | ( | |
374 | r'([\(\[][ \t]\S)|(\S[ \t][\)\]])', |
|
373 | r'([\(\[][ \t]\S)|(\S[ \t][\)\]])', | |
375 | "gratuitous whitespace in () or []", |
|
374 | "gratuitous whitespace in () or []", | |
376 | ), |
|
375 | ), | |
377 | # (r'\s\s=', "gratuitous whitespace before ="), |
|
376 | # (r'\s\s=', "gratuitous whitespace before ="), | |
378 | ( |
|
377 | ( | |
379 | r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S', |
|
378 | r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S', | |
380 | "missing whitespace around operator", |
|
379 | "missing whitespace around operator", | |
381 | ), |
|
380 | ), | |
382 | ( |
|
381 | ( | |
383 | r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s', |
|
382 | r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s', | |
384 | "missing whitespace around operator", |
|
383 | "missing whitespace around operator", | |
385 | ), |
|
384 | ), | |
386 | ( |
|
385 | ( | |
387 | r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S', |
|
386 | r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S', | |
388 | "missing whitespace around operator", |
|
387 | "missing whitespace around operator", | |
389 | ), |
|
388 | ), | |
390 | (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="), |
|
389 | (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="), | |
391 | ( |
|
390 | ( | |
392 | r'\([^()]*( =[^=]|[^<>!=]= )', |
|
391 | r'\([^()]*( =[^=]|[^<>!=]= )', | |
393 | "no whitespace around = for named parameters", |
|
392 | "no whitespace around = for named parameters", | |
394 | ), |
|
393 | ), | |
395 | ( |
|
394 | ( | |
396 | r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$', |
|
395 | r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$', | |
397 | "don't use old-style two-argument raise, use Exception(message)", |
|
396 | "don't use old-style two-argument raise, use Exception(message)", | |
398 | ), |
|
397 | ), | |
399 | (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"), |
|
398 | (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"), | |
400 | ( |
|
399 | ( | |
401 | r' [=!]=\s+(True|False|None)', |
|
400 | r' [=!]=\s+(True|False|None)', | |
402 | "comparison with singleton, use 'is' or 'is not' instead", |
|
401 | "comparison with singleton, use 'is' or 'is not' instead", | |
403 | ), |
|
402 | ), | |
404 | ( |
|
403 | ( | |
405 | r'^\s*(while|if) [01]:', |
|
404 | r'^\s*(while|if) [01]:', | |
406 | "use True/False for constant Boolean expression", |
|
405 | "use True/False for constant Boolean expression", | |
407 | ), |
|
406 | ), | |
408 | (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'), |
|
407 | (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'), | |
409 | ( |
|
408 | ( | |
410 | r'(?:(?<!def)\s+|\()hasattr\(', |
|
409 | r'(?:(?<!def)\s+|\()hasattr\(', | |
411 | 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) ' |
|
410 | 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) ' | |
412 | 'instead', |
|
411 | 'instead', | |
413 | r'#.*hasattr-py3-only', |
|
412 | r'#.*hasattr-py3-only', | |
414 | ), |
|
413 | ), | |
415 | (r'opener\([^)]*\).read\(', "use opener.read() instead"), |
|
414 | (r'opener\([^)]*\).read\(', "use opener.read() instead"), | |
416 | (r'opener\([^)]*\).write\(', "use opener.write() instead"), |
|
415 | (r'opener\([^)]*\).write\(', "use opener.write() instead"), | |
417 | (r'(?i)descend[e]nt', "the proper spelling is descendAnt"), |
|
416 | (r'(?i)descend[e]nt', "the proper spelling is descendAnt"), | |
418 | (r'\.debug\(\_', "don't mark debug messages for translation"), |
|
417 | (r'\.debug\(\_', "don't mark debug messages for translation"), | |
419 | (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"), |
|
418 | (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"), | |
420 | (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'), |
|
419 | (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'), | |
421 | ( |
|
420 | ( | |
422 | r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,', |
|
421 | r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,', | |
423 | 'legacy exception syntax; use "as" instead of ","', |
|
422 | 'legacy exception syntax; use "as" instead of ","', | |
424 | ), |
|
423 | ), | |
425 | (r'release\(.*wlock, .*lock\)', "wrong lock release order"), |
|
424 | (r'release\(.*wlock, .*lock\)', "wrong lock release order"), | |
426 | (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"), |
|
425 | (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"), | |
427 | ( |
|
426 | ( | |
428 | r'os\.path\.join\(.*, *(""|\'\')\)', |
|
427 | r'os\.path\.join\(.*, *(""|\'\')\)', | |
429 | "use pathutil.normasprefix(path) instead of os.path.join(path, '')", |
|
428 | "use pathutil.normasprefix(path) instead of os.path.join(path, '')", | |
430 | ), |
|
429 | ), | |
431 | (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'), |
|
430 | (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'), | |
432 | # XXX only catch mutable arguments on the first line of the definition |
|
431 | # XXX only catch mutable arguments on the first line of the definition | |
433 | (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"), |
|
432 | (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"), | |
434 | (r'\butil\.Abort\b', "directly use error.Abort"), |
|
433 | (r'\butil\.Abort\b', "directly use error.Abort"), | |
435 | ( |
|
434 | ( | |
436 | r'^@(\w*\.)?cachefunc', |
|
435 | r'^@(\w*\.)?cachefunc', | |
437 | "module-level @cachefunc is risky, please avoid", |
|
436 | "module-level @cachefunc is risky, please avoid", | |
438 | ), |
|
437 | ), | |
439 | ( |
|
438 | ( | |
440 | r'^import Queue', |
|
439 | r'^import Queue', | |
441 | "don't use Queue, use pycompat.queue.Queue + " |
|
440 | "don't use Queue, use pycompat.queue.Queue + " | |
442 | "pycompat.queue.Empty", |
|
441 | "pycompat.queue.Empty", | |
443 | ), |
|
442 | ), | |
444 | ( |
|
443 | ( | |
445 | r'^import cStringIO', |
|
444 | r'^import cStringIO', | |
446 | "don't use cStringIO.StringIO, use util.stringio", |
|
445 | "don't use cStringIO.StringIO, use util.stringio", | |
447 | ), |
|
446 | ), | |
448 | (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"), |
|
447 | (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"), | |
449 | ( |
|
448 | ( | |
450 | r'^import SocketServer', |
|
449 | r'^import SocketServer', | |
451 | "don't use SockerServer, use util.socketserver", |
|
450 | "don't use SockerServer, use util.socketserver", | |
452 | ), |
|
451 | ), | |
453 | (r'^import urlparse', "don't use urlparse, use util.urlreq"), |
|
452 | (r'^import urlparse', "don't use urlparse, use util.urlreq"), | |
454 | (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"), |
|
453 | (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"), | |
455 | (r'^import cPickle', "don't use cPickle, use util.pickle"), |
|
454 | (r'^import cPickle', "don't use cPickle, use util.pickle"), | |
456 | (r'^import pickle', "don't use pickle, use util.pickle"), |
|
455 | (r'^import pickle', "don't use pickle, use util.pickle"), | |
457 | (r'^import httplib', "don't use httplib, use util.httplib"), |
|
456 | (r'^import httplib', "don't use httplib, use util.httplib"), | |
458 | (r'^import BaseHTTPServer', "use util.httpserver instead"), |
|
457 | (r'^import BaseHTTPServer', "use util.httpserver instead"), | |
459 | ( |
|
458 | ( | |
460 | r'^(from|import) mercurial\.(cext|pure|cffi)', |
|
459 | r'^(from|import) mercurial\.(cext|pure|cffi)', | |
461 | "use mercurial.policy.importmod instead", |
|
460 | "use mercurial.policy.importmod instead", | |
462 | ), |
|
461 | ), | |
463 | (r'\.next\(\)', "don't use .next(), use next(...)"), |
|
462 | (r'\.next\(\)', "don't use .next(), use next(...)"), | |
464 | ( |
|
463 | ( | |
465 | r'([a-z]*).revision\(\1\.node\(', |
|
464 | r'([a-z]*).revision\(\1\.node\(', | |
466 | "don't convert rev to node before passing to revision(nodeorrev)", |
|
465 | "don't convert rev to node before passing to revision(nodeorrev)", | |
467 | ), |
|
466 | ), | |
468 | (r'platform\.system\(\)', "don't use platform.system(), use pycompat"), |
|
467 | (r'platform\.system\(\)', "don't use platform.system(), use pycompat"), | |
469 | ], |
|
468 | ], | |
470 | # warnings |
|
469 | # warnings | |
471 | [], |
|
470 | [], | |
472 | ] |
|
471 | ] | |
473 |
|
472 | |||
474 | # patterns to check normal *.py files |
|
473 | # patterns to check normal *.py files | |
475 | pypats = [ |
|
474 | pypats = [ | |
476 | [ |
|
475 | [ | |
477 | # Ideally, these should be placed in "commonpypats" for |
|
476 | # Ideally, these should be placed in "commonpypats" for | |
478 | # consistency of coding rules in Mercurial source tree. |
|
477 | # consistency of coding rules in Mercurial source tree. | |
479 | # But on the other hand, these are not so seriously required for |
|
478 | # But on the other hand, these are not so seriously required for | |
480 | # python code fragments embedded in test scripts. Fixing test |
|
479 | # python code fragments embedded in test scripts. Fixing test | |
481 | # scripts for these patterns requires many changes, and has less |
|
480 | # scripts for these patterns requires many changes, and has less | |
482 | # profit than effort. |
|
481 | # profit than effort. | |
483 | (r'raise Exception', "don't raise generic exceptions"), |
|
482 | (r'raise Exception', "don't raise generic exceptions"), | |
484 | (r'[\s\(](open|file)\([^)]*\)\.read\(', "use util.readfile() instead"), |
|
483 | (r'[\s\(](open|file)\([^)]*\)\.read\(', "use util.readfile() instead"), | |
485 | ( |
|
484 | ( | |
486 | r'[\s\(](open|file)\([^)]*\)\.write\(', |
|
485 | r'[\s\(](open|file)\([^)]*\)\.write\(', | |
487 | "use util.writefile() instead", |
|
486 | "use util.writefile() instead", | |
488 | ), |
|
487 | ), | |
489 | ( |
|
488 | ( | |
490 | r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))', |
|
489 | r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))', | |
491 | "always assign an opened file to a variable, and close it afterwards", |
|
490 | "always assign an opened file to a variable, and close it afterwards", | |
492 | ), |
|
491 | ), | |
493 | ( |
|
492 | ( | |
494 | r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))', |
|
493 | r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))', | |
495 | "always assign an opened file to a variable, and close it afterwards", |
|
494 | "always assign an opened file to a variable, and close it afterwards", | |
496 | ), |
|
495 | ), | |
497 | (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"), |
|
496 | (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"), | |
498 | (r'^import atexit', "don't use atexit, use ui.atexit"), |
|
497 | (r'^import atexit', "don't use atexit, use ui.atexit"), | |
499 | # rules depending on implementation of repquote() |
|
498 | # rules depending on implementation of repquote() | |
500 | ( |
|
499 | ( | |
501 | r' x+[xpqo%APM][\'"]\n\s+[\'"]x', |
|
500 | r' x+[xpqo%APM][\'"]\n\s+[\'"]x', | |
502 | 'string join across lines with no space', |
|
501 | 'string join across lines with no space', | |
503 | ), |
|
502 | ), | |
504 | ( |
|
503 | ( | |
505 | r'''(?x)ui\.(status|progress|write|note|warn)\( |
|
504 | r'''(?x)ui\.(status|progress|write|note|warn)\( | |
506 | [ \t\n#]* |
|
505 | [ \t\n#]* | |
507 | (?# any strings/comments might precede a string, which |
|
506 | (?# any strings/comments might precede a string, which | |
508 | # contains translatable message) |
|
507 | # contains translatable message) | |
509 | b?((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)* |
|
508 | b?((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)* | |
510 | (?# sequence consisting of below might precede translatable message |
|
509 | (?# sequence consisting of below might precede translatable message | |
511 | # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ... |
|
510 | # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ... | |
512 | # - escaped character: "\\", "\n", "\0" ... |
|
511 | # - escaped character: "\\", "\n", "\0" ... | |
513 | # - character other than '%', 'b' as '\', and 'x' as alphabet) |
|
512 | # - character other than '%', 'b' as '\', and 'x' as alphabet) | |
514 | (['"]|\'\'\'|""") |
|
513 | (['"]|\'\'\'|""") | |
515 | ((%([ n]?[PM]?([np]+|A))?x)|%%|b[bnx]|[ \nnpqAPMo])*x |
|
514 | ((%([ n]?[PM]?([np]+|A))?x)|%%|b[bnx]|[ \nnpqAPMo])*x | |
516 | (?# this regexp can't use [^...] style, |
|
515 | (?# this regexp can't use [^...] style, | |
517 | # because _preparepats forcibly adds "\n" into [^...], |
|
516 | # because _preparepats forcibly adds "\n" into [^...], | |
518 | # even though this regexp wants match it against "\n")''', |
|
517 | # even though this regexp wants match it against "\n")''', | |
519 | "missing _() in ui message (use () to hide false-positives)", |
|
518 | "missing _() in ui message (use () to hide false-positives)", | |
520 | ), |
|
519 | ), | |
521 | ] |
|
520 | ] | |
522 | + commonpypats[0], |
|
521 | + commonpypats[0], | |
523 | # warnings |
|
522 | # warnings | |
524 | [ |
|
523 | [ | |
525 | # rules depending on implementation of repquote() |
|
524 | # rules depending on implementation of repquote() | |
526 | (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"), |
|
525 | (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"), | |
527 | ] |
|
526 | ] | |
528 | + commonpypats[1], |
|
527 | + commonpypats[1], | |
529 | ] |
|
528 | ] | |
530 |
|
529 | |||
531 | # patterns to check *.py for embedded ones in test script |
|
530 | # patterns to check *.py for embedded ones in test script | |
532 | embeddedpypats = [ |
|
531 | embeddedpypats = [ | |
533 | [] + commonpypats[0], |
|
532 | [] + commonpypats[0], | |
534 | # warnings |
|
533 | # warnings | |
535 | [] + commonpypats[1], |
|
534 | [] + commonpypats[1], | |
536 | ] |
|
535 | ] | |
537 |
|
536 | |||
538 | # common filters to convert *.py |
|
537 | # common filters to convert *.py | |
539 | commonpyfilters = [ |
|
538 | commonpyfilters = [ | |
540 | ( |
|
539 | ( | |
541 | r"""(?msx)(?P<comment>\#.*?$)| |
|
540 | r"""(?msx)(?P<comment>\#.*?$)| | |
542 | ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!"))) |
|
541 | ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!"))) | |
543 | (?P<text>(([^\\]|\\.)*?)) |
|
542 | (?P<text>(([^\\]|\\.)*?)) | |
544 | (?P=quote))""", |
|
543 | (?P=quote))""", | |
545 | reppython, |
|
544 | reppython, | |
546 | ), |
|
545 | ), | |
547 | ] |
|
546 | ] | |
548 |
|
547 | |||
|
548 | # pattern only for mercurial and extensions | |||
|
549 | core_py_pats = [ | |||
|
550 | [ | |||
|
551 | # Windows tend to get confused about capitalization of the drive letter | |||
|
552 | # | |||
|
553 | # see mercurial.windows.abspath for details | |||
|
554 | ( | |||
|
555 | r'os\.path\.abspath', | |||
|
556 | "use util.abspath instead (windows)", | |||
|
557 | r'#.*re-exports', | |||
|
558 | ), | |||
|
559 | ], | |||
|
560 | # warnings | |||
|
561 | [], | |||
|
562 | ] | |||
|
563 | ||||
549 | # filters to convert normal *.py files |
|
564 | # filters to convert normal *.py files | |
550 | pyfilters = [] + commonpyfilters |
|
565 | pyfilters = [] + commonpyfilters | |
551 |
|
566 | |||
552 | # non-filter patterns |
|
567 | # non-filter patterns | |
553 | pynfpats = [ |
|
568 | pynfpats = [ | |
554 | [ |
|
569 | [ | |
555 | (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"), |
|
570 | (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"), | |
556 | (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"), |
|
571 | (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"), | |
557 | ( |
|
572 | ( | |
558 | r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]', |
|
573 | r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]', | |
559 | "use pycompat.isdarwin", |
|
574 | "use pycompat.isdarwin", | |
560 | ), |
|
575 | ), | |
561 | ], |
|
576 | ], | |
562 | # warnings |
|
577 | # warnings | |
563 | [], |
|
578 | [], | |
564 | ] |
|
579 | ] | |
565 |
|
580 | |||
566 | # filters to convert *.py for embedded ones in test script |
|
581 | # filters to convert *.py for embedded ones in test script | |
567 | embeddedpyfilters = [] + commonpyfilters |
|
582 | embeddedpyfilters = [] + commonpyfilters | |
568 |
|
583 | |||
569 | # extension non-filter patterns |
|
584 | # extension non-filter patterns | |
570 | pyextnfpats = [ |
|
585 | pyextnfpats = [ | |
571 | [(r'^"""\n?[A-Z]', "don't capitalize docstring title")], |
|
586 | [(r'^"""\n?[A-Z]', "don't capitalize docstring title")], | |
572 | # warnings |
|
587 | # warnings | |
573 | [], |
|
588 | [], | |
574 | ] |
|
589 | ] | |
575 |
|
590 | |||
576 | txtfilters = [] |
|
591 | txtfilters = [] | |
577 |
|
592 | |||
578 | txtpats = [ |
|
593 | txtpats = [ | |
579 | [ |
|
594 | [ | |
580 | (r'\s$', 'trailing whitespace'), |
|
595 | (r'\s$', 'trailing whitespace'), | |
581 | ('.. note::[ \n][^\n]', 'add two newlines after note::'), |
|
596 | ('.. note::[ \n][^\n]', 'add two newlines after note::'), | |
582 | ], |
|
597 | ], | |
583 | [], |
|
598 | [], | |
584 | ] |
|
599 | ] | |
585 |
|
600 | |||
586 | cpats = [ |
|
601 | cpats = [ | |
587 | [ |
|
602 | [ | |
588 | (r'//', "don't use //-style comments"), |
|
603 | (r'//', "don't use //-style comments"), | |
589 | (r'\S\t', "don't use tabs except for indent"), |
|
604 | (r'\S\t', "don't use tabs except for indent"), | |
590 | (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), |
|
605 | (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), | |
591 | (r'(while|if|do|for)\(', "use space after while/if/do/for"), |
|
606 | (r'(while|if|do|for)\(', "use space after while/if/do/for"), | |
592 | (r'return\(', "return is not a function"), |
|
607 | (r'return\(', "return is not a function"), | |
593 | (r' ;', "no space before ;"), |
|
608 | (r' ;', "no space before ;"), | |
594 | (r'[^;] \)', "no space before )"), |
|
609 | (r'[^;] \)', "no space before )"), | |
595 | (r'[)][{]', "space between ) and {"), |
|
610 | (r'[)][{]', "space between ) and {"), | |
596 | (r'\w+\* \w+', "use int *foo, not int* foo"), |
|
611 | (r'\w+\* \w+', "use int *foo, not int* foo"), | |
597 | (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"), |
|
612 | (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"), | |
598 | (r'\w+ (\+\+|--)', "use foo++, not foo ++"), |
|
613 | (r'\w+ (\+\+|--)', "use foo++, not foo ++"), | |
599 | (r'\w,\w', "missing whitespace after ,"), |
|
614 | (r'\w,\w', "missing whitespace after ,"), | |
600 | (r'^[^#]\w[+/*]\w', "missing whitespace in expression"), |
|
615 | (r'^[^#]\w[+/*]\w', "missing whitespace in expression"), | |
601 | (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), |
|
616 | (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), | |
602 | (r'^#\s+\w', "use #foo, not # foo"), |
|
617 | (r'^#\s+\w', "use #foo, not # foo"), | |
603 | (r'[^\n]\Z', "no trailing newline"), |
|
618 | (r'[^\n]\Z', "no trailing newline"), | |
604 | (r'^\s*#import\b', "use only #include in standard C code"), |
|
619 | (r'^\s*#import\b', "use only #include in standard C code"), | |
605 | (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"), |
|
620 | (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"), | |
606 | (r'strcat\(', "don't use strcat"), |
|
621 | (r'strcat\(', "don't use strcat"), | |
607 | # rules depending on implementation of repquote() |
|
622 | # rules depending on implementation of repquote() | |
608 | ], |
|
623 | ], | |
609 | # warnings |
|
624 | # warnings | |
610 | [ |
|
625 | [ | |
611 | # rules depending on implementation of repquote() |
|
626 | # rules depending on implementation of repquote() | |
612 | ], |
|
627 | ], | |
613 | ] |
|
628 | ] | |
614 |
|
629 | |||
615 | cfilters = [ |
|
630 | cfilters = [ | |
616 | (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment), |
|
631 | (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment), | |
617 | (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote), |
|
632 | (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote), | |
618 | (r'''(#\s*include\s+<)([^>]+)>''', repinclude), |
|
633 | (r'''(#\s*include\s+<)([^>]+)>''', repinclude), | |
619 | (r'(\()([^)]+\))', repcallspaces), |
|
634 | (r'(\()([^)]+\))', repcallspaces), | |
620 | ] |
|
635 | ] | |
621 |
|
636 | |||
622 | inutilpats = [ |
|
637 | inutilpats = [ | |
623 | [ |
|
638 | [ | |
624 | (r'\bui\.', "don't use ui in util"), |
|
639 | (r'\bui\.', "don't use ui in util"), | |
625 | ], |
|
640 | ], | |
626 | # warnings |
|
641 | # warnings | |
627 | [], |
|
642 | [], | |
628 | ] |
|
643 | ] | |
629 |
|
644 | |||
630 | inrevlogpats = [ |
|
645 | inrevlogpats = [ | |
631 | [ |
|
646 | [ | |
632 | (r'\brepo\.', "don't use repo in revlog"), |
|
647 | (r'\brepo\.', "don't use repo in revlog"), | |
633 | ], |
|
648 | ], | |
634 | # warnings |
|
649 | # warnings | |
635 | [], |
|
650 | [], | |
636 | ] |
|
651 | ] | |
637 |
|
652 | |||
638 | webtemplatefilters = [] |
|
653 | webtemplatefilters = [] | |
639 |
|
654 | |||
640 | webtemplatepats = [ |
|
655 | webtemplatepats = [ | |
641 | [], |
|
656 | [], | |
642 | [ |
|
657 | [ | |
643 | ( |
|
658 | ( | |
644 | r'{desc(\|(?!websub|firstline)[^\|]*)+}', |
|
659 | r'{desc(\|(?!websub|firstline)[^\|]*)+}', | |
645 | 'follow desc keyword with either firstline or websub', |
|
660 | 'follow desc keyword with either firstline or websub', | |
646 | ), |
|
661 | ), | |
647 | ], |
|
662 | ], | |
648 | ] |
|
663 | ] | |
649 |
|
664 | |||
650 | allfilesfilters = [] |
|
665 | allfilesfilters = [] | |
651 |
|
666 | |||
652 | allfilespats = [ |
|
667 | allfilespats = [ | |
653 | [ |
|
668 | [ | |
654 | ( |
|
669 | ( | |
655 | r'(http|https)://[a-zA-Z0-9./]*selenic.com/', |
|
670 | r'(http|https)://[a-zA-Z0-9./]*selenic.com/', | |
656 | 'use mercurial-scm.org domain URL', |
|
671 | 'use mercurial-scm.org domain URL', | |
657 | ), |
|
672 | ), | |
658 | ( |
|
673 | ( | |
659 | r'mercurial@selenic\.com', |
|
674 | r'mercurial@selenic\.com', | |
660 | 'use mercurial-scm.org domain for mercurial ML address', |
|
675 | 'use mercurial-scm.org domain for mercurial ML address', | |
661 | ), |
|
676 | ), | |
662 | ( |
|
677 | ( | |
663 | r'mercurial-devel@selenic\.com', |
|
678 | r'mercurial-devel@selenic\.com', | |
664 | 'use mercurial-scm.org domain for mercurial-devel ML address', |
|
679 | 'use mercurial-scm.org domain for mercurial-devel ML address', | |
665 | ), |
|
680 | ), | |
666 | ], |
|
681 | ], | |
667 | # warnings |
|
682 | # warnings | |
668 | [], |
|
683 | [], | |
669 | ] |
|
684 | ] | |
670 |
|
685 | |||
671 | py3pats = [ |
|
686 | py3pats = [ | |
672 | [ |
|
687 | [ | |
673 | ( |
|
688 | ( | |
674 | r'os\.environ', |
|
689 | r'os\.environ', | |
675 | "use encoding.environ instead (py3)", |
|
690 | "use encoding.environ instead (py3)", | |
676 | r'#.*re-exports', |
|
691 | r'#.*re-exports', | |
677 | ), |
|
692 | ), | |
678 | (r'os\.name', "use pycompat.osname instead (py3)"), |
|
693 | (r'os\.name', "use pycompat.osname instead (py3)"), | |
679 | (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'), |
|
694 | (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'), | |
680 | (r'os\.sep', "use pycompat.ossep instead (py3)"), |
|
695 | (r'os\.sep', "use pycompat.ossep instead (py3)"), | |
681 | (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"), |
|
696 | (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"), | |
682 | (r'os\.altsep', "use pycompat.osaltsep instead (py3)"), |
|
697 | (r'os\.altsep', "use pycompat.osaltsep instead (py3)"), | |
683 | (r'sys\.platform', "use pycompat.sysplatform instead (py3)"), |
|
698 | (r'sys\.platform', "use pycompat.sysplatform instead (py3)"), | |
684 | (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"), |
|
699 | (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"), | |
685 | (r'os\.getenv', "use encoding.environ.get instead"), |
|
700 | (r'os\.getenv', "use encoding.environ.get instead"), | |
686 | (r'os\.setenv', "modifying the environ dict is not preferred"), |
|
701 | (r'os\.setenv', "modifying the environ dict is not preferred"), | |
687 | (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"), |
|
702 | (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"), | |
688 | ], |
|
703 | ], | |
689 | # warnings |
|
704 | # warnings | |
690 | [], |
|
705 | [], | |
691 | ] |
|
706 | ] | |
692 |
|
707 | |||
693 | checks = [ |
|
708 | checks = [ | |
694 | ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats), |
|
709 | ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats), | |
695 | ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats), |
|
710 | ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats), | |
696 | ('python', r'.*hgext.*\.py$', '', [], pyextnfpats), |
|
711 | ('python', r'.*hgext.*\.py$', '', [], pyextnfpats), | |
697 | ( |
|
712 | ( | |
698 | 'python 3', |
|
713 | 'python 3', | |
699 | r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py', |
|
714 | r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py', | |
700 | '', |
|
715 | '', | |
701 | pyfilters, |
|
716 | pyfilters, | |
702 | py3pats, |
|
717 | py3pats, | |
703 | ), |
|
718 | ), | |
|
719 | ( | |||
|
720 | 'core files', | |||
|
721 | r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py', | |||
|
722 | '', | |||
|
723 | pyfilters, | |||
|
724 | core_py_pats, | |||
|
725 | ), | |||
704 | ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats), |
|
726 | ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats), | |
705 | ('c', r'.*\.[ch]$', '', cfilters, cpats), |
|
727 | ('c', r'.*\.[ch]$', '', cfilters, cpats), | |
706 | ('unified test', r'.*\.t$', '', utestfilters, utestpats), |
|
728 | ('unified test', r'.*\.t$', '', utestfilters, utestpats), | |
707 | ( |
|
729 | ( | |
708 | 'layering violation repo in revlog', |
|
730 | 'layering violation repo in revlog', | |
709 | r'mercurial/revlog\.py', |
|
731 | r'mercurial/revlog\.py', | |
710 | '', |
|
732 | '', | |
711 | pyfilters, |
|
733 | pyfilters, | |
712 | inrevlogpats, |
|
734 | inrevlogpats, | |
713 | ), |
|
735 | ), | |
714 | ( |
|
736 | ( | |
715 | 'layering violation ui in util', |
|
737 | 'layering violation ui in util', | |
716 | r'mercurial/util\.py', |
|
738 | r'mercurial/util\.py', | |
717 | '', |
|
739 | '', | |
718 | pyfilters, |
|
740 | pyfilters, | |
719 | inutilpats, |
|
741 | inutilpats, | |
720 | ), |
|
742 | ), | |
721 | ('txt', r'.*\.txt$', '', txtfilters, txtpats), |
|
743 | ('txt', r'.*\.txt$', '', txtfilters, txtpats), | |
722 | ( |
|
744 | ( | |
723 | 'web template', |
|
745 | 'web template', | |
724 | r'mercurial/templates/.*\.tmpl', |
|
746 | r'mercurial/templates/.*\.tmpl', | |
725 | '', |
|
747 | '', | |
726 | webtemplatefilters, |
|
748 | webtemplatefilters, | |
727 | webtemplatepats, |
|
749 | webtemplatepats, | |
728 | ), |
|
750 | ), | |
729 | ('all except for .po', r'.*(?<!\.po)$', '', allfilesfilters, allfilespats), |
|
751 | ('all except for .po', r'.*(?<!\.po)$', '', allfilesfilters, allfilespats), | |
730 | ] |
|
752 | ] | |
731 |
|
753 | |||
732 | # (desc, |
|
754 | # (desc, | |
733 | # func to pick up embedded code fragments, |
|
755 | # func to pick up embedded code fragments, | |
734 | # list of patterns to convert target files |
|
756 | # list of patterns to convert target files | |
735 | # list of patterns to detect errors/warnings) |
|
757 | # list of patterns to detect errors/warnings) | |
736 | embeddedchecks = [ |
|
758 | embeddedchecks = [ | |
737 | ( |
|
759 | ( | |
738 | 'embedded python', |
|
760 | 'embedded python', | |
739 | testparseutil.pyembedded, |
|
761 | testparseutil.pyembedded, | |
740 | embeddedpyfilters, |
|
762 | embeddedpyfilters, | |
741 | embeddedpypats, |
|
763 | embeddedpypats, | |
742 | ) |
|
764 | ) | |
743 | ] |
|
765 | ] | |
744 |
|
766 | |||
745 |
|
767 | |||
746 | def _preparepats(): |
|
768 | def _preparepats(): | |
747 | def preparefailandwarn(failandwarn): |
|
769 | def preparefailandwarn(failandwarn): | |
748 | for pats in failandwarn: |
|
770 | for pats in failandwarn: | |
749 | for i, pseq in enumerate(pats): |
|
771 | for i, pseq in enumerate(pats): | |
750 | # fix-up regexes for multi-line searches |
|
772 | # fix-up regexes for multi-line searches | |
751 | p = pseq[0] |
|
773 | p = pseq[0] | |
752 | # \s doesn't match \n (done in two steps) |
|
774 | # \s doesn't match \n (done in two steps) | |
753 | # first, we replace \s that appears in a set already |
|
775 | # first, we replace \s that appears in a set already | |
754 | p = re.sub(r'\[\\s', r'[ \\t', p) |
|
776 | p = re.sub(r'\[\\s', r'[ \\t', p) | |
755 | # now we replace other \s instances. |
|
777 | # now we replace other \s instances. | |
756 | p = re.sub(r'(?<!(\\|\[))\\s', r'[ \\t]', p) |
|
778 | p = re.sub(r'(?<!(\\|\[))\\s', r'[ \\t]', p) | |
757 | # [^...] doesn't match newline |
|
779 | # [^...] doesn't match newline | |
758 | p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p) |
|
780 | p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p) | |
759 |
|
781 | |||
760 | pats[i] = (re.compile(p, re.MULTILINE),) + pseq[1:] |
|
782 | pats[i] = (re.compile(p, re.MULTILINE),) + pseq[1:] | |
761 |
|
783 | |||
762 | def preparefilters(filters): |
|
784 | def preparefilters(filters): | |
763 | for i, flt in enumerate(filters): |
|
785 | for i, flt in enumerate(filters): | |
764 | filters[i] = re.compile(flt[0]), flt[1] |
|
786 | filters[i] = re.compile(flt[0]), flt[1] | |
765 |
|
787 | |||
766 | for cs in (checks, embeddedchecks): |
|
788 | for cs in (checks, embeddedchecks): | |
767 | for c in cs: |
|
789 | for c in cs: | |
768 | failandwarn = c[-1] |
|
790 | failandwarn = c[-1] | |
769 | preparefailandwarn(failandwarn) |
|
791 | preparefailandwarn(failandwarn) | |
770 |
|
792 | |||
771 | filters = c[-2] |
|
793 | filters = c[-2] | |
772 | preparefilters(filters) |
|
794 | preparefilters(filters) | |
773 |
|
795 | |||
774 |
|
796 | |||
775 | class norepeatlogger(object): |
|
797 | class norepeatlogger(object): | |
776 | def __init__(self): |
|
798 | def __init__(self): | |
777 | self._lastseen = None |
|
799 | self._lastseen = None | |
778 |
|
800 | |||
779 | def log(self, fname, lineno, line, msg, blame): |
|
801 | def log(self, fname, lineno, line, msg, blame): | |
780 | """print error related a to given line of a given file. |
|
802 | """print error related a to given line of a given file. | |
781 |
|
803 | |||
782 | The faulty line will also be printed but only once in the case |
|
804 | The faulty line will also be printed but only once in the case | |
783 | of multiple errors. |
|
805 | of multiple errors. | |
784 |
|
806 | |||
785 | :fname: filename |
|
807 | :fname: filename | |
786 | :lineno: line number |
|
808 | :lineno: line number | |
787 | :line: actual content of the line |
|
809 | :line: actual content of the line | |
788 | :msg: error message |
|
810 | :msg: error message | |
789 | """ |
|
811 | """ | |
790 | msgid = fname, lineno, line |
|
812 | msgid = fname, lineno, line | |
791 | if msgid != self._lastseen: |
|
813 | if msgid != self._lastseen: | |
792 | if blame: |
|
814 | if blame: | |
793 | print("%s:%d (%s):" % (fname, lineno, blame)) |
|
815 | print("%s:%d (%s):" % (fname, lineno, blame)) | |
794 | else: |
|
816 | else: | |
795 | print("%s:%d:" % (fname, lineno)) |
|
817 | print("%s:%d:" % (fname, lineno)) | |
796 | print(" > %s" % line) |
|
818 | print(" > %s" % line) | |
797 | self._lastseen = msgid |
|
819 | self._lastseen = msgid | |
798 | print(" " + msg) |
|
820 | print(" " + msg) | |
799 |
|
821 | |||
800 |
|
822 | |||
801 | _defaultlogger = norepeatlogger() |
|
823 | _defaultlogger = norepeatlogger() | |
802 |
|
824 | |||
803 |
|
825 | |||
804 | def getblame(f): |
|
826 | def getblame(f): | |
805 | lines = [] |
|
827 | lines = [] | |
806 | for l in os.popen('hg annotate -un %s' % f): |
|
828 | for l in os.popen('hg annotate -un %s' % f): | |
807 | start, line = l.split(':', 1) |
|
829 | start, line = l.split(':', 1) | |
808 | user, rev = start.split() |
|
830 | user, rev = start.split() | |
809 | lines.append((line[1:-1], user, rev)) |
|
831 | lines.append((line[1:-1], user, rev)) | |
810 | return lines |
|
832 | return lines | |
811 |
|
833 | |||
812 |
|
834 | |||
813 | def checkfile( |
|
835 | def checkfile( | |
814 | f, |
|
836 | f, | |
815 | logfunc=_defaultlogger.log, |
|
837 | logfunc=_defaultlogger.log, | |
816 | maxerr=None, |
|
838 | maxerr=None, | |
817 | warnings=False, |
|
839 | warnings=False, | |
818 | blame=False, |
|
840 | blame=False, | |
819 | debug=False, |
|
841 | debug=False, | |
820 | lineno=True, |
|
842 | lineno=True, | |
821 | ): |
|
843 | ): | |
822 | """checks style and portability of a given file |
|
844 | """checks style and portability of a given file | |
823 |
|
845 | |||
824 | :f: filepath |
|
846 | :f: filepath | |
825 | :logfunc: function used to report error |
|
847 | :logfunc: function used to report error | |
826 | logfunc(filename, linenumber, linecontent, errormessage) |
|
848 | logfunc(filename, linenumber, linecontent, errormessage) | |
827 | :maxerr: number of error to display before aborting. |
|
849 | :maxerr: number of error to display before aborting. | |
828 | Set to false (default) to report all errors |
|
850 | Set to false (default) to report all errors | |
829 |
|
851 | |||
830 | return True if no error is found, False otherwise. |
|
852 | return True if no error is found, False otherwise. | |
831 | """ |
|
853 | """ | |
832 | result = True |
|
854 | result = True | |
833 |
|
855 | |||
834 | try: |
|
856 | try: | |
835 | with opentext(f) as fp: |
|
857 | with opentext(f) as fp: | |
836 | try: |
|
858 | try: | |
837 | pre = fp.read() |
|
859 | pre = fp.read() | |
838 | except UnicodeDecodeError as e: |
|
860 | except UnicodeDecodeError as e: | |
839 | print("%s while reading %s" % (e, f)) |
|
861 | print("%s while reading %s" % (e, f)) | |
840 | return result |
|
862 | return result | |
841 | except IOError as e: |
|
863 | except IOError as e: | |
842 | print("Skipping %s, %s" % (f, str(e).split(':', 1)[0])) |
|
864 | print("Skipping %s, %s" % (f, str(e).split(':', 1)[0])) | |
843 | return result |
|
865 | return result | |
844 |
|
866 | |||
845 | # context information shared while single checkfile() invocation |
|
867 | # context information shared while single checkfile() invocation | |
846 | context = {'blamecache': None} |
|
868 | context = {'blamecache': None} | |
847 |
|
869 | |||
848 | for name, match, magic, filters, pats in checks: |
|
870 | for name, match, magic, filters, pats in checks: | |
849 | if debug: |
|
871 | if debug: | |
850 | print(name, f) |
|
872 | print(name, f) | |
851 | if not (re.match(match, f) or (magic and re.search(magic, pre))): |
|
873 | if not (re.match(match, f) or (magic and re.search(magic, pre))): | |
852 | if debug: |
|
874 | if debug: | |
853 | print( |
|
875 | print( | |
854 | "Skipping %s for %s it doesn't match %s" % (name, match, f) |
|
876 | "Skipping %s for %s it doesn't match %s" % (name, match, f) | |
855 | ) |
|
877 | ) | |
856 | continue |
|
878 | continue | |
857 | if "no-" "check-code" in pre: |
|
879 | if "no-" "check-code" in pre: | |
858 | # If you're looking at this line, it's because a file has: |
|
880 | # If you're looking at this line, it's because a file has: | |
859 | # no- check- code |
|
881 | # no- check- code | |
860 | # but the reason to output skipping is to make life for |
|
882 | # but the reason to output skipping is to make life for | |
861 | # tests easier. So, instead of writing it with a normal |
|
883 | # tests easier. So, instead of writing it with a normal | |
862 | # spelling, we write it with the expected spelling from |
|
884 | # spelling, we write it with the expected spelling from | |
863 | # tests/test-check-code.t |
|
885 | # tests/test-check-code.t | |
864 | print("Skipping %s it has no-che?k-code (glob)" % f) |
|
886 | print("Skipping %s it has no-che?k-code (glob)" % f) | |
865 | return "Skip" # skip checking this file |
|
887 | return "Skip" # skip checking this file | |
866 |
|
888 | |||
867 | fc = _checkfiledata( |
|
889 | fc = _checkfiledata( | |
868 | name, |
|
890 | name, | |
869 | f, |
|
891 | f, | |
870 | pre, |
|
892 | pre, | |
871 | filters, |
|
893 | filters, | |
872 | pats, |
|
894 | pats, | |
873 | context, |
|
895 | context, | |
874 | logfunc, |
|
896 | logfunc, | |
875 | maxerr, |
|
897 | maxerr, | |
876 | warnings, |
|
898 | warnings, | |
877 | blame, |
|
899 | blame, | |
878 | debug, |
|
900 | debug, | |
879 | lineno, |
|
901 | lineno, | |
880 | ) |
|
902 | ) | |
881 | if fc: |
|
903 | if fc: | |
882 | result = False |
|
904 | result = False | |
883 |
|
905 | |||
884 | if f.endswith('.t') and "no-" "check-code" not in pre: |
|
906 | if f.endswith('.t') and "no-" "check-code" not in pre: | |
885 | if debug: |
|
907 | if debug: | |
886 | print("Checking embedded code in %s" % f) |
|
908 | print("Checking embedded code in %s" % f) | |
887 |
|
909 | |||
888 | prelines = pre.splitlines() |
|
910 | prelines = pre.splitlines() | |
889 | embeddederros = [] |
|
911 | embeddederros = [] | |
890 | for name, embedded, filters, pats in embeddedchecks: |
|
912 | for name, embedded, filters, pats in embeddedchecks: | |
891 | # "reset curmax at each repetition" treats maxerr as "max |
|
913 | # "reset curmax at each repetition" treats maxerr as "max | |
892 | # nubmer of errors in an actual file per entry of |
|
914 | # nubmer of errors in an actual file per entry of | |
893 | # (embedded)checks" |
|
915 | # (embedded)checks" | |
894 | curmaxerr = maxerr |
|
916 | curmaxerr = maxerr | |
895 |
|
917 | |||
896 | for found in embedded(f, prelines, embeddederros): |
|
918 | for found in embedded(f, prelines, embeddederros): | |
897 | filename, starts, ends, code = found |
|
919 | filename, starts, ends, code = found | |
898 | fc = _checkfiledata( |
|
920 | fc = _checkfiledata( | |
899 | name, |
|
921 | name, | |
900 | f, |
|
922 | f, | |
901 | code, |
|
923 | code, | |
902 | filters, |
|
924 | filters, | |
903 | pats, |
|
925 | pats, | |
904 | context, |
|
926 | context, | |
905 | logfunc, |
|
927 | logfunc, | |
906 | curmaxerr, |
|
928 | curmaxerr, | |
907 | warnings, |
|
929 | warnings, | |
908 | blame, |
|
930 | blame, | |
909 | debug, |
|
931 | debug, | |
910 | lineno, |
|
932 | lineno, | |
911 | offset=starts - 1, |
|
933 | offset=starts - 1, | |
912 | ) |
|
934 | ) | |
913 | if fc: |
|
935 | if fc: | |
914 | result = False |
|
936 | result = False | |
915 | if curmaxerr: |
|
937 | if curmaxerr: | |
916 | if fc >= curmaxerr: |
|
938 | if fc >= curmaxerr: | |
917 | break |
|
939 | break | |
918 | curmaxerr -= fc |
|
940 | curmaxerr -= fc | |
919 |
|
941 | |||
920 | return result |
|
942 | return result | |
921 |
|
943 | |||
922 |
|
944 | |||
923 | def _checkfiledata( |
|
945 | def _checkfiledata( | |
924 | name, |
|
946 | name, | |
925 | f, |
|
947 | f, | |
926 | filedata, |
|
948 | filedata, | |
927 | filters, |
|
949 | filters, | |
928 | pats, |
|
950 | pats, | |
929 | context, |
|
951 | context, | |
930 | logfunc, |
|
952 | logfunc, | |
931 | maxerr, |
|
953 | maxerr, | |
932 | warnings, |
|
954 | warnings, | |
933 | blame, |
|
955 | blame, | |
934 | debug, |
|
956 | debug, | |
935 | lineno, |
|
957 | lineno, | |
936 | offset=None, |
|
958 | offset=None, | |
937 | ): |
|
959 | ): | |
938 | """Execute actual error check for file data |
|
960 | """Execute actual error check for file data | |
939 |
|
961 | |||
940 | :name: of the checking category |
|
962 | :name: of the checking category | |
941 | :f: filepath |
|
963 | :f: filepath | |
942 | :filedata: content of a file |
|
964 | :filedata: content of a file | |
943 | :filters: to be applied before checking |
|
965 | :filters: to be applied before checking | |
944 | :pats: to detect errors |
|
966 | :pats: to detect errors | |
945 | :context: a dict of information shared while single checkfile() invocation |
|
967 | :context: a dict of information shared while single checkfile() invocation | |
946 | Valid keys: 'blamecache'. |
|
968 | Valid keys: 'blamecache'. | |
947 | :logfunc: function used to report error |
|
969 | :logfunc: function used to report error | |
948 | logfunc(filename, linenumber, linecontent, errormessage) |
|
970 | logfunc(filename, linenumber, linecontent, errormessage) | |
949 | :maxerr: number of error to display before aborting, or False to |
|
971 | :maxerr: number of error to display before aborting, or False to | |
950 | report all errors |
|
972 | report all errors | |
951 | :warnings: whether warning level checks should be applied |
|
973 | :warnings: whether warning level checks should be applied | |
952 | :blame: whether blame information should be displayed at error reporting |
|
974 | :blame: whether blame information should be displayed at error reporting | |
953 | :debug: whether debug information should be displayed |
|
975 | :debug: whether debug information should be displayed | |
954 | :lineno: whether lineno should be displayed at error reporting |
|
976 | :lineno: whether lineno should be displayed at error reporting | |
955 | :offset: line number offset of 'filedata' in 'f' for checking |
|
977 | :offset: line number offset of 'filedata' in 'f' for checking | |
956 | an embedded code fragment, or None (offset=0 is different |
|
978 | an embedded code fragment, or None (offset=0 is different | |
957 | from offset=None) |
|
979 | from offset=None) | |
958 |
|
980 | |||
959 | returns number of detected errors. |
|
981 | returns number of detected errors. | |
960 | """ |
|
982 | """ | |
961 | blamecache = context['blamecache'] |
|
983 | blamecache = context['blamecache'] | |
962 | if offset is None: |
|
984 | if offset is None: | |
963 | lineoffset = 0 |
|
985 | lineoffset = 0 | |
964 | else: |
|
986 | else: | |
965 | lineoffset = offset |
|
987 | lineoffset = offset | |
966 |
|
988 | |||
967 | fc = 0 |
|
989 | fc = 0 | |
968 | pre = post = filedata |
|
990 | pre = post = filedata | |
969 |
|
991 | |||
970 | if True: # TODO: get rid of this redundant 'if' block |
|
992 | if True: # TODO: get rid of this redundant 'if' block | |
971 | for p, r in filters: |
|
993 | for p, r in filters: | |
972 | post = re.sub(p, r, post) |
|
994 | post = re.sub(p, r, post) | |
973 | nerrs = len(pats[0]) # nerr elements are errors |
|
995 | nerrs = len(pats[0]) # nerr elements are errors | |
974 | if warnings: |
|
996 | if warnings: | |
975 | pats = pats[0] + pats[1] |
|
997 | pats = pats[0] + pats[1] | |
976 | else: |
|
998 | else: | |
977 | pats = pats[0] |
|
999 | pats = pats[0] | |
978 | # print post # uncomment to show filtered version |
|
1000 | # print post # uncomment to show filtered version | |
979 |
|
1001 | |||
980 | if debug: |
|
1002 | if debug: | |
981 | print("Checking %s for %s" % (name, f)) |
|
1003 | print("Checking %s for %s" % (name, f)) | |
982 |
|
1004 | |||
983 | prelines = None |
|
1005 | prelines = None | |
984 | errors = [] |
|
1006 | errors = [] | |
985 | for i, pat in enumerate(pats): |
|
1007 | for i, pat in enumerate(pats): | |
986 | if len(pat) == 3: |
|
1008 | if len(pat) == 3: | |
987 | p, msg, ignore = pat |
|
1009 | p, msg, ignore = pat | |
988 | else: |
|
1010 | else: | |
989 | p, msg = pat |
|
1011 | p, msg = pat | |
990 | ignore = None |
|
1012 | ignore = None | |
991 | if i >= nerrs: |
|
1013 | if i >= nerrs: | |
992 | msg = "warning: " + msg |
|
1014 | msg = "warning: " + msg | |
993 |
|
1015 | |||
994 | pos = 0 |
|
1016 | pos = 0 | |
995 | n = 0 |
|
1017 | n = 0 | |
996 | for m in p.finditer(post): |
|
1018 | for m in p.finditer(post): | |
997 | if prelines is None: |
|
1019 | if prelines is None: | |
998 | prelines = pre.splitlines() |
|
1020 | prelines = pre.splitlines() | |
999 | postlines = post.splitlines(True) |
|
1021 | postlines = post.splitlines(True) | |
1000 |
|
1022 | |||
1001 | start = m.start() |
|
1023 | start = m.start() | |
1002 | while n < len(postlines): |
|
1024 | while n < len(postlines): | |
1003 | step = len(postlines[n]) |
|
1025 | step = len(postlines[n]) | |
1004 | if pos + step > start: |
|
1026 | if pos + step > start: | |
1005 | break |
|
1027 | break | |
1006 | pos += step |
|
1028 | pos += step | |
1007 | n += 1 |
|
1029 | n += 1 | |
1008 | l = prelines[n] |
|
1030 | l = prelines[n] | |
1009 |
|
1031 | |||
1010 | if ignore and re.search(ignore, l, re.MULTILINE): |
|
1032 | if ignore and re.search(ignore, l, re.MULTILINE): | |
1011 | if debug: |
|
1033 | if debug: | |
1012 | print( |
|
1034 | print( | |
1013 | "Skipping %s for %s:%s (ignore pattern)" |
|
1035 | "Skipping %s for %s:%s (ignore pattern)" | |
1014 | % (name, f, (n + lineoffset)) |
|
1036 | % (name, f, (n + lineoffset)) | |
1015 | ) |
|
1037 | ) | |
1016 | continue |
|
1038 | continue | |
1017 | bd = "" |
|
1039 | bd = "" | |
1018 | if blame: |
|
1040 | if blame: | |
1019 | bd = 'working directory' |
|
1041 | bd = 'working directory' | |
1020 | if blamecache is None: |
|
1042 | if blamecache is None: | |
1021 | blamecache = getblame(f) |
|
1043 | blamecache = getblame(f) | |
1022 | context['blamecache'] = blamecache |
|
1044 | context['blamecache'] = blamecache | |
1023 | if (n + lineoffset) < len(blamecache): |
|
1045 | if (n + lineoffset) < len(blamecache): | |
1024 | bl, bu, br = blamecache[(n + lineoffset)] |
|
1046 | bl, bu, br = blamecache[(n + lineoffset)] | |
1025 | if offset is None and bl == l: |
|
1047 | if offset is None and bl == l: | |
1026 | bd = '%s@%s' % (bu, br) |
|
1048 | bd = '%s@%s' % (bu, br) | |
1027 | elif offset is not None and bl.endswith(l): |
|
1049 | elif offset is not None and bl.endswith(l): | |
1028 | # "offset is not None" means "checking |
|
1050 | # "offset is not None" means "checking | |
1029 | # embedded code fragment". In this case, |
|
1051 | # embedded code fragment". In this case, | |
1030 | # "l" does not have information about the |
|
1052 | # "l" does not have information about the | |
1031 | # beginning of an *original* line in the |
|
1053 | # beginning of an *original* line in the | |
1032 | # file (e.g. ' > '). |
|
1054 | # file (e.g. ' > '). | |
1033 | # Therefore, use "str.endswith()", and |
|
1055 | # Therefore, use "str.endswith()", and | |
1034 | # show "maybe" for a little loose |
|
1056 | # show "maybe" for a little loose | |
1035 | # examination. |
|
1057 | # examination. | |
1036 | bd = '%s@%s, maybe' % (bu, br) |
|
1058 | bd = '%s@%s, maybe' % (bu, br) | |
1037 |
|
1059 | |||
1038 | errors.append((f, lineno and (n + lineoffset + 1), l, msg, bd)) |
|
1060 | errors.append((f, lineno and (n + lineoffset + 1), l, msg, bd)) | |
1039 |
|
1061 | |||
1040 | errors.sort() |
|
1062 | errors.sort() | |
1041 | for e in errors: |
|
1063 | for e in errors: | |
1042 | logfunc(*e) |
|
1064 | logfunc(*e) | |
1043 | fc += 1 |
|
1065 | fc += 1 | |
1044 | if maxerr and fc >= maxerr: |
|
1066 | if maxerr and fc >= maxerr: | |
1045 | print(" (too many errors, giving up)") |
|
1067 | print(" (too many errors, giving up)") | |
1046 | break |
|
1068 | break | |
1047 |
|
1069 | |||
1048 | return fc |
|
1070 | return fc | |
1049 |
|
1071 | |||
1050 |
|
1072 | |||
1051 | def main(): |
|
1073 | def main(): | |
1052 | parser = optparse.OptionParser("%prog [options] [files | -]") |
|
1074 | parser = optparse.OptionParser("%prog [options] [files | -]") | |
1053 | parser.add_option( |
|
1075 | parser.add_option( | |
1054 | "-w", |
|
1076 | "-w", | |
1055 | "--warnings", |
|
1077 | "--warnings", | |
1056 | action="store_true", |
|
1078 | action="store_true", | |
1057 | help="include warning-level checks", |
|
1079 | help="include warning-level checks", | |
1058 | ) |
|
1080 | ) | |
1059 | parser.add_option( |
|
1081 | parser.add_option( | |
1060 | "-p", "--per-file", type="int", help="max warnings per file" |
|
1082 | "-p", "--per-file", type="int", help="max warnings per file" | |
1061 | ) |
|
1083 | ) | |
1062 | parser.add_option( |
|
1084 | parser.add_option( | |
1063 | "-b", |
|
1085 | "-b", | |
1064 | "--blame", |
|
1086 | "--blame", | |
1065 | action="store_true", |
|
1087 | action="store_true", | |
1066 | help="use annotate to generate blame info", |
|
1088 | help="use annotate to generate blame info", | |
1067 | ) |
|
1089 | ) | |
1068 | parser.add_option( |
|
1090 | parser.add_option( | |
1069 | "", "--debug", action="store_true", help="show debug information" |
|
1091 | "", "--debug", action="store_true", help="show debug information" | |
1070 | ) |
|
1092 | ) | |
1071 | parser.add_option( |
|
1093 | parser.add_option( | |
1072 | "", |
|
1094 | "", | |
1073 | "--nolineno", |
|
1095 | "--nolineno", | |
1074 | action="store_false", |
|
1096 | action="store_false", | |
1075 | dest='lineno', |
|
1097 | dest='lineno', | |
1076 | help="don't show line numbers", |
|
1098 | help="don't show line numbers", | |
1077 | ) |
|
1099 | ) | |
1078 |
|
1100 | |||
1079 | parser.set_defaults( |
|
1101 | parser.set_defaults( | |
1080 | per_file=15, warnings=False, blame=False, debug=False, lineno=True |
|
1102 | per_file=15, warnings=False, blame=False, debug=False, lineno=True | |
1081 | ) |
|
1103 | ) | |
1082 | (options, args) = parser.parse_args() |
|
1104 | (options, args) = parser.parse_args() | |
1083 |
|
1105 | |||
1084 | if len(args) == 0: |
|
1106 | if len(args) == 0: | |
1085 | check = glob.glob("*") |
|
1107 | check = glob.glob("*") | |
1086 | elif args == ['-']: |
|
1108 | elif args == ['-']: | |
1087 | # read file list from stdin |
|
1109 | # read file list from stdin | |
1088 | check = sys.stdin.read().splitlines() |
|
1110 | check = sys.stdin.read().splitlines() | |
1089 | else: |
|
1111 | else: | |
1090 | check = args |
|
1112 | check = args | |
1091 |
|
1113 | |||
1092 | _preparepats() |
|
1114 | _preparepats() | |
1093 |
|
1115 | |||
1094 | ret = 0 |
|
1116 | ret = 0 | |
1095 | for f in check: |
|
1117 | for f in check: | |
1096 | if not checkfile( |
|
1118 | if not checkfile( | |
1097 | f, |
|
1119 | f, | |
1098 | maxerr=options.per_file, |
|
1120 | maxerr=options.per_file, | |
1099 | warnings=options.warnings, |
|
1121 | warnings=options.warnings, | |
1100 | blame=options.blame, |
|
1122 | blame=options.blame, | |
1101 | debug=options.debug, |
|
1123 | debug=options.debug, | |
1102 | lineno=options.lineno, |
|
1124 | lineno=options.lineno, | |
1103 | ): |
|
1125 | ): | |
1104 | ret = 1 |
|
1126 | ret = 1 | |
1105 | return ret |
|
1127 | return ret | |
1106 |
|
1128 | |||
1107 |
|
1129 | |||
1108 | if __name__ == "__main__": |
|
1130 | if __name__ == "__main__": | |
1109 | sys.exit(main()) |
|
1131 | sys.exit(main()) |
@@ -1,551 +1,546 b'' | |||||
1 | /* |
|
1 | /* | |
2 | * A fast client for Mercurial command server |
|
2 | * A fast client for Mercurial command server | |
3 | * |
|
3 | * | |
4 | * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> |
|
4 | * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> | |
5 | * |
|
5 | * | |
6 | * This software may be used and distributed according to the terms of the |
|
6 | * This software may be used and distributed according to the terms of the | |
7 | * GNU General Public License version 2 or any later version. |
|
7 | * GNU General Public License version 2 or any later version. | |
8 | */ |
|
8 | */ | |
9 |
|
9 | |||
10 | #include <assert.h> |
|
10 | #include <assert.h> | |
11 | #include <dirent.h> |
|
11 | #include <dirent.h> | |
12 | #include <errno.h> |
|
12 | #include <errno.h> | |
13 | #include <fcntl.h> |
|
13 | #include <fcntl.h> | |
14 | #include <signal.h> |
|
14 | #include <signal.h> | |
15 | #include <stdio.h> |
|
15 | #include <stdio.h> | |
16 | #include <stdlib.h> |
|
16 | #include <stdlib.h> | |
17 | #include <string.h> |
|
17 | #include <string.h> | |
18 | #include <sys/file.h> |
|
18 | #include <sys/file.h> | |
19 | #include <sys/stat.h> |
|
19 | #include <sys/stat.h> | |
20 | #include <sys/types.h> |
|
20 | #include <sys/types.h> | |
21 | #include <sys/un.h> |
|
21 | #include <sys/un.h> | |
22 | #include <sys/wait.h> |
|
22 | #include <sys/wait.h> | |
23 | #include <time.h> |
|
23 | #include <time.h> | |
24 | #include <unistd.h> |
|
24 | #include <unistd.h> | |
25 |
|
25 | |||
26 | #include "hgclient.h" |
|
26 | #include "hgclient.h" | |
27 | #include "procutil.h" |
|
27 | #include "procutil.h" | |
28 | #include "util.h" |
|
28 | #include "util.h" | |
29 |
|
29 | |||
30 | #ifndef PATH_MAX |
|
30 | #ifndef PATH_MAX | |
31 | #define PATH_MAX 4096 |
|
31 | #define PATH_MAX 4096 | |
32 | #endif |
|
32 | #endif | |
33 |
|
33 | |||
34 | struct cmdserveropts { |
|
34 | struct cmdserveropts { | |
35 | char sockname[PATH_MAX]; |
|
35 | char sockname[PATH_MAX]; | |
36 | char initsockname[PATH_MAX]; |
|
36 | char initsockname[PATH_MAX]; | |
37 | char redirectsockname[PATH_MAX]; |
|
37 | char redirectsockname[PATH_MAX]; | |
38 | size_t argsize; |
|
38 | size_t argsize; | |
39 | const char **args; |
|
39 | const char **args; | |
40 | }; |
|
40 | }; | |
41 |
|
41 | |||
42 | static void initcmdserveropts(struct cmdserveropts *opts) |
|
42 | static void initcmdserveropts(struct cmdserveropts *opts) | |
43 | { |
|
43 | { | |
44 | memset(opts, 0, sizeof(struct cmdserveropts)); |
|
44 | memset(opts, 0, sizeof(struct cmdserveropts)); | |
45 | } |
|
45 | } | |
46 |
|
46 | |||
47 | static void freecmdserveropts(struct cmdserveropts *opts) |
|
47 | static void freecmdserveropts(struct cmdserveropts *opts) | |
48 | { |
|
48 | { | |
49 | free(opts->args); |
|
49 | free(opts->args); | |
50 | opts->args = NULL; |
|
50 | opts->args = NULL; | |
51 | opts->argsize = 0; |
|
51 | opts->argsize = 0; | |
52 | } |
|
52 | } | |
53 |
|
53 | |||
54 | /* |
|
54 | /* | |
55 | * Test if an argument is a sensitive flag that should be passed to the server. |
|
55 | * Test if an argument is a sensitive flag that should be passed to the server. | |
56 | * Return 0 if not, otherwise the number of arguments starting from the current |
|
56 | * Return 0 if not, otherwise the number of arguments starting from the current | |
57 | * one that should be passed to the server. |
|
57 | * one that should be passed to the server. | |
58 | */ |
|
58 | */ | |
59 | static size_t testsensitiveflag(const char *arg) |
|
59 | static size_t testsensitiveflag(const char *arg) | |
60 | { |
|
60 | { | |
61 | static const struct { |
|
61 | static const struct { | |
62 | const char *name; |
|
62 | const char *name; | |
63 | size_t narg; |
|
63 | size_t narg; | |
64 | } flags[] = { |
|
64 | } flags[] = { | |
65 | {"--config", 1}, {"--cwd", 1}, {"--repo", 1}, |
|
65 | {"--config", 1}, {"--cwd", 1}, {"--repo", 1}, | |
66 | {"--repository", 1}, {"--traceback", 0}, {"-R", 1}, |
|
66 | {"--repository", 1}, {"--traceback", 0}, {"-R", 1}, | |
67 | }; |
|
67 | }; | |
68 | size_t i; |
|
68 | size_t i; | |
69 | for (i = 0; i < sizeof(flags) / sizeof(flags[0]); ++i) { |
|
69 | for (i = 0; i < sizeof(flags) / sizeof(flags[0]); ++i) { | |
70 | size_t len = strlen(flags[i].name); |
|
70 | size_t len = strlen(flags[i].name); | |
71 | size_t narg = flags[i].narg; |
|
71 | size_t narg = flags[i].narg; | |
72 | if (memcmp(arg, flags[i].name, len) == 0) { |
|
72 | if (memcmp(arg, flags[i].name, len) == 0) { | |
73 | if (arg[len] == '\0') { |
|
73 | if (arg[len] == '\0') { | |
74 | /* --flag (value) */ |
|
74 | /* --flag (value) */ | |
75 | return narg + 1; |
|
75 | return narg + 1; | |
76 | } else if (arg[len] == '=' && narg > 0) { |
|
76 | } else if (arg[len] == '=' && narg > 0) { | |
77 | /* --flag=value */ |
|
77 | /* --flag=value */ | |
78 | return 1; |
|
78 | return 1; | |
79 | } else if (flags[i].name[1] != '-') { |
|
79 | } else if (flags[i].name[1] != '-') { | |
80 | /* short flag */ |
|
80 | /* short flag */ | |
81 | return 1; |
|
81 | return 1; | |
82 | } |
|
82 | } | |
83 | } |
|
83 | } | |
84 | } |
|
84 | } | |
85 | return 0; |
|
85 | return 0; | |
86 | } |
|
86 | } | |
87 |
|
87 | |||
88 | /* |
|
88 | /* | |
89 | * Parse argv[] and put sensitive flags to opts->args |
|
89 | * Parse argv[] and put sensitive flags to opts->args | |
90 | */ |
|
90 | */ | |
91 | static void setcmdserverargs(struct cmdserveropts *opts, int argc, |
|
91 | static void setcmdserverargs(struct cmdserveropts *opts, int argc, | |
92 | const char *argv[]) |
|
92 | const char *argv[]) | |
93 | { |
|
93 | { | |
94 | size_t i, step; |
|
94 | size_t i, step; | |
95 | opts->argsize = 0; |
|
95 | opts->argsize = 0; | |
96 | for (i = 0, step = 1; i < (size_t)argc; i += step, step = 1) { |
|
96 | for (i = 0, step = 1; i < (size_t)argc; i += step, step = 1) { | |
97 | if (!argv[i]) |
|
97 | if (!argv[i]) | |
98 | continue; /* pass clang-analyse */ |
|
98 | continue; /* pass clang-analyse */ | |
99 | if (strcmp(argv[i], "--") == 0) |
|
99 | if (strcmp(argv[i], "--") == 0) | |
100 | break; |
|
100 | break; | |
101 | size_t n = testsensitiveflag(argv[i]); |
|
101 | size_t n = testsensitiveflag(argv[i]); | |
102 | if (n == 0 || i + n > (size_t)argc) |
|
102 | if (n == 0 || i + n > (size_t)argc) | |
103 | continue; |
|
103 | continue; | |
104 | opts->args = |
|
104 | opts->args = | |
105 | reallocx(opts->args, (n + opts->argsize) * sizeof(char *)); |
|
105 | reallocx(opts->args, (n + opts->argsize) * sizeof(char *)); | |
106 | memcpy(opts->args + opts->argsize, argv + i, |
|
106 | memcpy(opts->args + opts->argsize, argv + i, | |
107 | sizeof(char *) * n); |
|
107 | sizeof(char *) * n); | |
108 | opts->argsize += n; |
|
108 | opts->argsize += n; | |
109 | step = n; |
|
109 | step = n; | |
110 | } |
|
110 | } | |
111 | } |
|
111 | } | |
112 |
|
112 | |||
113 | static void preparesockdir(const char *sockdir) |
|
113 | static void preparesockdir(const char *sockdir) | |
114 | { |
|
114 | { | |
115 | int r; |
|
115 | int r; | |
116 | r = mkdir(sockdir, 0700); |
|
116 | r = mkdir(sockdir, 0700); | |
117 | if (r < 0 && errno != EEXIST) |
|
117 | if (r < 0 && errno != EEXIST) | |
118 | abortmsgerrno("cannot create sockdir %s", sockdir); |
|
118 | abortmsgerrno("cannot create sockdir %s", sockdir); | |
119 |
|
119 | |||
120 | struct stat st; |
|
120 | struct stat st; | |
121 | r = lstat(sockdir, &st); |
|
121 | r = lstat(sockdir, &st); | |
122 | if (r < 0) |
|
122 | if (r < 0) | |
123 | abortmsgerrno("cannot stat %s", sockdir); |
|
123 | abortmsgerrno("cannot stat %s", sockdir); | |
124 | if (!S_ISDIR(st.st_mode)) |
|
124 | if (!S_ISDIR(st.st_mode)) | |
125 | abortmsg("cannot create sockdir %s (file exists)", sockdir); |
|
125 | abortmsg("cannot create sockdir %s (file exists)", sockdir); | |
126 | if (st.st_uid != geteuid() || st.st_mode & 0077) |
|
126 | if (st.st_uid != geteuid() || st.st_mode & 0077) | |
127 | abortmsg("insecure sockdir %s", sockdir); |
|
127 | abortmsg("insecure sockdir %s", sockdir); | |
128 | } |
|
128 | } | |
129 |
|
129 | |||
130 | /* |
|
130 | /* | |
131 | * Check if a socket directory exists and is only owned by the current user. |
|
131 | * Check if a socket directory exists and is only owned by the current user. | |
132 | * Return 1 if so, 0 if not. This is used to check if XDG_RUNTIME_DIR can be |
|
132 | * Return 1 if so, 0 if not. This is used to check if XDG_RUNTIME_DIR can be | |
133 | * used or not. According to the specification [1], XDG_RUNTIME_DIR should be |
|
133 | * used or not. According to the specification [1], XDG_RUNTIME_DIR should be | |
134 | * ignored if the directory is not owned by the user with mode 0700. |
|
134 | * ignored if the directory is not owned by the user with mode 0700. | |
135 | * [1]: https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html |
|
135 | * [1]: https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html | |
136 | */ |
|
136 | */ | |
137 | static int checkruntimedir(const char *sockdir) |
|
137 | static int checkruntimedir(const char *sockdir) | |
138 | { |
|
138 | { | |
139 | struct stat st; |
|
139 | struct stat st; | |
140 | int r = lstat(sockdir, &st); |
|
140 | int r = lstat(sockdir, &st); | |
141 | if (r < 0) /* ex. does not exist */ |
|
141 | if (r < 0) /* ex. does not exist */ | |
142 | return 0; |
|
142 | return 0; | |
143 | if (!S_ISDIR(st.st_mode)) /* ex. is a file, not a directory */ |
|
143 | if (!S_ISDIR(st.st_mode)) /* ex. is a file, not a directory */ | |
144 | return 0; |
|
144 | return 0; | |
145 | return st.st_uid == geteuid() && (st.st_mode & 0777) == 0700; |
|
145 | return st.st_uid == geteuid() && (st.st_mode & 0777) == 0700; | |
146 | } |
|
146 | } | |
147 |
|
147 | |||
148 | static void getdefaultsockdir(char sockdir[], size_t size) |
|
148 | static void getdefaultsockdir(char sockdir[], size_t size) | |
149 | { |
|
149 | { | |
150 | /* by default, put socket file in secure directory |
|
150 | /* by default, put socket file in secure directory | |
151 | * (${XDG_RUNTIME_DIR}/chg, or /${TMPDIR:-tmp}/chg$UID) |
|
151 | * (${XDG_RUNTIME_DIR}/chg, or /${TMPDIR:-tmp}/chg$UID) | |
152 | * (permission of socket file may be ignored on some Unices) */ |
|
152 | * (permission of socket file may be ignored on some Unices) */ | |
153 | const char *runtimedir = getenv("XDG_RUNTIME_DIR"); |
|
153 | const char *runtimedir = getenv("XDG_RUNTIME_DIR"); | |
154 | int r; |
|
154 | int r; | |
155 | if (runtimedir && checkruntimedir(runtimedir)) { |
|
155 | if (runtimedir && checkruntimedir(runtimedir)) { | |
156 | r = snprintf(sockdir, size, "%s/chg", runtimedir); |
|
156 | r = snprintf(sockdir, size, "%s/chg", runtimedir); | |
157 | } else { |
|
157 | } else { | |
158 | const char *tmpdir = getenv("TMPDIR"); |
|
158 | const char *tmpdir = getenv("TMPDIR"); | |
159 | if (!tmpdir) |
|
159 | if (!tmpdir) | |
160 | tmpdir = "/tmp"; |
|
160 | tmpdir = "/tmp"; | |
161 | r = snprintf(sockdir, size, "%s/chg%d", tmpdir, geteuid()); |
|
161 | r = snprintf(sockdir, size, "%s/chg%d", tmpdir, geteuid()); | |
162 | } |
|
162 | } | |
163 | if (r < 0 || (size_t)r >= size) |
|
163 | if (r < 0 || (size_t)r >= size) | |
164 | abortmsg("too long TMPDIR (r = %d)", r); |
|
164 | abortmsg("too long TMPDIR (r = %d)", r); | |
165 | } |
|
165 | } | |
166 |
|
166 | |||
167 | static void setcmdserveropts(struct cmdserveropts *opts) |
|
167 | static void setcmdserveropts(struct cmdserveropts *opts) | |
168 | { |
|
168 | { | |
169 | int r; |
|
169 | int r; | |
170 | char sockdir[PATH_MAX]; |
|
170 | char sockdir[PATH_MAX]; | |
171 | const char *envsockname = getenv("CHGSOCKNAME"); |
|
171 | const char *envsockname = getenv("CHGSOCKNAME"); | |
172 | if (!envsockname) { |
|
172 | if (!envsockname) { | |
173 | getdefaultsockdir(sockdir, sizeof(sockdir)); |
|
173 | getdefaultsockdir(sockdir, sizeof(sockdir)); | |
174 | preparesockdir(sockdir); |
|
174 | preparesockdir(sockdir); | |
175 | } |
|
175 | } | |
176 |
|
176 | |||
177 | const char *basename = (envsockname) ? envsockname : sockdir; |
|
177 | const char *basename = (envsockname) ? envsockname : sockdir; | |
178 | const char *sockfmt = (envsockname) ? "%s" : "%s/server"; |
|
178 | const char *sockfmt = (envsockname) ? "%s" : "%s/server"; | |
179 | r = snprintf(opts->sockname, sizeof(opts->sockname), sockfmt, basename); |
|
179 | r = snprintf(opts->sockname, sizeof(opts->sockname), sockfmt, basename); | |
180 | if (r < 0 || (size_t)r >= sizeof(opts->sockname)) |
|
180 | if (r < 0 || (size_t)r >= sizeof(opts->sockname)) | |
181 | abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); |
|
181 | abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); | |
182 | r = snprintf(opts->initsockname, sizeof(opts->initsockname), "%s.%u", |
|
182 | r = snprintf(opts->initsockname, sizeof(opts->initsockname), "%s.%u", | |
183 | opts->sockname, (unsigned)getpid()); |
|
183 | opts->sockname, (unsigned)getpid()); | |
184 | if (r < 0 || (size_t)r >= sizeof(opts->initsockname)) |
|
184 | if (r < 0 || (size_t)r >= sizeof(opts->initsockname)) | |
185 | abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); |
|
185 | abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); | |
186 | } |
|
186 | } | |
187 |
|
187 | |||
188 | /* If the current program is, say, /a/b/c/chg, returns /a/b/c/hg. */ |
|
188 | /* If the current program is, say, /a/b/c/chg, returns /a/b/c/hg. */ | |
189 | static char *getrelhgcmd(void) |
|
189 | static char *getrelhgcmd(void) | |
190 | { |
|
190 | { | |
191 | ssize_t n; |
|
191 | ssize_t n; | |
192 | char *res, *slash; |
|
192 | char *res, *slash; | |
193 | int maxsize = 4096; |
|
193 | int maxsize = 4096; | |
194 | res = malloc(maxsize); |
|
194 | res = malloc(maxsize); | |
195 | if (res == NULL) |
|
195 | if (res == NULL) | |
196 | goto cleanup; |
|
196 | goto cleanup; | |
197 | n = readlink("/proc/self/exe", res, maxsize); |
|
197 | n = readlink("/proc/self/exe", res, maxsize); | |
198 | if (n < 0 || n >= maxsize) |
|
198 | if (n < 0 || n >= maxsize) | |
199 | goto cleanup; |
|
199 | goto cleanup; | |
200 | res[n] = '\0'; |
|
200 | res[n] = '\0'; | |
201 | slash = strrchr(res, '/'); |
|
201 | slash = strrchr(res, '/'); | |
202 | if (slash == NULL) |
|
202 | if (slash == NULL) | |
203 | goto cleanup; |
|
203 | goto cleanup; | |
204 | /* 4 is strlen("/hg") + nul byte */ |
|
204 | /* 4 is strlen("/hg") + nul byte */ | |
205 | if (slash + 4 >= res + maxsize) |
|
205 | if (slash + 4 >= res + maxsize) | |
206 | goto cleanup; |
|
206 | goto cleanup; | |
207 | memcpy(slash, "/hg", 4); |
|
207 | memcpy(slash, "/hg", 4); | |
208 | return res; |
|
208 | return res; | |
209 | cleanup: |
|
209 | cleanup: | |
210 | free(res); |
|
210 | free(res); | |
211 | return NULL; |
|
211 | return NULL; | |
212 | } |
|
212 | } | |
213 |
|
213 | |||
214 | static const char *gethgcmd(void) |
|
214 | static const char *gethgcmd(void) | |
215 | { |
|
215 | { | |
216 | static const char *hgcmd = NULL; |
|
216 | static const char *hgcmd = NULL; | |
217 | #ifdef HGPATHREL |
|
217 | #ifdef HGPATHREL | |
218 | int tryrelhgcmd = 1; |
|
218 | int tryrelhgcmd = 1; | |
219 | #else |
|
219 | #else | |
220 | int tryrelhgcmd = 0; |
|
220 | int tryrelhgcmd = 0; | |
221 | #endif |
|
221 | #endif | |
222 | if (!hgcmd) { |
|
222 | if (!hgcmd) { | |
223 | hgcmd = getenv("CHGHG"); |
|
223 | hgcmd = getenv("CHGHG"); | |
224 | if (!hgcmd || hgcmd[0] == '\0') |
|
224 | if (!hgcmd || hgcmd[0] == '\0') | |
225 | hgcmd = getenv("HG"); |
|
225 | hgcmd = getenv("HG"); | |
226 | if (tryrelhgcmd && (!hgcmd || hgcmd[0] == '\0')) |
|
226 | if (tryrelhgcmd && (!hgcmd || hgcmd[0] == '\0')) | |
227 | hgcmd = getrelhgcmd(); |
|
227 | hgcmd = getrelhgcmd(); | |
228 | if (!hgcmd || hgcmd[0] == '\0') |
|
228 | if (!hgcmd || hgcmd[0] == '\0') | |
229 | #ifdef HGPATH |
|
229 | #ifdef HGPATH | |
230 | hgcmd = (HGPATH); |
|
230 | hgcmd = (HGPATH); | |
231 | #else |
|
231 | #else | |
232 | hgcmd = "hg"; |
|
232 | hgcmd = "hg"; | |
233 | #endif |
|
233 | #endif | |
234 | } |
|
234 | } | |
235 | return hgcmd; |
|
235 | return hgcmd; | |
236 | } |
|
236 | } | |
237 |
|
237 | |||
238 | static void execcmdserver(const struct cmdserveropts *opts) |
|
238 | static void execcmdserver(const struct cmdserveropts *opts) | |
239 | { |
|
239 | { | |
240 | const char *hgcmd = gethgcmd(); |
|
240 | const char *hgcmd = gethgcmd(); | |
241 |
|
241 | |||
242 | const char *baseargv[] = { |
|
242 | const char *baseargv[] = { | |
243 | hgcmd, |
|
243 | hgcmd, "serve", "--no-profile", "--cmdserver", | |
244 | "serve", |
|
244 | "chgunix", "--address", opts->initsockname, "--daemon-postexec", | |
245 | "--cmdserver", |
|
|||
246 | "chgunix", |
|
|||
247 | "--address", |
|
|||
248 | opts->initsockname, |
|
|||
249 | "--daemon-postexec", |
|
|||
250 | "chdir:/", |
|
245 | "chdir:/", | |
251 | }; |
|
246 | }; | |
252 | size_t baseargvsize = sizeof(baseargv) / sizeof(baseargv[0]); |
|
247 | size_t baseargvsize = sizeof(baseargv) / sizeof(baseargv[0]); | |
253 | size_t argsize = baseargvsize + opts->argsize + 1; |
|
248 | size_t argsize = baseargvsize + opts->argsize + 1; | |
254 |
|
249 | |||
255 | const char **argv = mallocx(sizeof(char *) * argsize); |
|
250 | const char **argv = mallocx(sizeof(char *) * argsize); | |
256 | memcpy(argv, baseargv, sizeof(baseargv)); |
|
251 | memcpy(argv, baseargv, sizeof(baseargv)); | |
257 | if (opts->args) { |
|
252 | if (opts->args) { | |
258 | size_t size = sizeof(char *) * opts->argsize; |
|
253 | size_t size = sizeof(char *) * opts->argsize; | |
259 | memcpy(argv + baseargvsize, opts->args, size); |
|
254 | memcpy(argv + baseargvsize, opts->args, size); | |
260 | } |
|
255 | } | |
261 | argv[argsize - 1] = NULL; |
|
256 | argv[argsize - 1] = NULL; | |
262 |
|
257 | |||
263 | const char *lc_ctype_env = getenv("LC_CTYPE"); |
|
258 | const char *lc_ctype_env = getenv("LC_CTYPE"); | |
264 | if (lc_ctype_env == NULL) { |
|
259 | if (lc_ctype_env == NULL) { | |
265 | if (putenv("CHG_CLEAR_LC_CTYPE=") != 0) |
|
260 | if (putenv("CHG_CLEAR_LC_CTYPE=") != 0) | |
266 | abortmsgerrno("failed to putenv CHG_CLEAR_LC_CTYPE"); |
|
261 | abortmsgerrno("failed to putenv CHG_CLEAR_LC_CTYPE"); | |
267 | } else { |
|
262 | } else { | |
268 | if (setenv("CHGORIG_LC_CTYPE", lc_ctype_env, 1) != 0) { |
|
263 | if (setenv("CHGORIG_LC_CTYPE", lc_ctype_env, 1) != 0) { | |
269 | abortmsgerrno("failed to setenv CHGORIG_LC_CTYPE"); |
|
264 | abortmsgerrno("failed to setenv CHGORIG_LC_CTYPE"); | |
270 | } |
|
265 | } | |
271 | } |
|
266 | } | |
272 |
|
267 | |||
273 | /* close any open files to avoid hanging locks */ |
|
268 | /* close any open files to avoid hanging locks */ | |
274 | DIR *dp = opendir("/proc/self/fd"); |
|
269 | DIR *dp = opendir("/proc/self/fd"); | |
275 | if (dp != NULL) { |
|
270 | if (dp != NULL) { | |
276 | debugmsg("closing files based on /proc contents"); |
|
271 | debugmsg("closing files based on /proc contents"); | |
277 | struct dirent *de; |
|
272 | struct dirent *de; | |
278 | while ((de = readdir(dp))) { |
|
273 | while ((de = readdir(dp))) { | |
279 | errno = 0; |
|
274 | errno = 0; | |
280 | char *end; |
|
275 | char *end; | |
281 | long fd_value = strtol(de->d_name, &end, 10); |
|
276 | long fd_value = strtol(de->d_name, &end, 10); | |
282 | if (end == de->d_name) { |
|
277 | if (end == de->d_name) { | |
283 | /* unable to convert to int (. or ..) */ |
|
278 | /* unable to convert to int (. or ..) */ | |
284 | continue; |
|
279 | continue; | |
285 | } |
|
280 | } | |
286 | if (errno == ERANGE) { |
|
281 | if (errno == ERANGE) { | |
287 | debugmsg("tried to parse %s, but range error " |
|
282 | debugmsg("tried to parse %s, but range error " | |
288 | "occurred", |
|
283 | "occurred", | |
289 | de->d_name); |
|
284 | de->d_name); | |
290 | continue; |
|
285 | continue; | |
291 | } |
|
286 | } | |
292 | if (fd_value > STDERR_FILENO && fd_value != dirfd(dp)) { |
|
287 | if (fd_value > STDERR_FILENO && fd_value != dirfd(dp)) { | |
293 | debugmsg("closing fd %ld", fd_value); |
|
288 | debugmsg("closing fd %ld", fd_value); | |
294 | int res = close(fd_value); |
|
289 | int res = close(fd_value); | |
295 | if (res) { |
|
290 | if (res) { | |
296 | debugmsg("tried to close fd %ld: %d " |
|
291 | debugmsg("tried to close fd %ld: %d " | |
297 | "(errno: %d)", |
|
292 | "(errno: %d)", | |
298 | fd_value, res, errno); |
|
293 | fd_value, res, errno); | |
299 | } |
|
294 | } | |
300 | } |
|
295 | } | |
301 | } |
|
296 | } | |
302 | closedir(dp); |
|
297 | closedir(dp); | |
303 | } |
|
298 | } | |
304 |
|
299 | |||
305 | if (putenv("CHGINTERNALMARK=") != 0) |
|
300 | if (putenv("CHGINTERNALMARK=") != 0) | |
306 | abortmsgerrno("failed to putenv"); |
|
301 | abortmsgerrno("failed to putenv"); | |
307 | if (execvp(hgcmd, (char **)argv) < 0) |
|
302 | if (execvp(hgcmd, (char **)argv) < 0) | |
308 | abortmsgerrno("failed to exec cmdserver"); |
|
303 | abortmsgerrno("failed to exec cmdserver"); | |
309 | free(argv); |
|
304 | free(argv); | |
310 | } |
|
305 | } | |
311 |
|
306 | |||
312 | /* Retry until we can connect to the server. Give up after some time. */ |
|
307 | /* Retry until we can connect to the server. Give up after some time. */ | |
313 | static hgclient_t *retryconnectcmdserver(struct cmdserveropts *opts, pid_t pid) |
|
308 | static hgclient_t *retryconnectcmdserver(struct cmdserveropts *opts, pid_t pid) | |
314 | { |
|
309 | { | |
315 | static const struct timespec sleepreq = {0, 10 * 1000000}; |
|
310 | static const struct timespec sleepreq = {0, 10 * 1000000}; | |
316 | int pst = 0; |
|
311 | int pst = 0; | |
317 |
|
312 | |||
318 | debugmsg("try connect to %s repeatedly", opts->initsockname); |
|
313 | debugmsg("try connect to %s repeatedly", opts->initsockname); | |
319 |
|
314 | |||
320 | unsigned int timeoutsec = 60; /* default: 60 seconds */ |
|
315 | unsigned int timeoutsec = 60; /* default: 60 seconds */ | |
321 | const char *timeoutenv = getenv("CHGTIMEOUT"); |
|
316 | const char *timeoutenv = getenv("CHGTIMEOUT"); | |
322 | if (timeoutenv) |
|
317 | if (timeoutenv) | |
323 | sscanf(timeoutenv, "%u", &timeoutsec); |
|
318 | sscanf(timeoutenv, "%u", &timeoutsec); | |
324 |
|
319 | |||
325 | for (unsigned int i = 0; !timeoutsec || i < timeoutsec * 100; i++) { |
|
320 | for (unsigned int i = 0; !timeoutsec || i < timeoutsec * 100; i++) { | |
326 | hgclient_t *hgc = hgc_open(opts->initsockname); |
|
321 | hgclient_t *hgc = hgc_open(opts->initsockname); | |
327 | if (hgc) { |
|
322 | if (hgc) { | |
328 | debugmsg("rename %s to %s", opts->initsockname, |
|
323 | debugmsg("rename %s to %s", opts->initsockname, | |
329 | opts->sockname); |
|
324 | opts->sockname); | |
330 | int r = rename(opts->initsockname, opts->sockname); |
|
325 | int r = rename(opts->initsockname, opts->sockname); | |
331 | if (r != 0) |
|
326 | if (r != 0) | |
332 | abortmsgerrno("cannot rename"); |
|
327 | abortmsgerrno("cannot rename"); | |
333 | return hgc; |
|
328 | return hgc; | |
334 | } |
|
329 | } | |
335 |
|
330 | |||
336 | if (pid > 0) { |
|
331 | if (pid > 0) { | |
337 | /* collect zombie if child process fails to start */ |
|
332 | /* collect zombie if child process fails to start */ | |
338 | int r = waitpid(pid, &pst, WNOHANG); |
|
333 | int r = waitpid(pid, &pst, WNOHANG); | |
339 | if (r != 0) |
|
334 | if (r != 0) | |
340 | goto cleanup; |
|
335 | goto cleanup; | |
341 | } |
|
336 | } | |
342 |
|
337 | |||
343 | nanosleep(&sleepreq, NULL); |
|
338 | nanosleep(&sleepreq, NULL); | |
344 | } |
|
339 | } | |
345 |
|
340 | |||
346 | abortmsg("timed out waiting for cmdserver %s", opts->initsockname); |
|
341 | abortmsg("timed out waiting for cmdserver %s", opts->initsockname); | |
347 | return NULL; |
|
342 | return NULL; | |
348 |
|
343 | |||
349 | cleanup: |
|
344 | cleanup: | |
350 | if (WIFEXITED(pst)) { |
|
345 | if (WIFEXITED(pst)) { | |
351 | if (WEXITSTATUS(pst) == 0) |
|
346 | if (WEXITSTATUS(pst) == 0) | |
352 | abortmsg("could not connect to cmdserver " |
|
347 | abortmsg("could not connect to cmdserver " | |
353 | "(exited with status 0)"); |
|
348 | "(exited with status 0)"); | |
354 | debugmsg("cmdserver exited with status %d", WEXITSTATUS(pst)); |
|
349 | debugmsg("cmdserver exited with status %d", WEXITSTATUS(pst)); | |
355 | exit(WEXITSTATUS(pst)); |
|
350 | exit(WEXITSTATUS(pst)); | |
356 | } else if (WIFSIGNALED(pst)) { |
|
351 | } else if (WIFSIGNALED(pst)) { | |
357 | abortmsg("cmdserver killed by signal %d", WTERMSIG(pst)); |
|
352 | abortmsg("cmdserver killed by signal %d", WTERMSIG(pst)); | |
358 | } else { |
|
353 | } else { | |
359 | abortmsg("error while waiting for cmdserver"); |
|
354 | abortmsg("error while waiting for cmdserver"); | |
360 | } |
|
355 | } | |
361 | return NULL; |
|
356 | return NULL; | |
362 | } |
|
357 | } | |
363 |
|
358 | |||
364 | /* Connect to a cmdserver. Will start a new server on demand. */ |
|
359 | /* Connect to a cmdserver. Will start a new server on demand. */ | |
365 | static hgclient_t *connectcmdserver(struct cmdserveropts *opts) |
|
360 | static hgclient_t *connectcmdserver(struct cmdserveropts *opts) | |
366 | { |
|
361 | { | |
367 | const char *sockname = |
|
362 | const char *sockname = | |
368 | opts->redirectsockname[0] ? opts->redirectsockname : opts->sockname; |
|
363 | opts->redirectsockname[0] ? opts->redirectsockname : opts->sockname; | |
369 | debugmsg("try connect to %s", sockname); |
|
364 | debugmsg("try connect to %s", sockname); | |
370 | hgclient_t *hgc = hgc_open(sockname); |
|
365 | hgclient_t *hgc = hgc_open(sockname); | |
371 | if (hgc) |
|
366 | if (hgc) | |
372 | return hgc; |
|
367 | return hgc; | |
373 |
|
368 | |||
374 | /* prevent us from being connected to an outdated server: we were |
|
369 | /* prevent us from being connected to an outdated server: we were | |
375 | * told by a server to redirect to opts->redirectsockname and that |
|
370 | * told by a server to redirect to opts->redirectsockname and that | |
376 | * address does not work. we do not want to connect to the server |
|
371 | * address does not work. we do not want to connect to the server | |
377 | * again because it will probably tell us the same thing. */ |
|
372 | * again because it will probably tell us the same thing. */ | |
378 | if (sockname == opts->redirectsockname) |
|
373 | if (sockname == opts->redirectsockname) | |
379 | unlink(opts->sockname); |
|
374 | unlink(opts->sockname); | |
380 |
|
375 | |||
381 | debugmsg("start cmdserver at %s", opts->initsockname); |
|
376 | debugmsg("start cmdserver at %s", opts->initsockname); | |
382 |
|
377 | |||
383 | pid_t pid = fork(); |
|
378 | pid_t pid = fork(); | |
384 | if (pid < 0) |
|
379 | if (pid < 0) | |
385 | abortmsg("failed to fork cmdserver process"); |
|
380 | abortmsg("failed to fork cmdserver process"); | |
386 | if (pid == 0) { |
|
381 | if (pid == 0) { | |
387 | execcmdserver(opts); |
|
382 | execcmdserver(opts); | |
388 | } else { |
|
383 | } else { | |
389 | hgc = retryconnectcmdserver(opts, pid); |
|
384 | hgc = retryconnectcmdserver(opts, pid); | |
390 | } |
|
385 | } | |
391 |
|
386 | |||
392 | return hgc; |
|
387 | return hgc; | |
393 | } |
|
388 | } | |
394 |
|
389 | |||
395 | static void killcmdserver(const struct cmdserveropts *opts) |
|
390 | static void killcmdserver(const struct cmdserveropts *opts) | |
396 | { |
|
391 | { | |
397 | /* resolve config hash */ |
|
392 | /* resolve config hash */ | |
398 | char *resolvedpath = realpath(opts->sockname, NULL); |
|
393 | char *resolvedpath = realpath(opts->sockname, NULL); | |
399 | if (resolvedpath) { |
|
394 | if (resolvedpath) { | |
400 | unlink(resolvedpath); |
|
395 | unlink(resolvedpath); | |
401 | free(resolvedpath); |
|
396 | free(resolvedpath); | |
402 | } |
|
397 | } | |
403 | } |
|
398 | } | |
404 |
|
399 | |||
405 | /* Run instructions sent from the server like unlink and set redirect path |
|
400 | /* Run instructions sent from the server like unlink and set redirect path | |
406 | * Return 1 if reconnect is needed, otherwise 0 */ |
|
401 | * Return 1 if reconnect is needed, otherwise 0 */ | |
407 | static int runinstructions(struct cmdserveropts *opts, const char **insts) |
|
402 | static int runinstructions(struct cmdserveropts *opts, const char **insts) | |
408 | { |
|
403 | { | |
409 | int needreconnect = 0; |
|
404 | int needreconnect = 0; | |
410 | if (!insts) |
|
405 | if (!insts) | |
411 | return needreconnect; |
|
406 | return needreconnect; | |
412 |
|
407 | |||
413 | assert(insts); |
|
408 | assert(insts); | |
414 | opts->redirectsockname[0] = '\0'; |
|
409 | opts->redirectsockname[0] = '\0'; | |
415 | const char **pinst; |
|
410 | const char **pinst; | |
416 | for (pinst = insts; *pinst; pinst++) { |
|
411 | for (pinst = insts; *pinst; pinst++) { | |
417 | debugmsg("instruction: %s", *pinst); |
|
412 | debugmsg("instruction: %s", *pinst); | |
418 | if (strncmp(*pinst, "unlink ", 7) == 0) { |
|
413 | if (strncmp(*pinst, "unlink ", 7) == 0) { | |
419 | unlink(*pinst + 7); |
|
414 | unlink(*pinst + 7); | |
420 | } else if (strncmp(*pinst, "redirect ", 9) == 0) { |
|
415 | } else if (strncmp(*pinst, "redirect ", 9) == 0) { | |
421 | int r = snprintf(opts->redirectsockname, |
|
416 | int r = snprintf(opts->redirectsockname, | |
422 | sizeof(opts->redirectsockname), "%s", |
|
417 | sizeof(opts->redirectsockname), "%s", | |
423 | *pinst + 9); |
|
418 | *pinst + 9); | |
424 | if (r < 0 || r >= (int)sizeof(opts->redirectsockname)) |
|
419 | if (r < 0 || r >= (int)sizeof(opts->redirectsockname)) | |
425 | abortmsg("redirect path is too long (%d)", r); |
|
420 | abortmsg("redirect path is too long (%d)", r); | |
426 | needreconnect = 1; |
|
421 | needreconnect = 1; | |
427 | } else if (strncmp(*pinst, "exit ", 5) == 0) { |
|
422 | } else if (strncmp(*pinst, "exit ", 5) == 0) { | |
428 | int n = 0; |
|
423 | int n = 0; | |
429 | if (sscanf(*pinst + 5, "%d", &n) != 1) |
|
424 | if (sscanf(*pinst + 5, "%d", &n) != 1) | |
430 | abortmsg("cannot read the exit code"); |
|
425 | abortmsg("cannot read the exit code"); | |
431 | exit(n); |
|
426 | exit(n); | |
432 | } else if (strcmp(*pinst, "reconnect") == 0) { |
|
427 | } else if (strcmp(*pinst, "reconnect") == 0) { | |
433 | needreconnect = 1; |
|
428 | needreconnect = 1; | |
434 | } else { |
|
429 | } else { | |
435 | abortmsg("unknown instruction: %s", *pinst); |
|
430 | abortmsg("unknown instruction: %s", *pinst); | |
436 | } |
|
431 | } | |
437 | } |
|
432 | } | |
438 | return needreconnect; |
|
433 | return needreconnect; | |
439 | } |
|
434 | } | |
440 |
|
435 | |||
441 | /* |
|
436 | /* | |
442 | * Test whether the command and the environment is unsupported or not. |
|
437 | * Test whether the command and the environment is unsupported or not. | |
443 | * |
|
438 | * | |
444 | * If any of the stdio file descriptors are not present (rare, but some tools |
|
439 | * If any of the stdio file descriptors are not present (rare, but some tools | |
445 | * might spawn new processes without stdio instead of redirecting them to the |
|
440 | * might spawn new processes without stdio instead of redirecting them to the | |
446 | * null device), then mark it as not supported because attachio won't work |
|
441 | * null device), then mark it as not supported because attachio won't work | |
447 | * correctly. |
|
442 | * correctly. | |
448 | * |
|
443 | * | |
449 | * The command list is not designed to cover all cases. But it's fast, and does |
|
444 | * The command list is not designed to cover all cases. But it's fast, and does | |
450 | * not depend on the server. |
|
445 | * not depend on the server. | |
451 | */ |
|
446 | */ | |
452 | static int isunsupported(int argc, const char *argv[]) |
|
447 | static int isunsupported(int argc, const char *argv[]) | |
453 | { |
|
448 | { | |
454 | enum { |
|
449 | enum { | |
455 | SERVE = 1, |
|
450 | SERVE = 1, | |
456 | DAEMON = 2, |
|
451 | DAEMON = 2, | |
457 | SERVEDAEMON = SERVE | DAEMON, |
|
452 | SERVEDAEMON = SERVE | DAEMON, | |
458 | }; |
|
453 | }; | |
459 | unsigned int state = 0; |
|
454 | unsigned int state = 0; | |
460 | int i; |
|
455 | int i; | |
461 | /* use fcntl to test missing stdio fds */ |
|
456 | /* use fcntl to test missing stdio fds */ | |
462 | if (fcntl(STDIN_FILENO, F_GETFD) == -1 || |
|
457 | if (fcntl(STDIN_FILENO, F_GETFD) == -1 || | |
463 | fcntl(STDOUT_FILENO, F_GETFD) == -1 || |
|
458 | fcntl(STDOUT_FILENO, F_GETFD) == -1 || | |
464 | fcntl(STDERR_FILENO, F_GETFD) == -1) { |
|
459 | fcntl(STDERR_FILENO, F_GETFD) == -1) { | |
465 | debugmsg("stdio fds are missing"); |
|
460 | debugmsg("stdio fds are missing"); | |
466 | return 1; |
|
461 | return 1; | |
467 | } |
|
462 | } | |
468 | for (i = 0; i < argc; ++i) { |
|
463 | for (i = 0; i < argc; ++i) { | |
469 | if (strcmp(argv[i], "--") == 0) |
|
464 | if (strcmp(argv[i], "--") == 0) | |
470 | break; |
|
465 | break; | |
471 | /* |
|
466 | /* | |
472 | * there can be false positives but no false negative |
|
467 | * there can be false positives but no false negative | |
473 | * we cannot assume `serve` will always be first argument |
|
468 | * we cannot assume `serve` will always be first argument | |
474 | * because global options can be passed before the command name |
|
469 | * because global options can be passed before the command name | |
475 | */ |
|
470 | */ | |
476 | if (strcmp("serve", argv[i]) == 0) |
|
471 | if (strcmp("serve", argv[i]) == 0) | |
477 | state |= SERVE; |
|
472 | state |= SERVE; | |
478 | else if (strcmp("-d", argv[i]) == 0 || |
|
473 | else if (strcmp("-d", argv[i]) == 0 || | |
479 | strcmp("--daemon", argv[i]) == 0) |
|
474 | strcmp("--daemon", argv[i]) == 0) | |
480 | state |= DAEMON; |
|
475 | state |= DAEMON; | |
481 | } |
|
476 | } | |
482 | return (state & SERVEDAEMON) == SERVEDAEMON; |
|
477 | return (state & SERVEDAEMON) == SERVEDAEMON; | |
483 | } |
|
478 | } | |
484 |
|
479 | |||
485 | static void execoriginalhg(const char *argv[]) |
|
480 | static void execoriginalhg(const char *argv[]) | |
486 | { |
|
481 | { | |
487 | debugmsg("execute original hg"); |
|
482 | debugmsg("execute original hg"); | |
488 | if (execvp(gethgcmd(), (char **)argv) < 0) |
|
483 | if (execvp(gethgcmd(), (char **)argv) < 0) | |
489 | abortmsgerrno("failed to exec original hg"); |
|
484 | abortmsgerrno("failed to exec original hg"); | |
490 | } |
|
485 | } | |
491 |
|
486 | |||
492 | int main(int argc, const char *argv[], const char *envp[]) |
|
487 | int main(int argc, const char *argv[], const char *envp[]) | |
493 | { |
|
488 | { | |
494 | if (getenv("CHGDEBUG")) |
|
489 | if (getenv("CHGDEBUG")) | |
495 | enabledebugmsg(); |
|
490 | enabledebugmsg(); | |
496 |
|
491 | |||
497 | if (!getenv("HGPLAIN") && isatty(fileno(stderr))) |
|
492 | if (!getenv("HGPLAIN") && isatty(fileno(stderr))) | |
498 | enablecolor(); |
|
493 | enablecolor(); | |
499 |
|
494 | |||
500 | if (getenv("CHGINTERNALMARK")) |
|
495 | if (getenv("CHGINTERNALMARK")) | |
501 | abortmsg("chg started by chg detected.\n" |
|
496 | abortmsg("chg started by chg detected.\n" | |
502 | "Please make sure ${HG:-hg} is not a symlink or " |
|
497 | "Please make sure ${HG:-hg} is not a symlink or " | |
503 | "wrapper to chg. Alternatively, set $CHGHG to the " |
|
498 | "wrapper to chg. Alternatively, set $CHGHG to the " | |
504 | "path of real hg."); |
|
499 | "path of real hg."); | |
505 |
|
500 | |||
506 | if (isunsupported(argc - 1, argv + 1)) |
|
501 | if (isunsupported(argc - 1, argv + 1)) | |
507 | execoriginalhg(argv); |
|
502 | execoriginalhg(argv); | |
508 |
|
503 | |||
509 | struct cmdserveropts opts; |
|
504 | struct cmdserveropts opts; | |
510 | initcmdserveropts(&opts); |
|
505 | initcmdserveropts(&opts); | |
511 | setcmdserveropts(&opts); |
|
506 | setcmdserveropts(&opts); | |
512 | setcmdserverargs(&opts, argc, argv); |
|
507 | setcmdserverargs(&opts, argc, argv); | |
513 |
|
508 | |||
514 | if (argc == 2) { |
|
509 | if (argc == 2) { | |
515 | if (strcmp(argv[1], "--kill-chg-daemon") == 0) { |
|
510 | if (strcmp(argv[1], "--kill-chg-daemon") == 0) { | |
516 | killcmdserver(&opts); |
|
511 | killcmdserver(&opts); | |
517 | return 0; |
|
512 | return 0; | |
518 | } |
|
513 | } | |
519 | } |
|
514 | } | |
520 |
|
515 | |||
521 | hgclient_t *hgc; |
|
516 | hgclient_t *hgc; | |
522 | size_t retry = 0; |
|
517 | size_t retry = 0; | |
523 | while (1) { |
|
518 | while (1) { | |
524 | hgc = connectcmdserver(&opts); |
|
519 | hgc = connectcmdserver(&opts); | |
525 | if (!hgc) |
|
520 | if (!hgc) | |
526 | abortmsg("cannot open hg client"); |
|
521 | abortmsg("cannot open hg client"); | |
527 | hgc_setenv(hgc, envp); |
|
522 | hgc_setenv(hgc, envp); | |
528 | const char **insts = hgc_validate(hgc, argv + 1, argc - 1); |
|
523 | const char **insts = hgc_validate(hgc, argv + 1, argc - 1); | |
529 | int needreconnect = runinstructions(&opts, insts); |
|
524 | int needreconnect = runinstructions(&opts, insts); | |
530 | free(insts); |
|
525 | free(insts); | |
531 | if (!needreconnect) |
|
526 | if (!needreconnect) | |
532 | break; |
|
527 | break; | |
533 | hgc_close(hgc); |
|
528 | hgc_close(hgc); | |
534 | if (++retry > 10) |
|
529 | if (++retry > 10) | |
535 | abortmsg("too many redirections.\n" |
|
530 | abortmsg("too many redirections.\n" | |
536 | "Please make sure %s is not a wrapper which " |
|
531 | "Please make sure %s is not a wrapper which " | |
537 | "changes sensitive environment variables " |
|
532 | "changes sensitive environment variables " | |
538 | "before executing hg. If you have to use a " |
|
533 | "before executing hg. If you have to use a " | |
539 | "wrapper, wrap chg instead of hg.", |
|
534 | "wrapper, wrap chg instead of hg.", | |
540 | gethgcmd()); |
|
535 | gethgcmd()); | |
541 | } |
|
536 | } | |
542 |
|
537 | |||
543 | setupsignalhandler(hgc_peerpid(hgc), hgc_peerpgid(hgc)); |
|
538 | setupsignalhandler(hgc_peerpid(hgc), hgc_peerpgid(hgc)); | |
544 | atexit(waitpager); |
|
539 | atexit(waitpager); | |
545 | int exitcode = hgc_runcommand(hgc, argv + 1, argc - 1); |
|
540 | int exitcode = hgc_runcommand(hgc, argv + 1, argc - 1); | |
546 | restoresignalhandler(); |
|
541 | restoresignalhandler(); | |
547 | hgc_close(hgc); |
|
542 | hgc_close(hgc); | |
548 | freecmdserveropts(&opts); |
|
543 | freecmdserveropts(&opts); | |
549 |
|
544 | |||
550 | return exitcode; |
|
545 | return exitcode; | |
551 | } |
|
546 | } |
@@ -1,65 +1,69 b'' | |||||
1 | # dirstatenonnormalcheck.py - extension to check the consistency of the |
|
1 | # dirstatenonnormalcheck.py - extension to check the consistency of the | |
2 | # dirstate's non-normal map |
|
2 | # dirstate's non-normal map | |
3 | # |
|
3 | # | |
4 | # For most operations on dirstate, this extensions checks that the nonnormalset |
|
4 | # For most operations on dirstate, this extensions checks that the nonnormalset | |
5 | # contains the right entries. |
|
5 | # contains the right entries. | |
6 | # It compares the nonnormal file to a nonnormalset built from the map of all |
|
6 | # It compares the nonnormal file to a nonnormalset built from the map of all | |
7 | # the files in the dirstate to check that they contain the same files. |
|
7 | # the files in the dirstate to check that they contain the same files. | |
8 |
|
8 | |||
9 | from __future__ import absolute_import |
|
9 | from __future__ import absolute_import | |
10 |
|
10 | |||
11 | from mercurial import ( |
|
11 | from mercurial import ( | |
12 | dirstate, |
|
12 | dirstate, | |
13 | extensions, |
|
13 | extensions, | |
|
14 | pycompat, | |||
14 | ) |
|
15 | ) | |
15 |
|
16 | |||
16 |
|
17 | |||
17 | def nonnormalentries(dmap): |
|
18 | def nonnormalentries(dmap): | |
18 | """Compute nonnormal entries from dirstate's dmap""" |
|
19 | """Compute nonnormal entries from dirstate's dmap""" | |
19 | res = set() |
|
20 | res = set() | |
20 | for f, e in dmap.iteritems(): |
|
21 | for f, e in dmap.iteritems(): | |
21 |
if e |
|
22 | if e.state != b'n' or e.mtime == -1: | |
22 | res.add(f) |
|
23 | res.add(f) | |
23 | return res |
|
24 | return res | |
24 |
|
25 | |||
25 |
|
26 | |||
26 | def checkconsistency(ui, orig, dmap, _nonnormalset, label): |
|
27 | def checkconsistency(ui, orig, dmap, _nonnormalset, label): | |
27 | """Compute nonnormalset from dmap, check that it matches _nonnormalset""" |
|
28 | """Compute nonnormalset from dmap, check that it matches _nonnormalset""" | |
28 | nonnormalcomputedmap = nonnormalentries(dmap) |
|
29 | nonnormalcomputedmap = nonnormalentries(dmap) | |
29 | if _nonnormalset != nonnormalcomputedmap: |
|
30 | if _nonnormalset != nonnormalcomputedmap: | |
30 | ui.develwarn(b"%s call to %s\n" % (label, orig), config=b'dirstate') |
|
31 | b_orig = pycompat.sysbytes(repr(orig)) | |
|
32 | ui.develwarn(b"%s call to %s\n" % (label, b_orig), config=b'dirstate') | |||
31 | ui.develwarn(b"inconsistency in nonnormalset\n", config=b'dirstate') |
|
33 | ui.develwarn(b"inconsistency in nonnormalset\n", config=b'dirstate') | |
32 | ui.develwarn(b"[nonnormalset] %s\n" % _nonnormalset, config=b'dirstate') |
|
34 | b_nonnormal = pycompat.sysbytes(repr(_nonnormalset)) | |
33 |
ui.develwarn(b"[ |
|
35 | ui.develwarn(b"[nonnormalset] %s\n" % b_nonnormal, config=b'dirstate') | |
|
36 | b_nonnormalcomputed = pycompat.sysbytes(repr(nonnormalcomputedmap)) | |||
|
37 | ui.develwarn(b"[map] %s\n" % b_nonnormalcomputed, config=b'dirstate') | |||
34 |
|
38 | |||
35 |
|
39 | |||
36 | def _checkdirstate(orig, self, arg): |
|
40 | def _checkdirstate(orig, self, *args, **kwargs): | |
37 | """Check nonnormal set consistency before and after the call to orig""" |
|
41 | """Check nonnormal set consistency before and after the call to orig""" | |
38 | checkconsistency( |
|
42 | checkconsistency( | |
39 | self._ui, orig, self._map, self._map.nonnormalset, b"before" |
|
43 | self._ui, orig, self._map, self._map.nonnormalset, b"before" | |
40 | ) |
|
44 | ) | |
41 | r = orig(self, arg) |
|
45 | r = orig(self, *args, **kwargs) | |
42 | checkconsistency( |
|
46 | checkconsistency( | |
43 | self._ui, orig, self._map, self._map.nonnormalset, b"after" |
|
47 | self._ui, orig, self._map, self._map.nonnormalset, b"after" | |
44 | ) |
|
48 | ) | |
45 | return r |
|
49 | return r | |
46 |
|
50 | |||
47 |
|
51 | |||
48 | def extsetup(ui): |
|
52 | def extsetup(ui): | |
49 | """Wrap functions modifying dirstate to check nonnormalset consistency""" |
|
53 | """Wrap functions modifying dirstate to check nonnormalset consistency""" | |
50 | dirstatecl = dirstate.dirstate |
|
54 | dirstatecl = dirstate.dirstate | |
51 | devel = ui.configbool(b'devel', b'all-warnings') |
|
55 | devel = ui.configbool(b'devel', b'all-warnings') | |
52 | paranoid = ui.configbool(b'experimental', b'nonnormalparanoidcheck') |
|
56 | paranoid = ui.configbool(b'experimental', b'nonnormalparanoidcheck') | |
53 | if devel: |
|
57 | if devel: | |
54 | extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate) |
|
58 | extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate) | |
55 | if paranoid: |
|
59 | if paranoid: | |
56 | # We don't do all these checks when paranoid is disable as it would |
|
60 | # We don't do all these checks when paranoid is disable as it would | |
57 | # make the extension run very slowly on large repos |
|
61 | # make the extension run very slowly on large repos | |
58 | extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate) |
|
62 | extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate) | |
59 | extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate) |
|
63 | extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate) | |
60 | extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate) |
|
64 | extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate) | |
61 | extensions.wrapfunction(dirstatecl, 'write', _checkdirstate) |
|
65 | extensions.wrapfunction(dirstatecl, 'write', _checkdirstate) | |
62 | extensions.wrapfunction(dirstatecl, 'add', _checkdirstate) |
|
66 | extensions.wrapfunction(dirstatecl, 'add', _checkdirstate) | |
63 | extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate) |
|
67 | extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate) | |
64 | extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate) |
|
68 | extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate) | |
65 | extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate) |
|
69 | extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate) |
@@ -1,47 +1,60 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 | # Dump revlogs as raw data stream |
|
2 | # Dump revlogs as raw data stream | |
3 | # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump |
|
3 | # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump | |
4 |
|
4 | |||
5 | from __future__ import absolute_import, print_function |
|
5 | from __future__ import absolute_import, print_function | |
6 |
|
6 | |||
7 | import sys |
|
7 | import sys | |
8 | from mercurial.node import hex |
|
8 | from mercurial.node import hex | |
9 | from mercurial import ( |
|
9 | from mercurial import ( | |
10 | encoding, |
|
10 | encoding, | |
11 | pycompat, |
|
11 | pycompat, | |
12 | revlog, |
|
12 | revlog, | |
13 | ) |
|
13 | ) | |
14 | from mercurial.utils import procutil |
|
14 | from mercurial.utils import procutil | |
15 |
|
15 | |||
|
16 | from mercurial.revlogutils import ( | |||
|
17 | constants as revlog_constants, | |||
|
18 | ) | |||
|
19 | ||||
16 | for fp in (sys.stdin, sys.stdout, sys.stderr): |
|
20 | for fp in (sys.stdin, sys.stdout, sys.stderr): | |
17 | procutil.setbinary(fp) |
|
21 | procutil.setbinary(fp) | |
18 |
|
22 | |||
19 |
|
23 | |||
20 | def binopen(path, mode=b'rb'): |
|
24 | def binopen(path, mode=b'rb'): | |
21 | if b'b' not in mode: |
|
25 | if b'b' not in mode: | |
22 | mode = mode + b'b' |
|
26 | mode = mode + b'b' | |
23 | return open(path, pycompat.sysstr(mode)) |
|
27 | return open(path, pycompat.sysstr(mode)) | |
24 |
|
28 | |||
25 |
|
29 | |||
26 | binopen.options = {} |
|
30 | binopen.options = {} | |
27 |
|
31 | |||
28 |
|
32 | |||
29 | def printb(data, end=b'\n'): |
|
33 | def printb(data, end=b'\n'): | |
30 | sys.stdout.flush() |
|
34 | sys.stdout.flush() | |
31 | procutil.stdout.write(data + end) |
|
35 | procutil.stdout.write(data + end) | |
32 |
|
36 | |||
33 |
|
37 | |||
34 | for f in sys.argv[1:]: |
|
38 | for f in sys.argv[1:]: | |
35 |
|
|
39 | localf = encoding.strtolocal(f) | |
|
40 | if not localf.endswith(b'.i'): | |||
|
41 | print("file:", f, file=sys.stderr) | |||
|
42 | print(" invalid filename", file=sys.stderr) | |||
|
43 | ||||
|
44 | r = revlog.revlog( | |||
|
45 | binopen, | |||
|
46 | target=(revlog_constants.KIND_OTHER, b'dump-revlog'), | |||
|
47 | radix=localf[:-2], | |||
|
48 | ) | |||
36 | print("file:", f) |
|
49 | print("file:", f) | |
37 | for i in r: |
|
50 | for i in r: | |
38 | n = r.node(i) |
|
51 | n = r.node(i) | |
39 | p = r.parents(n) |
|
52 | p = r.parents(n) | |
40 | d = r.revision(n) |
|
53 | d = r.revision(n) | |
41 | printb(b"node: %s" % hex(n)) |
|
54 | printb(b"node: %s" % hex(n)) | |
42 | printb(b"linkrev: %d" % r.linkrev(i)) |
|
55 | printb(b"linkrev: %d" % r.linkrev(i)) | |
43 | printb(b"parents: %s %s" % (hex(p[0]), hex(p[1]))) |
|
56 | printb(b"parents: %s %s" % (hex(p[0]), hex(p[1]))) | |
44 | printb(b"length: %d" % len(d)) |
|
57 | printb(b"length: %d" % len(d)) | |
45 | printb(b"-start-") |
|
58 | printb(b"-start-") | |
46 | printb(d) |
|
59 | printb(d) | |
47 | printb(b"-end-") |
|
60 | printb(b"-end-") |
@@ -1,366 +1,371 b'' | |||||
1 | from __future__ import absolute_import, print_function |
|
1 | from __future__ import absolute_import, print_function | |
2 |
|
2 | |||
3 | import argparse |
|
3 | import argparse | |
|
4 | import os | |||
4 | import struct |
|
5 | import struct | |
5 | import sys |
|
6 | import sys | |
6 | import zipfile |
|
7 | import zipfile | |
7 |
|
8 | |||
|
9 | # Add ../.. to sys.path as an absolute path so we can import hg modules | |||
|
10 | hgloc = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) | |||
|
11 | sys.path[0:0] = [hgloc] | |||
|
12 | ||||
8 | from mercurial import ( |
|
13 | from mercurial import ( | |
9 | hg, |
|
14 | hg, | |
10 | ui as uimod, |
|
15 | ui as uimod, | |
11 | ) |
|
16 | ) | |
12 |
|
17 | |||
13 | ap = argparse.ArgumentParser() |
|
18 | ap = argparse.ArgumentParser() | |
14 | ap.add_argument("out", metavar="some.zip", type=str, nargs=1) |
|
19 | ap.add_argument("out", metavar="some.zip", type=str, nargs=1) | |
15 | args = ap.parse_args() |
|
20 | args = ap.parse_args() | |
16 |
|
21 | |||
17 |
|
22 | |||
18 | if sys.version_info[0] < 3: |
|
23 | if sys.version_info[0] < 3: | |
19 |
|
24 | |||
20 | class py2reprhack(object): |
|
25 | class py2reprhack(object): | |
21 | def __repr__(self): |
|
26 | def __repr__(self): | |
22 | """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__""" |
|
27 | """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__""" | |
23 | return self.__bytes__() |
|
28 | return self.__bytes__() | |
24 |
|
29 | |||
25 |
|
30 | |||
26 | else: |
|
31 | else: | |
27 |
|
32 | |||
28 | class py2reprhack(object): |
|
33 | class py2reprhack(object): | |
29 | """Not needed on py3.""" |
|
34 | """Not needed on py3.""" | |
30 |
|
35 | |||
31 |
|
36 | |||
32 | class deltafrag(py2reprhack): |
|
37 | class deltafrag(py2reprhack): | |
33 | def __init__(self, start, end, data): |
|
38 | def __init__(self, start, end, data): | |
34 | self.start = start |
|
39 | self.start = start | |
35 | self.end = end |
|
40 | self.end = end | |
36 | self.data = data |
|
41 | self.data = data | |
37 |
|
42 | |||
38 | def __bytes__(self): |
|
43 | def __bytes__(self): | |
39 | return ( |
|
44 | return ( | |
40 | struct.pack(">lll", self.start, self.end, len(self.data)) |
|
45 | struct.pack(">lll", self.start, self.end, len(self.data)) | |
41 | + self.data |
|
46 | + self.data | |
42 | ) |
|
47 | ) | |
43 |
|
48 | |||
44 |
|
49 | |||
45 | class delta(py2reprhack): |
|
50 | class delta(py2reprhack): | |
46 | def __init__(self, frags): |
|
51 | def __init__(self, frags): | |
47 | self.frags = frags |
|
52 | self.frags = frags | |
48 |
|
53 | |||
49 | def __bytes__(self): |
|
54 | def __bytes__(self): | |
50 | return b''.join(bytes(f) for f in self.frags) |
|
55 | return b''.join(bytes(f) for f in self.frags) | |
51 |
|
56 | |||
52 |
|
57 | |||
53 | class corpus(py2reprhack): |
|
58 | class corpus(py2reprhack): | |
54 | def __init__(self, base, deltas): |
|
59 | def __init__(self, base, deltas): | |
55 | self.base = base |
|
60 | self.base = base | |
56 | self.deltas = deltas |
|
61 | self.deltas = deltas | |
57 |
|
62 | |||
58 | def __bytes__(self): |
|
63 | def __bytes__(self): | |
59 | deltas = [bytes(d) for d in self.deltas] |
|
64 | deltas = [bytes(d) for d in self.deltas] | |
60 | parts = ( |
|
65 | parts = ( | |
61 | [ |
|
66 | [ | |
62 | struct.pack(">B", len(deltas) + 1), |
|
67 | struct.pack(">B", len(deltas) + 1), | |
63 | struct.pack(">H", len(self.base)), |
|
68 | struct.pack(">H", len(self.base)), | |
64 | ] |
|
69 | ] | |
65 | + [struct.pack(">H", len(d)) for d in deltas] |
|
70 | + [struct.pack(">H", len(d)) for d in deltas] | |
66 | + [self.base] |
|
71 | + [self.base] | |
67 | + deltas |
|
72 | + deltas | |
68 | ) |
|
73 | ) | |
69 | return b''.join(parts) |
|
74 | return b''.join(parts) | |
70 |
|
75 | |||
71 |
|
76 | |||
72 | with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf: |
|
77 | with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf: | |
73 | # Manually constructed entries |
|
78 | # Manually constructed entries | |
74 | zf.writestr( |
|
79 | zf.writestr( | |
75 | "one_delta_applies", |
|
80 | "one_delta_applies", | |
76 | bytes(corpus(b'a', [delta([deltafrag(0, 1, b'b')])])), |
|
81 | bytes(corpus(b'a', [delta([deltafrag(0, 1, b'b')])])), | |
77 | ) |
|
82 | ) | |
78 | zf.writestr( |
|
83 | zf.writestr( | |
79 | "one_delta_starts_late", |
|
84 | "one_delta_starts_late", | |
80 | bytes(corpus(b'a', [delta([deltafrag(3, 1, b'b')])])), |
|
85 | bytes(corpus(b'a', [delta([deltafrag(3, 1, b'b')])])), | |
81 | ) |
|
86 | ) | |
82 | zf.writestr( |
|
87 | zf.writestr( | |
83 | "one_delta_ends_late", |
|
88 | "one_delta_ends_late", | |
84 | bytes(corpus(b'a', [delta([deltafrag(0, 20, b'b')])])), |
|
89 | bytes(corpus(b'a', [delta([deltafrag(0, 20, b'b')])])), | |
85 | ) |
|
90 | ) | |
86 |
|
91 | |||
87 | try: |
|
92 | try: | |
88 | # Generated from repo data |
|
93 | # Generated from repo data | |
89 | r = hg.repository(uimod.ui(), b'../..') |
|
94 | r = hg.repository(uimod.ui(), b'../..') | |
90 | fl = r.file(b'mercurial/manifest.py') |
|
95 | fl = r.file(b'mercurial/manifest.py') | |
91 | rl = getattr(fl, '_revlog', fl) |
|
96 | rl = getattr(fl, '_revlog', fl) | |
92 | bins = rl._chunks(rl._deltachain(10)[0]) |
|
97 | bins = rl._chunks(rl._deltachain(10)[0]) | |
93 | zf.writestr('manifest_py_rev_10', bytes(corpus(bins[0], bins[1:]))) |
|
98 | zf.writestr('manifest_py_rev_10', bytes(corpus(bins[0], bins[1:]))) | |
94 | except: # skip this, so no re-raises |
|
99 | except: # skip this, so no re-raises | |
95 | print('skipping seed file from repo data') |
|
100 | print('skipping seed file from repo data') | |
96 | # Automatically discovered by running the fuzzer |
|
101 | # Automatically discovered by running the fuzzer | |
97 | zf.writestr( |
|
102 | zf.writestr( | |
98 | "mpatch_decode_old_overread", b"\x02\x00\x00\x00\x02\x00\x00\x00" |
|
103 | "mpatch_decode_old_overread", b"\x02\x00\x00\x00\x02\x00\x00\x00" | |
99 | ) |
|
104 | ) | |
100 | # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876 |
|
105 | # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876 | |
101 | zf.writestr( |
|
106 | zf.writestr( | |
102 | "mpatch_ossfuzz_getbe32_ubsan", |
|
107 | "mpatch_ossfuzz_getbe32_ubsan", | |
103 | b"\x02\x00\x00\x00\x0c \xff\xff\xff\xff ", |
|
108 | b"\x02\x00\x00\x00\x0c \xff\xff\xff\xff ", | |
104 | ) |
|
109 | ) | |
105 | zf.writestr( |
|
110 | zf.writestr( | |
106 | "mpatch_apply_over_memcpy", |
|
111 | "mpatch_apply_over_memcpy", | |
107 | b'\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00' |
|
112 | b'\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00' | |
108 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
113 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
109 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00' |
|
114 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00' | |
110 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
115 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
111 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
116 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
112 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
117 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
113 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
118 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
114 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
119 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
115 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
120 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
116 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
121 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
117 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
122 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
118 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
123 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
119 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
124 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
120 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
125 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
121 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
126 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
122 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
127 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
123 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
128 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
124 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
129 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
125 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
130 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
126 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
131 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
127 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
132 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
128 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
133 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
129 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
134 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
130 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
135 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
131 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
136 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
132 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
137 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
133 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
138 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
134 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
139 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
135 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
140 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
136 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
141 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
137 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00' |
|
142 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00' | |
138 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
143 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
139 | b'\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00' |
|
144 | b'\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00' | |
140 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
145 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
141 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
146 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
142 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
147 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
143 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
148 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
144 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
149 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
145 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
150 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
146 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
151 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
147 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
152 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
148 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
153 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
149 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
154 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
150 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
155 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
151 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
156 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
152 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
157 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
153 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
158 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
154 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
159 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
155 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
160 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
156 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
161 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
157 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
162 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
158 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
163 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
159 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
164 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
160 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
165 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
161 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
166 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
162 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
167 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
163 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
168 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
164 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
169 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
165 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
170 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
166 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
171 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
167 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
172 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
168 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
173 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
169 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
174 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
170 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
175 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
171 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
176 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
172 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00' |
|
177 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00' | |
173 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
178 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
174 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
179 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
175 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
180 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
176 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
181 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
177 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
182 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
178 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
183 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
179 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
184 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
180 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
185 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
181 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
186 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
182 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
187 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
183 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
188 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
184 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
189 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
185 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
190 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
186 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
191 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
187 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
192 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
188 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
193 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
189 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
194 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
190 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
195 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
191 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
196 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
192 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
197 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
193 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
198 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
194 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
199 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
195 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
200 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
196 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
201 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
197 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18' |
|
202 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18' | |
198 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
203 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
199 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
204 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
200 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
205 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
201 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
206 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
202 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
207 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
203 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
208 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
204 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
209 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
205 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
210 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
206 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
211 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
207 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
212 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
208 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
213 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
209 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
214 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
210 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
215 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
211 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
216 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
212 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
217 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
213 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
218 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
214 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
219 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
215 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
220 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
216 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
221 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
217 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
222 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
218 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
223 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
219 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
224 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
220 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
225 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
221 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
226 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
222 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
227 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
223 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
228 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
224 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
229 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
225 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
230 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
226 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
231 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
227 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
232 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
228 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
233 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
229 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
234 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
230 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
235 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
231 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
236 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
232 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
237 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
233 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
238 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
234 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
239 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
235 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
240 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
236 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
241 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
237 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
242 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
238 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
243 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
239 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
244 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
240 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
245 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
241 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
246 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
242 | b'\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
247 | b'\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
243 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
248 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
244 | b'\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00' |
|
249 | b'\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00' | |
245 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
250 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
246 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
251 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
247 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
252 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
248 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
253 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
249 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
254 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
250 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
255 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
251 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
256 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
252 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
257 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
253 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
258 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
254 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
259 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
255 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
260 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
256 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
261 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
257 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
262 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
258 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
263 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
259 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
264 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
260 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
265 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
261 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
266 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
262 | b'\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
267 | b'\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
263 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
268 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
264 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
269 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
265 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
270 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
266 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
271 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
267 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
272 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
268 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
273 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
269 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
274 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
270 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
275 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
271 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
276 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
272 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
277 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
273 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
278 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
274 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
279 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
275 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
280 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
276 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
281 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
277 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
282 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
278 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
283 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
279 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
284 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
280 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
285 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
281 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
286 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
282 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
287 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
283 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
288 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
284 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
289 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
285 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
290 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
286 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
291 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
287 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
292 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
288 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
293 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
289 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
294 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
290 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
295 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
291 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
296 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
292 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
297 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
293 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
298 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
294 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
299 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
295 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
300 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
296 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
301 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
297 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
302 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
298 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
303 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
299 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
304 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
300 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
305 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
301 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
306 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
302 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
307 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
303 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
308 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
304 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
309 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
305 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
310 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
306 | b'\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
311 | b'\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
307 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
312 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
308 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00' |
|
313 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00' | |
309 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
314 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
310 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
315 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
311 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
316 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
312 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
317 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
313 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
318 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
314 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
319 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
315 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
320 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
316 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
321 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
317 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
322 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
318 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
323 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
319 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
324 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
320 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
325 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
321 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
326 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
322 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
327 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
323 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
328 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
324 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
329 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
325 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
330 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
326 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
331 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
327 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00' |
|
332 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00' | |
328 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
333 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
329 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
334 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
330 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
335 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
331 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
336 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
332 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
337 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
333 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
338 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
334 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
339 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
335 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
340 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
336 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
341 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
337 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
342 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
338 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
343 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
339 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
344 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
340 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
345 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
341 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
346 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
342 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00' |
|
347 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00' | |
343 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
348 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
344 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
349 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
345 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
350 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
346 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
351 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
347 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00' |
|
352 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00' | |
348 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
353 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
349 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
354 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
350 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
355 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
351 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
356 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
352 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
357 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
353 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
358 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
354 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
359 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
355 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
360 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
356 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
361 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
357 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
362 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
358 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
363 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
359 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
364 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
360 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
365 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
361 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
366 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
362 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
367 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
363 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00' |
|
368 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00' | |
364 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
369 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' | |
365 | b'\x00\x00\x00\x00', |
|
370 | b'\x00\x00\x00\x00', | |
366 | ) |
|
371 | ) |
@@ -1,141 +1,174 b'' | |||||
1 | stages: |
|
1 | stages: | |
2 | - tests |
|
2 | - tests | |
3 | - phabricator |
|
3 | - phabricator | |
4 |
|
4 | |||
5 | image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG |
|
5 | image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG | |
6 |
|
6 | |||
7 | variables: |
|
7 | variables: | |
8 | PYTHON: python |
|
8 | PYTHON: python | |
9 | TEST_HGMODULEPOLICY: "allow" |
|
9 | TEST_HGMODULEPOLICY: "allow" | |
10 | HG_CI_IMAGE_TAG: "latest" |
|
10 | HG_CI_IMAGE_TAG: "latest" | |
11 | TEST_HGTESTS_ALLOW_NETIO: "0" |
|
11 | TEST_HGTESTS_ALLOW_NETIO: "0" | |
12 |
|
12 | |||
13 | .runtests_template: &runtests |
|
13 | .runtests_template: &runtests | |
14 | stage: tests |
|
14 | stage: tests | |
15 | # The runner made a clone as root. |
|
15 | # The runner made a clone as root. | |
16 | # We make a new clone owned by user used to run the step. |
|
16 | # We make a new clone owned by user used to run the step. | |
17 | before_script: |
|
17 | before_script: | |
18 | - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no |
|
18 | - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no | |
19 | - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` |
|
19 | - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` | |
20 | - cd /tmp/mercurial-ci/ |
|
20 | - cd /tmp/mercurial-ci/ | |
21 | - ls -1 tests/test-check-*.* > /tmp/check-tests.txt |
|
21 | - ls -1 tests/test-check-*.* > /tmp/check-tests.txt | |
22 | - black --version |
|
22 | - black --version | |
23 | - clang-format --version |
|
23 | - clang-format --version | |
24 | script: |
|
24 | script: | |
25 | - echo "python used, $PYTHON" |
|
25 | - echo "python used, $PYTHON" | |
26 | - echo "$RUNTEST_ARGS" |
|
26 | - echo "$RUNTEST_ARGS" | |
27 | - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS |
|
27 | - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS | |
28 |
|
28 | |||
29 | checks-py2: |
|
29 | checks-py2: | |
30 | <<: *runtests |
|
30 | <<: *runtests | |
31 | variables: |
|
31 | variables: | |
32 | RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt" |
|
32 | RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt" | |
33 |
|
33 | |||
34 | checks-py3: |
|
34 | checks-py3: | |
35 | <<: *runtests |
|
35 | <<: *runtests | |
36 | variables: |
|
36 | variables: | |
37 | RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt" |
|
37 | RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt" | |
38 | PYTHON: python3 |
|
38 | PYTHON: python3 | |
39 |
|
39 | |||
40 | rust-cargo-test-py2: &rust_cargo_test |
|
40 | rust-cargo-test-py2: &rust_cargo_test | |
41 | stage: tests |
|
41 | stage: tests | |
42 | script: |
|
42 | script: | |
43 | - echo "python used, $PYTHON" |
|
43 | - echo "python used, $PYTHON" | |
44 | - make rust-tests |
|
44 | - make rust-tests | |
45 |
|
45 | |||
46 | rust-cargo-test-py3: |
|
46 | rust-cargo-test-py3: | |
47 | stage: tests |
|
47 | stage: tests | |
48 | <<: *rust_cargo_test |
|
48 | <<: *rust_cargo_test | |
49 | variables: |
|
49 | variables: | |
50 | PYTHON: python3 |
|
50 | PYTHON: python3 | |
51 |
|
51 | |||
52 | phabricator-refresh: |
|
52 | phabricator-refresh: | |
53 | stage: phabricator |
|
53 | stage: phabricator | |
54 | variables: |
|
54 | variables: | |
55 | DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)" |
|
55 | DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)" | |
56 | STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}" |
|
56 | STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}" | |
57 | script: |
|
57 | script: | |
58 | - | |
|
58 | - | | |
59 | if [ `hg branch` == "stable" ]; then |
|
59 | if [ `hg branch` == "stable" ]; then | |
60 | ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT"; |
|
60 | ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT"; | |
61 | else |
|
61 | else | |
62 | ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT"; |
|
62 | ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT"; | |
63 | fi |
|
63 | fi | |
64 |
|
64 | |||
65 | test-py2: |
|
65 | test-py2: | |
66 | <<: *runtests |
|
66 | <<: *runtests | |
67 | variables: |
|
67 | variables: | |
68 | RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" |
|
68 | RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" | |
69 | TEST_HGMODULEPOLICY: "c" |
|
69 | TEST_HGMODULEPOLICY: "c" | |
70 | TEST_HGTESTS_ALLOW_NETIO: "1" |
|
70 | TEST_HGTESTS_ALLOW_NETIO: "1" | |
71 |
|
71 | |||
72 | test-py3: |
|
72 | test-py3: | |
73 | <<: *runtests |
|
73 | <<: *runtests | |
74 | variables: |
|
74 | variables: | |
75 | RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" |
|
75 | RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" | |
76 | PYTHON: python3 |
|
76 | PYTHON: python3 | |
77 | TEST_HGMODULEPOLICY: "c" |
|
77 | TEST_HGMODULEPOLICY: "c" | |
78 | TEST_HGTESTS_ALLOW_NETIO: "1" |
|
78 | TEST_HGTESTS_ALLOW_NETIO: "1" | |
79 |
|
79 | |||
80 | test-py2-pure: |
|
80 | test-py2-pure: | |
81 | <<: *runtests |
|
81 | <<: *runtests | |
82 | variables: |
|
82 | variables: | |
83 | RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt" |
|
83 | RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt" | |
84 | TEST_HGMODULEPOLICY: "py" |
|
84 | TEST_HGMODULEPOLICY: "py" | |
85 |
|
85 | |||
86 | test-py3-pure: |
|
86 | test-py3-pure: | |
87 | <<: *runtests |
|
87 | <<: *runtests | |
88 | variables: |
|
88 | variables: | |
89 | RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt" |
|
89 | RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt" | |
90 | PYTHON: python3 |
|
90 | PYTHON: python3 | |
91 | TEST_HGMODULEPOLICY: "py" |
|
91 | TEST_HGMODULEPOLICY: "py" | |
92 |
|
92 | |||
93 | test-py2-rust: |
|
93 | test-py2-rust: | |
94 | <<: *runtests |
|
94 | <<: *runtests | |
95 | variables: |
|
95 | variables: | |
96 | HGWITHRUSTEXT: cpython |
|
96 | HGWITHRUSTEXT: cpython | |
97 | RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" |
|
97 | RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" | |
98 | TEST_HGMODULEPOLICY: "rust+c" |
|
98 | TEST_HGMODULEPOLICY: "rust+c" | |
99 |
|
99 | |||
100 | test-py3-rust: |
|
100 | test-py3-rust: | |
101 | <<: *runtests |
|
101 | <<: *runtests | |
102 | variables: |
|
102 | variables: | |
103 | HGWITHRUSTEXT: cpython |
|
103 | HGWITHRUSTEXT: cpython | |
104 | RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" |
|
104 | RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" | |
105 | PYTHON: python3 |
|
105 | PYTHON: python3 | |
106 | TEST_HGMODULEPOLICY: "rust+c" |
|
106 | TEST_HGMODULEPOLICY: "rust+c" | |
107 |
|
107 | |||
108 | test-py3-rhg: |
|
108 | test-py3-rhg: | |
109 | <<: *runtests |
|
109 | <<: *runtests | |
110 | variables: |
|
110 | variables: | |
111 | HGWITHRUSTEXT: cpython |
|
111 | HGWITHRUSTEXT: cpython | |
112 | RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt" |
|
112 | RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt" | |
113 | PYTHON: python3 |
|
113 | PYTHON: python3 | |
114 | TEST_HGMODULEPOLICY: "rust+c" |
|
114 | TEST_HGMODULEPOLICY: "rust+c" | |
115 |
|
115 | |||
116 | test-py2-chg: |
|
116 | test-py2-chg: | |
117 | <<: *runtests |
|
117 | <<: *runtests | |
118 | variables: |
|
118 | variables: | |
119 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg" |
|
119 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg" | |
120 | TEST_HGMODULEPOLICY: "c" |
|
120 | TEST_HGMODULEPOLICY: "c" | |
121 |
|
121 | |||
122 | test-py3-chg: |
|
122 | test-py3-chg: | |
123 | <<: *runtests |
|
123 | <<: *runtests | |
124 | variables: |
|
124 | variables: | |
125 | PYTHON: python3 |
|
125 | PYTHON: python3 | |
126 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg" |
|
126 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg" | |
127 | TEST_HGMODULEPOLICY: "c" |
|
127 | TEST_HGMODULEPOLICY: "c" | |
128 |
|
128 | |||
129 | check-pytype-py3: |
|
129 | check-pytype-py3: | |
130 | extends: .runtests_template |
|
130 | extends: .runtests_template | |
131 | when: manual |
|
131 | when: manual | |
132 | before_script: |
|
132 | before_script: | |
133 | - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no |
|
133 | - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no | |
134 | - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` |
|
134 | - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` | |
135 | - cd /tmp/mercurial-ci/ |
|
135 | - cd /tmp/mercurial-ci/ | |
136 | - make local PYTHON=$PYTHON |
|
136 | - make local PYTHON=$PYTHON | |
137 | - $PYTHON -m pip install --user -U pytype==2021.04.15 |
|
137 | - $PYTHON -m pip install --user -U pytype==2021.04.15 | |
138 | variables: |
|
138 | variables: | |
139 | RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t" |
|
139 | RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t" | |
140 | PYTHON: python3 |
|
140 | PYTHON: python3 | |
141 | TEST_HGMODULEPOLICY: "c" |
|
141 | TEST_HGMODULEPOLICY: "c" | |
|
142 | ||||
|
143 | # `sh.exe --login` sets a couple of extra environment variables that are defined | |||
|
144 | # in the MinGW shell, but switches CWD to /home/$username. The previous value | |||
|
145 | # is stored in OLDPWD. Of the added variables, MSYSTEM is crucial to running | |||
|
146 | # run-tests.py- it is needed to make run-tests.py generate a `python3` script | |||
|
147 | # that satisfies the various shebang lines and delegates to `py -3`. | |||
|
148 | .window_runtests_template: &windows_runtests | |||
|
149 | stage: tests | |||
|
150 | before_script: | |||
|
151 | # Temporary until this is adjusted in the environment | |||
|
152 | - $Env:TEMP="C:/Temp" | |||
|
153 | - $Env:TMP="C:/Temp" | |||
|
154 | # TODO: find/install cvs, bzr, perforce, gpg, sqlite3 | |||
|
155 | ||||
|
156 | script: | |||
|
157 | - echo "Entering script section" | |||
|
158 | - echo "python used, $Env:PYTHON" | |||
|
159 | - Invoke-Expression "$Env:PYTHON -V" | |||
|
160 | - Invoke-Expression "$Env:PYTHON -m black --version" | |||
|
161 | - echo "$Env:RUNTEST_ARGS" | |||
|
162 | ||||
|
163 | - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS' | |||
|
164 | ||||
|
165 | windows-py3: | |||
|
166 | <<: *windows_runtests | |||
|
167 | when: manual | |||
|
168 | tags: | |||
|
169 | - windows | |||
|
170 | timeout: 2h | |||
|
171 | variables: | |||
|
172 | TEST_HGMODULEPOLICY: "c" | |||
|
173 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt" | |||
|
174 | PYTHON: py -3 |
@@ -1,111 +1,118 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 by Intevation GmbH <intevation@intevation.de> |
|
3 | # Copyright 2005-2007 by Intevation GmbH <intevation@intevation.de> | |
4 | # |
|
4 | # | |
5 | # Author(s): |
|
5 | # Author(s): | |
6 | # Thomas Arendsen Hein <thomas@intevation.de> |
|
6 | # Thomas Arendsen Hein <thomas@intevation.de> | |
7 | # |
|
7 | # | |
8 | # This software may be used and distributed according to the terms of the |
|
8 | # This software may be used and distributed according to the terms of the | |
9 | # GNU General Public License version 2 or any later version. |
|
9 | # GNU General Public License version 2 or any later version. | |
10 |
|
10 | |||
11 | """ |
|
11 | """ | |
12 | hg-ssh - a wrapper for ssh access to a limited set of mercurial repos |
|
12 | hg-ssh - a wrapper for ssh access to a limited set of mercurial repos | |
13 |
|
13 | |||
14 | To be used in ~/.ssh/authorized_keys with the "command" option, see sshd(8): |
|
14 | To be used in ~/.ssh/authorized_keys with the "command" option, see sshd(8): | |
15 | command="hg-ssh path/to/repo1 /path/to/repo2 ~/repo3 ~user/repo4" ssh-dss ... |
|
15 | command="hg-ssh path/to/repo1 /path/to/repo2 ~/repo3 ~user/repo4" ssh-dss ... | |
16 | (probably together with these other useful options: |
|
16 | (probably together with these other useful options: | |
17 | no-port-forwarding,no-X11-forwarding,no-agent-forwarding) |
|
17 | no-port-forwarding,no-X11-forwarding,no-agent-forwarding) | |
18 |
|
18 | |||
19 | This allows pull/push over ssh from/to the repositories given as arguments. |
|
19 | This allows pull/push over ssh from/to the repositories given as arguments. | |
20 |
|
20 | |||
21 | If all your repositories are subdirectories of a common directory, you can |
|
21 | If all your repositories are subdirectories of a common directory, you can | |
22 | allow shorter paths with: |
|
22 | allow shorter paths with: | |
23 | command="cd path/to/my/repositories && hg-ssh repo1 subdir/repo2" |
|
23 | command="cd path/to/my/repositories && hg-ssh repo1 subdir/repo2" | |
24 |
|
24 | |||
25 | You can use pattern matching of your normal shell, e.g.: |
|
25 | You can use pattern matching of your normal shell, e.g.: | |
26 | command="cd repos && hg-ssh user/thomas/* projects/{mercurial,foo}" |
|
26 | command="cd repos && hg-ssh user/thomas/* projects/{mercurial,foo}" | |
27 |
|
27 | |||
28 | You can also add a --read-only flag to allow read-only access to a key, e.g.: |
|
28 | You can also add a --read-only flag to allow read-only access to a key, e.g.: | |
29 | command="hg-ssh --read-only repos/*" |
|
29 | command="hg-ssh --read-only repos/*" | |
30 | """ |
|
30 | """ | |
31 | from __future__ import absolute_import |
|
31 | from __future__ import absolute_import | |
32 |
|
32 | |||
33 | import os |
|
33 | import os | |
|
34 | import re | |||
34 | import shlex |
|
35 | import shlex | |
35 | import sys |
|
36 | import sys | |
36 |
|
37 | |||
37 | # enable importing on demand to reduce startup time |
|
38 | # enable importing on demand to reduce startup time | |
38 | import hgdemandimport |
|
39 | import hgdemandimport | |
39 |
|
40 | |||
40 | hgdemandimport.enable() |
|
41 | hgdemandimport.enable() | |
41 |
|
42 | |||
42 | from mercurial import ( |
|
43 | from mercurial import ( | |
43 | dispatch, |
|
44 | dispatch, | |
44 | pycompat, |
|
45 | pycompat, | |
45 | ui as uimod, |
|
46 | ui as uimod, | |
46 | ) |
|
47 | ) | |
47 |
|
48 | |||
48 |
|
49 | |||
49 | def main(): |
|
50 | def main(): | |
50 | # Prevent insertion/deletion of CRs |
|
51 | # Prevent insertion/deletion of CRs | |
51 | dispatch.initstdio() |
|
52 | dispatch.initstdio() | |
52 |
|
53 | |||
53 | cwd = os.getcwd() |
|
54 | cwd = os.getcwd() | |
|
55 | if os.name == 'nt': | |||
|
56 | # os.getcwd() is inconsistent on the capitalization of the drive | |||
|
57 | # letter, so adjust it. see https://bugs.python.org/issue40368 | |||
|
58 | if re.match('^[a-z]:', cwd): | |||
|
59 | cwd = cwd[0:1].upper() + cwd[1:] | |||
|
60 | ||||
54 | readonly = False |
|
61 | readonly = False | |
55 | args = sys.argv[1:] |
|
62 | args = sys.argv[1:] | |
56 | while len(args): |
|
63 | while len(args): | |
57 | if args[0] == '--read-only': |
|
64 | if args[0] == '--read-only': | |
58 | readonly = True |
|
65 | readonly = True | |
59 | args.pop(0) |
|
66 | args.pop(0) | |
60 | else: |
|
67 | else: | |
61 | break |
|
68 | break | |
62 | allowed_paths = [ |
|
69 | allowed_paths = [ | |
63 | os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) |
|
70 | os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) | |
64 | for path in args |
|
71 | for path in args | |
65 | ] |
|
72 | ] | |
66 | orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?') |
|
73 | orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?') | |
67 | try: |
|
74 | try: | |
68 | cmdargv = shlex.split(orig_cmd) |
|
75 | cmdargv = shlex.split(orig_cmd) | |
69 | except ValueError as e: |
|
76 | except ValueError as e: | |
70 | sys.stderr.write('Illegal command "%s": %s\n' % (orig_cmd, e)) |
|
77 | sys.stderr.write('Illegal command "%s": %s\n' % (orig_cmd, e)) | |
71 | sys.exit(255) |
|
78 | sys.exit(255) | |
72 |
|
79 | |||
73 | if cmdargv[:2] == ['hg', '-R'] and cmdargv[3:] == ['serve', '--stdio']: |
|
80 | if cmdargv[:2] == ['hg', '-R'] and cmdargv[3:] == ['serve', '--stdio']: | |
74 | path = cmdargv[2] |
|
81 | path = cmdargv[2] | |
75 | repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) |
|
82 | repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) | |
76 | if repo in allowed_paths: |
|
83 | if repo in allowed_paths: | |
77 | cmd = [b'-R', pycompat.fsencode(repo), b'serve', b'--stdio'] |
|
84 | cmd = [b'-R', pycompat.fsencode(repo), b'serve', b'--stdio'] | |
78 | req = dispatch.request(cmd) |
|
85 | req = dispatch.request(cmd) | |
79 | if readonly: |
|
86 | if readonly: | |
80 | if not req.ui: |
|
87 | if not req.ui: | |
81 | req.ui = uimod.ui.load() |
|
88 | req.ui = uimod.ui.load() | |
82 | req.ui.setconfig( |
|
89 | req.ui.setconfig( | |
83 | b'hooks', |
|
90 | b'hooks', | |
84 | b'pretxnopen.hg-ssh', |
|
91 | b'pretxnopen.hg-ssh', | |
85 | b'python:__main__.rejectpush', |
|
92 | b'python:__main__.rejectpush', | |
86 | b'hg-ssh', |
|
93 | b'hg-ssh', | |
87 | ) |
|
94 | ) | |
88 | req.ui.setconfig( |
|
95 | req.ui.setconfig( | |
89 | b'hooks', |
|
96 | b'hooks', | |
90 | b'prepushkey.hg-ssh', |
|
97 | b'prepushkey.hg-ssh', | |
91 | b'python:__main__.rejectpush', |
|
98 | b'python:__main__.rejectpush', | |
92 | b'hg-ssh', |
|
99 | b'hg-ssh', | |
93 | ) |
|
100 | ) | |
94 | dispatch.dispatch(req) |
|
101 | dispatch.dispatch(req) | |
95 | else: |
|
102 | else: | |
96 | sys.stderr.write('Illegal repository "%s"\n' % repo) |
|
103 | sys.stderr.write('Illegal repository "%s"\n' % repo) | |
97 | sys.exit(255) |
|
104 | sys.exit(255) | |
98 | else: |
|
105 | else: | |
99 | sys.stderr.write('Illegal command "%s"\n' % orig_cmd) |
|
106 | sys.stderr.write('Illegal command "%s"\n' % orig_cmd) | |
100 | sys.exit(255) |
|
107 | sys.exit(255) | |
101 |
|
108 | |||
102 |
|
109 | |||
103 | def rejectpush(ui, **kwargs): |
|
110 | def rejectpush(ui, **kwargs): | |
104 | ui.warn((b"Permission denied\n")) |
|
111 | ui.warn((b"Permission denied\n")) | |
105 | # mercurial hooks use unix process conventions for hook return values |
|
112 | # mercurial hooks use unix process conventions for hook return values | |
106 | # so a truthy return means failure |
|
113 | # so a truthy return means failure | |
107 | return True |
|
114 | return True | |
108 |
|
115 | |||
109 |
|
116 | |||
110 | if __name__ == '__main__': |
|
117 | if __name__ == '__main__': | |
111 | main() |
|
118 | main() |
@@ -1,821 +1,821 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 |
|
2 | |||
3 | from __future__ import absolute_import, print_function |
|
3 | from __future__ import absolute_import, print_function | |
4 |
|
4 | |||
5 | import ast |
|
5 | import ast | |
6 | import collections |
|
6 | import collections | |
7 | import io |
|
7 | import io | |
8 | import os |
|
8 | import os | |
9 | import sys |
|
9 | import sys | |
10 |
|
10 | |||
11 | # Import a minimal set of stdlib modules needed for list_stdlib_modules() |
|
11 | # Import a minimal set of stdlib modules needed for list_stdlib_modules() | |
12 | # to work when run from a virtualenv. The modules were chosen empirically |
|
12 | # to work when run from a virtualenv. The modules were chosen empirically | |
13 | # so that the return value matches the return value without virtualenv. |
|
13 | # so that the return value matches the return value without virtualenv. | |
14 | if True: # disable lexical sorting checks |
|
14 | if True: # disable lexical sorting checks | |
15 | try: |
|
15 | try: | |
16 | import BaseHTTPServer as basehttpserver |
|
16 | import BaseHTTPServer as basehttpserver | |
17 | except ImportError: |
|
17 | except ImportError: | |
18 | basehttpserver = None |
|
18 | basehttpserver = None | |
19 | import zlib |
|
19 | import zlib | |
20 |
|
20 | |||
21 | import testparseutil |
|
21 | import testparseutil | |
22 |
|
22 | |||
23 | # Whitelist of modules that symbols can be directly imported from. |
|
23 | # Whitelist of modules that symbols can be directly imported from. | |
24 | allowsymbolimports = ( |
|
24 | allowsymbolimports = ( | |
25 | '__future__', |
|
25 | '__future__', | |
26 |
'b |
|
26 | 'breezy', | |
27 | 'hgclient', |
|
27 | 'hgclient', | |
28 | 'mercurial', |
|
28 | 'mercurial', | |
29 | 'mercurial.hgweb.common', |
|
29 | 'mercurial.hgweb.common', | |
30 | 'mercurial.hgweb.request', |
|
30 | 'mercurial.hgweb.request', | |
31 | 'mercurial.i18n', |
|
31 | 'mercurial.i18n', | |
32 | 'mercurial.interfaces', |
|
32 | 'mercurial.interfaces', | |
33 | 'mercurial.node', |
|
33 | 'mercurial.node', | |
34 | 'mercurial.pycompat', |
|
34 | 'mercurial.pycompat', | |
35 | # for revlog to re-export constant to extensions |
|
35 | # for revlog to re-export constant to extensions | |
36 | 'mercurial.revlogutils.constants', |
|
36 | 'mercurial.revlogutils.constants', | |
37 | 'mercurial.revlogutils.flagutil', |
|
37 | 'mercurial.revlogutils.flagutil', | |
38 | # for cffi modules to re-export pure functions |
|
38 | # for cffi modules to re-export pure functions | |
39 | 'mercurial.pure.base85', |
|
39 | 'mercurial.pure.base85', | |
40 | 'mercurial.pure.bdiff', |
|
40 | 'mercurial.pure.bdiff', | |
41 | 'mercurial.pure.mpatch', |
|
41 | 'mercurial.pure.mpatch', | |
42 | 'mercurial.pure.osutil', |
|
42 | 'mercurial.pure.osutil', | |
43 | 'mercurial.pure.parsers', |
|
43 | 'mercurial.pure.parsers', | |
44 | # third-party imports should be directly imported |
|
44 | # third-party imports should be directly imported | |
45 | 'mercurial.thirdparty', |
|
45 | 'mercurial.thirdparty', | |
46 | 'mercurial.thirdparty.attr', |
|
46 | 'mercurial.thirdparty.attr', | |
47 | 'mercurial.thirdparty.zope', |
|
47 | 'mercurial.thirdparty.zope', | |
48 | 'mercurial.thirdparty.zope.interface', |
|
48 | 'mercurial.thirdparty.zope.interface', | |
49 | ) |
|
49 | ) | |
50 |
|
50 | |||
51 | # Whitelist of symbols that can be directly imported. |
|
51 | # Whitelist of symbols that can be directly imported. | |
52 | directsymbols = ('demandimport',) |
|
52 | directsymbols = ('demandimport',) | |
53 |
|
53 | |||
54 | # Modules that must be aliased because they are commonly confused with |
|
54 | # Modules that must be aliased because they are commonly confused with | |
55 | # common variables and can create aliasing and readability issues. |
|
55 | # common variables and can create aliasing and readability issues. | |
56 | requirealias = { |
|
56 | requirealias = { | |
57 | 'ui': 'uimod', |
|
57 | 'ui': 'uimod', | |
58 | } |
|
58 | } | |
59 |
|
59 | |||
60 |
|
60 | |||
61 | def usingabsolute(root): |
|
61 | def usingabsolute(root): | |
62 | """Whether absolute imports are being used.""" |
|
62 | """Whether absolute imports are being used.""" | |
63 | if sys.version_info[0] >= 3: |
|
63 | if sys.version_info[0] >= 3: | |
64 | return True |
|
64 | return True | |
65 |
|
65 | |||
66 | for node in ast.walk(root): |
|
66 | for node in ast.walk(root): | |
67 | if isinstance(node, ast.ImportFrom): |
|
67 | if isinstance(node, ast.ImportFrom): | |
68 | if node.module == '__future__': |
|
68 | if node.module == '__future__': | |
69 | for n in node.names: |
|
69 | for n in node.names: | |
70 | if n.name == 'absolute_import': |
|
70 | if n.name == 'absolute_import': | |
71 | return True |
|
71 | return True | |
72 |
|
72 | |||
73 | return False |
|
73 | return False | |
74 |
|
74 | |||
75 |
|
75 | |||
76 | def walklocal(root): |
|
76 | def walklocal(root): | |
77 | """Recursively yield all descendant nodes but not in a different scope""" |
|
77 | """Recursively yield all descendant nodes but not in a different scope""" | |
78 | todo = collections.deque(ast.iter_child_nodes(root)) |
|
78 | todo = collections.deque(ast.iter_child_nodes(root)) | |
79 | yield root, False |
|
79 | yield root, False | |
80 | while todo: |
|
80 | while todo: | |
81 | node = todo.popleft() |
|
81 | node = todo.popleft() | |
82 | newscope = isinstance(node, ast.FunctionDef) |
|
82 | newscope = isinstance(node, ast.FunctionDef) | |
83 | if not newscope: |
|
83 | if not newscope: | |
84 | todo.extend(ast.iter_child_nodes(node)) |
|
84 | todo.extend(ast.iter_child_nodes(node)) | |
85 | yield node, newscope |
|
85 | yield node, newscope | |
86 |
|
86 | |||
87 |
|
87 | |||
88 | def dotted_name_of_path(path): |
|
88 | def dotted_name_of_path(path): | |
89 | """Given a relative path to a source file, return its dotted module name. |
|
89 | """Given a relative path to a source file, return its dotted module name. | |
90 |
|
90 | |||
91 | >>> dotted_name_of_path('mercurial/error.py') |
|
91 | >>> dotted_name_of_path('mercurial/error.py') | |
92 | 'mercurial.error' |
|
92 | 'mercurial.error' | |
93 | >>> dotted_name_of_path('zlibmodule.so') |
|
93 | >>> dotted_name_of_path('zlibmodule.so') | |
94 | 'zlib' |
|
94 | 'zlib' | |
95 | """ |
|
95 | """ | |
96 | parts = path.replace(os.sep, '/').split('/') |
|
96 | parts = path.replace(os.sep, '/').split('/') | |
97 | parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so |
|
97 | parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so | |
98 | if parts[-1].endswith('module'): |
|
98 | if parts[-1].endswith('module'): | |
99 | parts[-1] = parts[-1][:-6] |
|
99 | parts[-1] = parts[-1][:-6] | |
100 | return '.'.join(parts) |
|
100 | return '.'.join(parts) | |
101 |
|
101 | |||
102 |
|
102 | |||
103 | def fromlocalfunc(modulename, localmods): |
|
103 | def fromlocalfunc(modulename, localmods): | |
104 | """Get a function to examine which locally defined module the |
|
104 | """Get a function to examine which locally defined module the | |
105 | target source imports via a specified name. |
|
105 | target source imports via a specified name. | |
106 |
|
106 | |||
107 | `modulename` is an `dotted_name_of_path()`-ed source file path, |
|
107 | `modulename` is an `dotted_name_of_path()`-ed source file path, | |
108 | which may have `.__init__` at the end of it, of the target source. |
|
108 | which may have `.__init__` at the end of it, of the target source. | |
109 |
|
109 | |||
110 | `localmods` is a set of absolute `dotted_name_of_path()`-ed source file |
|
110 | `localmods` is a set of absolute `dotted_name_of_path()`-ed source file | |
111 | paths of locally defined (= Mercurial specific) modules. |
|
111 | paths of locally defined (= Mercurial specific) modules. | |
112 |
|
112 | |||
113 | This function assumes that module names not existing in |
|
113 | This function assumes that module names not existing in | |
114 | `localmods` are from the Python standard library. |
|
114 | `localmods` are from the Python standard library. | |
115 |
|
115 | |||
116 | This function returns the function, which takes `name` argument, |
|
116 | This function returns the function, which takes `name` argument, | |
117 | and returns `(absname, dottedpath, hassubmod)` tuple if `name` |
|
117 | and returns `(absname, dottedpath, hassubmod)` tuple if `name` | |
118 | matches against locally defined module. Otherwise, it returns |
|
118 | matches against locally defined module. Otherwise, it returns | |
119 | False. |
|
119 | False. | |
120 |
|
120 | |||
121 | It is assumed that `name` doesn't have `.__init__`. |
|
121 | It is assumed that `name` doesn't have `.__init__`. | |
122 |
|
122 | |||
123 | `absname` is an absolute module name of specified `name` |
|
123 | `absname` is an absolute module name of specified `name` | |
124 | (e.g. "hgext.convert"). This can be used to compose prefix for sub |
|
124 | (e.g. "hgext.convert"). This can be used to compose prefix for sub | |
125 | modules or so. |
|
125 | modules or so. | |
126 |
|
126 | |||
127 | `dottedpath` is a `dotted_name_of_path()`-ed source file path |
|
127 | `dottedpath` is a `dotted_name_of_path()`-ed source file path | |
128 | (e.g. "hgext.convert.__init__") of `name`. This is used to look |
|
128 | (e.g. "hgext.convert.__init__") of `name`. This is used to look | |
129 | module up in `localmods` again. |
|
129 | module up in `localmods` again. | |
130 |
|
130 | |||
131 | `hassubmod` is whether it may have sub modules under it (for |
|
131 | `hassubmod` is whether it may have sub modules under it (for | |
132 | convenient, even though this is also equivalent to "absname != |
|
132 | convenient, even though this is also equivalent to "absname != | |
133 | dottednpath") |
|
133 | dottednpath") | |
134 |
|
134 | |||
135 | >>> localmods = {'foo.__init__', 'foo.foo1', |
|
135 | >>> localmods = {'foo.__init__', 'foo.foo1', | |
136 | ... 'foo.bar.__init__', 'foo.bar.bar1', |
|
136 | ... 'foo.bar.__init__', 'foo.bar.bar1', | |
137 | ... 'baz.__init__', 'baz.baz1'} |
|
137 | ... 'baz.__init__', 'baz.baz1'} | |
138 | >>> fromlocal = fromlocalfunc('foo.xxx', localmods) |
|
138 | >>> fromlocal = fromlocalfunc('foo.xxx', localmods) | |
139 | >>> # relative |
|
139 | >>> # relative | |
140 | >>> fromlocal('foo1') |
|
140 | >>> fromlocal('foo1') | |
141 | ('foo.foo1', 'foo.foo1', False) |
|
141 | ('foo.foo1', 'foo.foo1', False) | |
142 | >>> fromlocal('bar') |
|
142 | >>> fromlocal('bar') | |
143 | ('foo.bar', 'foo.bar.__init__', True) |
|
143 | ('foo.bar', 'foo.bar.__init__', True) | |
144 | >>> fromlocal('bar.bar1') |
|
144 | >>> fromlocal('bar.bar1') | |
145 | ('foo.bar.bar1', 'foo.bar.bar1', False) |
|
145 | ('foo.bar.bar1', 'foo.bar.bar1', False) | |
146 | >>> # absolute |
|
146 | >>> # absolute | |
147 | >>> fromlocal('baz') |
|
147 | >>> fromlocal('baz') | |
148 | ('baz', 'baz.__init__', True) |
|
148 | ('baz', 'baz.__init__', True) | |
149 | >>> fromlocal('baz.baz1') |
|
149 | >>> fromlocal('baz.baz1') | |
150 | ('baz.baz1', 'baz.baz1', False) |
|
150 | ('baz.baz1', 'baz.baz1', False) | |
151 | >>> # unknown = maybe standard library |
|
151 | >>> # unknown = maybe standard library | |
152 | >>> fromlocal('os') |
|
152 | >>> fromlocal('os') | |
153 | False |
|
153 | False | |
154 | >>> fromlocal(None, 1) |
|
154 | >>> fromlocal(None, 1) | |
155 | ('foo', 'foo.__init__', True) |
|
155 | ('foo', 'foo.__init__', True) | |
156 | >>> fromlocal('foo1', 1) |
|
156 | >>> fromlocal('foo1', 1) | |
157 | ('foo.foo1', 'foo.foo1', False) |
|
157 | ('foo.foo1', 'foo.foo1', False) | |
158 | >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods) |
|
158 | >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods) | |
159 | >>> fromlocal2(None, 2) |
|
159 | >>> fromlocal2(None, 2) | |
160 | ('foo', 'foo.__init__', True) |
|
160 | ('foo', 'foo.__init__', True) | |
161 | >>> fromlocal2('bar2', 1) |
|
161 | >>> fromlocal2('bar2', 1) | |
162 | False |
|
162 | False | |
163 | >>> fromlocal2('bar', 2) |
|
163 | >>> fromlocal2('bar', 2) | |
164 | ('foo.bar', 'foo.bar.__init__', True) |
|
164 | ('foo.bar', 'foo.bar.__init__', True) | |
165 | """ |
|
165 | """ | |
166 | if not isinstance(modulename, str): |
|
166 | if not isinstance(modulename, str): | |
167 | modulename = modulename.decode('ascii') |
|
167 | modulename = modulename.decode('ascii') | |
168 | prefix = '.'.join(modulename.split('.')[:-1]) |
|
168 | prefix = '.'.join(modulename.split('.')[:-1]) | |
169 | if prefix: |
|
169 | if prefix: | |
170 | prefix += '.' |
|
170 | prefix += '.' | |
171 |
|
171 | |||
172 | def fromlocal(name, level=0): |
|
172 | def fromlocal(name, level=0): | |
173 | # name is false value when relative imports are used. |
|
173 | # name is false value when relative imports are used. | |
174 | if not name: |
|
174 | if not name: | |
175 | # If relative imports are used, level must not be absolute. |
|
175 | # If relative imports are used, level must not be absolute. | |
176 | assert level > 0 |
|
176 | assert level > 0 | |
177 | candidates = ['.'.join(modulename.split('.')[:-level])] |
|
177 | candidates = ['.'.join(modulename.split('.')[:-level])] | |
178 | else: |
|
178 | else: | |
179 | if not level: |
|
179 | if not level: | |
180 | # Check relative name first. |
|
180 | # Check relative name first. | |
181 | candidates = [prefix + name, name] |
|
181 | candidates = [prefix + name, name] | |
182 | else: |
|
182 | else: | |
183 | candidates = [ |
|
183 | candidates = [ | |
184 | '.'.join(modulename.split('.')[:-level]) + '.' + name |
|
184 | '.'.join(modulename.split('.')[:-level]) + '.' + name | |
185 | ] |
|
185 | ] | |
186 |
|
186 | |||
187 | for n in candidates: |
|
187 | for n in candidates: | |
188 | if n in localmods: |
|
188 | if n in localmods: | |
189 | return (n, n, False) |
|
189 | return (n, n, False) | |
190 | dottedpath = n + '.__init__' |
|
190 | dottedpath = n + '.__init__' | |
191 | if dottedpath in localmods: |
|
191 | if dottedpath in localmods: | |
192 | return (n, dottedpath, True) |
|
192 | return (n, dottedpath, True) | |
193 | return False |
|
193 | return False | |
194 |
|
194 | |||
195 | return fromlocal |
|
195 | return fromlocal | |
196 |
|
196 | |||
197 |
|
197 | |||
198 | def populateextmods(localmods): |
|
198 | def populateextmods(localmods): | |
199 | """Populate C extension modules based on pure modules""" |
|
199 | """Populate C extension modules based on pure modules""" | |
200 | newlocalmods = set(localmods) |
|
200 | newlocalmods = set(localmods) | |
201 | for n in localmods: |
|
201 | for n in localmods: | |
202 | if n.startswith('mercurial.pure.'): |
|
202 | if n.startswith('mercurial.pure.'): | |
203 | m = n[len('mercurial.pure.') :] |
|
203 | m = n[len('mercurial.pure.') :] | |
204 | newlocalmods.add('mercurial.cext.' + m) |
|
204 | newlocalmods.add('mercurial.cext.' + m) | |
205 | newlocalmods.add('mercurial.cffi._' + m) |
|
205 | newlocalmods.add('mercurial.cffi._' + m) | |
206 | return newlocalmods |
|
206 | return newlocalmods | |
207 |
|
207 | |||
208 |
|
208 | |||
209 | def list_stdlib_modules(): |
|
209 | def list_stdlib_modules(): | |
210 | """List the modules present in the stdlib. |
|
210 | """List the modules present in the stdlib. | |
211 |
|
211 | |||
212 | >>> py3 = sys.version_info[0] >= 3 |
|
212 | >>> py3 = sys.version_info[0] >= 3 | |
213 | >>> mods = set(list_stdlib_modules()) |
|
213 | >>> mods = set(list_stdlib_modules()) | |
214 | >>> 'BaseHTTPServer' in mods or py3 |
|
214 | >>> 'BaseHTTPServer' in mods or py3 | |
215 | True |
|
215 | True | |
216 |
|
216 | |||
217 | os.path isn't really a module, so it's missing: |
|
217 | os.path isn't really a module, so it's missing: | |
218 |
|
218 | |||
219 | >>> 'os.path' in mods |
|
219 | >>> 'os.path' in mods | |
220 | False |
|
220 | False | |
221 |
|
221 | |||
222 | sys requires special treatment, because it's baked into the |
|
222 | sys requires special treatment, because it's baked into the | |
223 | interpreter, but it should still appear: |
|
223 | interpreter, but it should still appear: | |
224 |
|
224 | |||
225 | >>> 'sys' in mods |
|
225 | >>> 'sys' in mods | |
226 | True |
|
226 | True | |
227 |
|
227 | |||
228 | >>> 'collections' in mods |
|
228 | >>> 'collections' in mods | |
229 | True |
|
229 | True | |
230 |
|
230 | |||
231 | >>> 'cStringIO' in mods or py3 |
|
231 | >>> 'cStringIO' in mods or py3 | |
232 | True |
|
232 | True | |
233 |
|
233 | |||
234 | >>> 'cffi' in mods |
|
234 | >>> 'cffi' in mods | |
235 | True |
|
235 | True | |
236 | """ |
|
236 | """ | |
237 | for m in sys.builtin_module_names: |
|
237 | for m in sys.builtin_module_names: | |
238 | yield m |
|
238 | yield m | |
239 | # These modules only exist on windows, but we should always |
|
239 | # These modules only exist on windows, but we should always | |
240 | # consider them stdlib. |
|
240 | # consider them stdlib. | |
241 | for m in ['msvcrt', '_winreg']: |
|
241 | for m in ['msvcrt', '_winreg']: | |
242 | yield m |
|
242 | yield m | |
243 | yield '__builtin__' |
|
243 | yield '__builtin__' | |
244 | yield 'builtins' # python3 only |
|
244 | yield 'builtins' # python3 only | |
245 | yield 'importlib.abc' # python3 only |
|
245 | yield 'importlib.abc' # python3 only | |
246 | yield 'importlib.machinery' # python3 only |
|
246 | yield 'importlib.machinery' # python3 only | |
247 | yield 'importlib.util' # python3 only |
|
247 | yield 'importlib.util' # python3 only | |
248 | for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only |
|
248 | for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only | |
249 | yield m |
|
249 | yield m | |
250 | for m in 'cPickle', 'datetime': # in Python (not C) on PyPy |
|
250 | for m in 'cPickle', 'datetime': # in Python (not C) on PyPy | |
251 | yield m |
|
251 | yield m | |
252 | for m in ['cffi']: |
|
252 | for m in ['cffi']: | |
253 | yield m |
|
253 | yield m | |
254 | stdlib_prefixes = {sys.prefix, sys.exec_prefix} |
|
254 | stdlib_prefixes = {sys.prefix, sys.exec_prefix} | |
255 | # We need to supplement the list of prefixes for the search to work |
|
255 | # We need to supplement the list of prefixes for the search to work | |
256 | # when run from within a virtualenv. |
|
256 | # when run from within a virtualenv. | |
257 | for mod in (basehttpserver, zlib): |
|
257 | for mod in (basehttpserver, zlib): | |
258 | if mod is None: |
|
258 | if mod is None: | |
259 | continue |
|
259 | continue | |
260 | try: |
|
260 | try: | |
261 | # Not all module objects have a __file__ attribute. |
|
261 | # Not all module objects have a __file__ attribute. | |
262 | filename = mod.__file__ |
|
262 | filename = mod.__file__ | |
263 | except AttributeError: |
|
263 | except AttributeError: | |
264 | continue |
|
264 | continue | |
265 | dirname = os.path.dirname(filename) |
|
265 | dirname = os.path.dirname(filename) | |
266 | for prefix in stdlib_prefixes: |
|
266 | for prefix in stdlib_prefixes: | |
267 | if dirname.startswith(prefix): |
|
267 | if dirname.startswith(prefix): | |
268 | # Then this directory is redundant. |
|
268 | # Then this directory is redundant. | |
269 | break |
|
269 | break | |
270 | else: |
|
270 | else: | |
271 | stdlib_prefixes.add(dirname) |
|
271 | stdlib_prefixes.add(dirname) | |
272 | sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) |
|
272 | sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | |
273 | for libpath in sys.path: |
|
273 | for libpath in sys.path: | |
274 | # We want to walk everything in sys.path that starts with something in |
|
274 | # We want to walk everything in sys.path that starts with something in | |
275 | # stdlib_prefixes, but not directories from the hg sources. |
|
275 | # stdlib_prefixes, but not directories from the hg sources. | |
276 | if os.path.abspath(libpath).startswith(sourceroot) or not any( |
|
276 | if os.path.abspath(libpath).startswith(sourceroot) or not any( | |
277 | libpath.startswith(p) for p in stdlib_prefixes |
|
277 | libpath.startswith(p) for p in stdlib_prefixes | |
278 | ): |
|
278 | ): | |
279 | continue |
|
279 | continue | |
280 | for top, dirs, files in os.walk(libpath): |
|
280 | for top, dirs, files in os.walk(libpath): | |
281 | for i, d in reversed(list(enumerate(dirs))): |
|
281 | for i, d in reversed(list(enumerate(dirs))): | |
282 | if ( |
|
282 | if ( | |
283 | not os.path.exists(os.path.join(top, d, '__init__.py')) |
|
283 | not os.path.exists(os.path.join(top, d, '__init__.py')) | |
284 | or top == libpath |
|
284 | or top == libpath | |
285 | and d in ('hgdemandimport', 'hgext', 'mercurial') |
|
285 | and d in ('hgdemandimport', 'hgext', 'mercurial') | |
286 | ): |
|
286 | ): | |
287 | del dirs[i] |
|
287 | del dirs[i] | |
288 | for name in files: |
|
288 | for name in files: | |
289 | if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')): |
|
289 | if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')): | |
290 | continue |
|
290 | continue | |
291 | if name.startswith('__init__.py'): |
|
291 | if name.startswith('__init__.py'): | |
292 | full_path = top |
|
292 | full_path = top | |
293 | else: |
|
293 | else: | |
294 | full_path = os.path.join(top, name) |
|
294 | full_path = os.path.join(top, name) | |
295 | rel_path = full_path[len(libpath) + 1 :] |
|
295 | rel_path = full_path[len(libpath) + 1 :] | |
296 | mod = dotted_name_of_path(rel_path) |
|
296 | mod = dotted_name_of_path(rel_path) | |
297 | yield mod |
|
297 | yield mod | |
298 |
|
298 | |||
299 |
|
299 | |||
300 | stdlib_modules = set(list_stdlib_modules()) |
|
300 | stdlib_modules = set(list_stdlib_modules()) | |
301 |
|
301 | |||
302 |
|
302 | |||
303 | def imported_modules(source, modulename, f, localmods, ignore_nested=False): |
|
303 | def imported_modules(source, modulename, f, localmods, ignore_nested=False): | |
304 | """Given the source of a file as a string, yield the names |
|
304 | """Given the source of a file as a string, yield the names | |
305 | imported by that file. |
|
305 | imported by that file. | |
306 |
|
306 | |||
307 | Args: |
|
307 | Args: | |
308 | source: The python source to examine as a string. |
|
308 | source: The python source to examine as a string. | |
309 | modulename: of specified python source (may have `__init__`) |
|
309 | modulename: of specified python source (may have `__init__`) | |
310 | localmods: set of locally defined module names (may have `__init__`) |
|
310 | localmods: set of locally defined module names (may have `__init__`) | |
311 | ignore_nested: If true, import statements that do not start in |
|
311 | ignore_nested: If true, import statements that do not start in | |
312 | column zero will be ignored. |
|
312 | column zero will be ignored. | |
313 |
|
313 | |||
314 | Returns: |
|
314 | Returns: | |
315 | A list of absolute module names imported by the given source. |
|
315 | A list of absolute module names imported by the given source. | |
316 |
|
316 | |||
317 | >>> f = 'foo/xxx.py' |
|
317 | >>> f = 'foo/xxx.py' | |
318 | >>> modulename = 'foo.xxx' |
|
318 | >>> modulename = 'foo.xxx' | |
319 | >>> localmods = {'foo.__init__': True, |
|
319 | >>> localmods = {'foo.__init__': True, | |
320 | ... 'foo.foo1': True, 'foo.foo2': True, |
|
320 | ... 'foo.foo1': True, 'foo.foo2': True, | |
321 | ... 'foo.bar.__init__': True, 'foo.bar.bar1': True, |
|
321 | ... 'foo.bar.__init__': True, 'foo.bar.bar1': True, | |
322 | ... 'baz.__init__': True, 'baz.baz1': True } |
|
322 | ... 'baz.__init__': True, 'baz.baz1': True } | |
323 | >>> # standard library (= not locally defined ones) |
|
323 | >>> # standard library (= not locally defined ones) | |
324 | >>> sorted(imported_modules( |
|
324 | >>> sorted(imported_modules( | |
325 | ... 'from stdlib1 import foo, bar; import stdlib2', |
|
325 | ... 'from stdlib1 import foo, bar; import stdlib2', | |
326 | ... modulename, f, localmods)) |
|
326 | ... modulename, f, localmods)) | |
327 | [] |
|
327 | [] | |
328 | >>> # relative importing |
|
328 | >>> # relative importing | |
329 | >>> sorted(imported_modules( |
|
329 | >>> sorted(imported_modules( | |
330 | ... 'import foo1; from bar import bar1', |
|
330 | ... 'import foo1; from bar import bar1', | |
331 | ... modulename, f, localmods)) |
|
331 | ... modulename, f, localmods)) | |
332 | ['foo.bar.bar1', 'foo.foo1'] |
|
332 | ['foo.bar.bar1', 'foo.foo1'] | |
333 | >>> sorted(imported_modules( |
|
333 | >>> sorted(imported_modules( | |
334 | ... 'from bar.bar1 import name1, name2, name3', |
|
334 | ... 'from bar.bar1 import name1, name2, name3', | |
335 | ... modulename, f, localmods)) |
|
335 | ... modulename, f, localmods)) | |
336 | ['foo.bar.bar1'] |
|
336 | ['foo.bar.bar1'] | |
337 | >>> # absolute importing |
|
337 | >>> # absolute importing | |
338 | >>> sorted(imported_modules( |
|
338 | >>> sorted(imported_modules( | |
339 | ... 'from baz import baz1, name1', |
|
339 | ... 'from baz import baz1, name1', | |
340 | ... modulename, f, localmods)) |
|
340 | ... modulename, f, localmods)) | |
341 | ['baz.__init__', 'baz.baz1'] |
|
341 | ['baz.__init__', 'baz.baz1'] | |
342 | >>> # mixed importing, even though it shouldn't be recommended |
|
342 | >>> # mixed importing, even though it shouldn't be recommended | |
343 | >>> sorted(imported_modules( |
|
343 | >>> sorted(imported_modules( | |
344 | ... 'import stdlib, foo1, baz', |
|
344 | ... 'import stdlib, foo1, baz', | |
345 | ... modulename, f, localmods)) |
|
345 | ... modulename, f, localmods)) | |
346 | ['baz.__init__', 'foo.foo1'] |
|
346 | ['baz.__init__', 'foo.foo1'] | |
347 | >>> # ignore_nested |
|
347 | >>> # ignore_nested | |
348 | >>> sorted(imported_modules( |
|
348 | >>> sorted(imported_modules( | |
349 | ... '''import foo |
|
349 | ... '''import foo | |
350 | ... def wat(): |
|
350 | ... def wat(): | |
351 | ... import bar |
|
351 | ... import bar | |
352 | ... ''', modulename, f, localmods)) |
|
352 | ... ''', modulename, f, localmods)) | |
353 | ['foo.__init__', 'foo.bar.__init__'] |
|
353 | ['foo.__init__', 'foo.bar.__init__'] | |
354 | >>> sorted(imported_modules( |
|
354 | >>> sorted(imported_modules( | |
355 | ... '''import foo |
|
355 | ... '''import foo | |
356 | ... def wat(): |
|
356 | ... def wat(): | |
357 | ... import bar |
|
357 | ... import bar | |
358 | ... ''', modulename, f, localmods, ignore_nested=True)) |
|
358 | ... ''', modulename, f, localmods, ignore_nested=True)) | |
359 | ['foo.__init__'] |
|
359 | ['foo.__init__'] | |
360 | """ |
|
360 | """ | |
361 | fromlocal = fromlocalfunc(modulename, localmods) |
|
361 | fromlocal = fromlocalfunc(modulename, localmods) | |
362 | for node in ast.walk(ast.parse(source, f)): |
|
362 | for node in ast.walk(ast.parse(source, f)): | |
363 | if ignore_nested and getattr(node, 'col_offset', 0) > 0: |
|
363 | if ignore_nested and getattr(node, 'col_offset', 0) > 0: | |
364 | continue |
|
364 | continue | |
365 | if isinstance(node, ast.Import): |
|
365 | if isinstance(node, ast.Import): | |
366 | for n in node.names: |
|
366 | for n in node.names: | |
367 | found = fromlocal(n.name) |
|
367 | found = fromlocal(n.name) | |
368 | if not found: |
|
368 | if not found: | |
369 | # this should import standard library |
|
369 | # this should import standard library | |
370 | continue |
|
370 | continue | |
371 | yield found[1] |
|
371 | yield found[1] | |
372 | elif isinstance(node, ast.ImportFrom): |
|
372 | elif isinstance(node, ast.ImportFrom): | |
373 | found = fromlocal(node.module, node.level) |
|
373 | found = fromlocal(node.module, node.level) | |
374 | if not found: |
|
374 | if not found: | |
375 | # this should import standard library |
|
375 | # this should import standard library | |
376 | continue |
|
376 | continue | |
377 |
|
377 | |||
378 | absname, dottedpath, hassubmod = found |
|
378 | absname, dottedpath, hassubmod = found | |
379 | if not hassubmod: |
|
379 | if not hassubmod: | |
380 | # "dottedpath" is not a package; must be imported |
|
380 | # "dottedpath" is not a package; must be imported | |
381 | yield dottedpath |
|
381 | yield dottedpath | |
382 | # examination of "node.names" should be redundant |
|
382 | # examination of "node.names" should be redundant | |
383 | # e.g.: from mercurial.node import nullid, nullrev |
|
383 | # e.g.: from mercurial.node import nullid, nullrev | |
384 | continue |
|
384 | continue | |
385 |
|
385 | |||
386 | modnotfound = False |
|
386 | modnotfound = False | |
387 | prefix = absname + '.' |
|
387 | prefix = absname + '.' | |
388 | for n in node.names: |
|
388 | for n in node.names: | |
389 | found = fromlocal(prefix + n.name) |
|
389 | found = fromlocal(prefix + n.name) | |
390 | if not found: |
|
390 | if not found: | |
391 | # this should be a function or a property of "node.module" |
|
391 | # this should be a function or a property of "node.module" | |
392 | modnotfound = True |
|
392 | modnotfound = True | |
393 | continue |
|
393 | continue | |
394 | yield found[1] |
|
394 | yield found[1] | |
395 | if modnotfound and dottedpath != modulename: |
|
395 | if modnotfound and dottedpath != modulename: | |
396 | # "dottedpath" is a package, but imported because of non-module |
|
396 | # "dottedpath" is a package, but imported because of non-module | |
397 | # lookup |
|
397 | # lookup | |
398 | # specifically allow "from . import foo" from __init__.py |
|
398 | # specifically allow "from . import foo" from __init__.py | |
399 | yield dottedpath |
|
399 | yield dottedpath | |
400 |
|
400 | |||
401 |
|
401 | |||
def verify_import_convention(module, source, localmods):
    """Check that ``source`` follows our import coding conventions.

    Two conventions exist: the "modern" one, enforced whenever the file
    uses absolute imports, and the "legacy" one, which only rejects
    statements that mix stdlib and relative imports.

    Returns an iterable of ``(message, lineno)`` violation tuples.
    """
    tree = ast.parse(source)
    if usingabsolute(tree):
        return verify_modern_convention(module, tree, localmods)
    return verify_stdlib_on_own_line(tree)
418 |
|
418 | |||
419 |
|
419 | |||
def verify_modern_convention(module, root, localmods, root_col_offset=0):
    """Verify a file conforms to the modern import convention rules.

    The rules of the modern convention are:

    * Ordering is stdlib followed by local imports. Each group is lexically
      sorted.
    * Importing multiple modules via "import X, Y" is not allowed: use
      separate import statements.
    * Importing multiple modules via "from X import ..." is allowed if using
      parenthesis and one entry per line.
    * Only 1 relative import statement per import level ("from .", "from ..")
      is allowed.
    * Relative imports from higher levels must occur before lower levels. e.g.
      "from .." must be before "from .".
    * Imports from peer packages should use relative import (e.g. do not
      "import mercurial.foo" from a "mercurial.*" module).
    * Symbols can only be imported from specific modules (see
      `allowsymbolimports`). For other modules, first import the module then
      assign the symbol to a module-level variable. In addition, these imports
      must be performed before other local imports. This rule only
      applies to import statements outside of any blocks.
    * Relative imports from the standard library are not allowed, unless that
      library is also a local module.
    * Certain modules must be aliased to alternate names to avoid aliasing
      and readability problems. See `requirealias`.

    Args:
      module: dotted name of the module being checked (str or bytes).
      root: AST node to walk -- the module body, or a nested scope when
        recursing.
      localmods: locally defined module names, as accepted by
        ``fromlocalfunc``.
      root_col_offset: column offset of statements that belong directly to
        ``root``'s scope; imports indented further are treated as being
        inside nested blocks and are exempt from ordering rules.

    Yields:
      ``(message, lineno)`` tuples, one per violation found.
    """
    if not isinstance(module, str):
        module = module.decode('ascii')
    topmodule = module.split('.')[0]
    fromlocal = fromlocalfunc(module, localmods)

    # Whether a local/non-stdlib import has been performed.
    seenlocal = None
    # Whether a local/non-stdlib, non-symbol import has been seen.
    seennonsymbollocal = False
    # The last name to be imported (for sorting).
    lastname = None
    laststdlib = None
    # Relative import levels encountered so far.
    seenlevels = set()

    for node, newscope in walklocal(root):

        def msg(fmt, *args):
            # Build one (message, lineno) violation tuple for this node.
            return (fmt % args, node.lineno)

        if newscope:
            # Check for local imports in function
            for r in verify_modern_convention(
                module, node, localmods, node.col_offset + 4
            ):
                yield r
        elif isinstance(node, ast.Import):
            # Disallow "import foo, bar" and require separate imports
            # for each module.
            if len(node.names) > 1:
                yield msg(
                    'multiple imported names: %s',
                    ', '.join(n.name for n in node.names),
                )

            name = node.names[0].name
            asname = node.names[0].asname

            stdlib = name in stdlib_modules

            # Ignore sorting rules on imports inside blocks.
            if node.col_offset == root_col_offset:
                if lastname and name < lastname and laststdlib == stdlib:
                    yield msg(
                        'imports not lexically sorted: %s < %s', name, lastname
                    )

            lastname = name
            laststdlib = stdlib

            # stdlib imports should be before local imports.
            if stdlib and seenlocal and node.col_offset == root_col_offset:
                yield msg(
                    'stdlib import "%s" follows local import: %s',
                    name,
                    seenlocal,
                )

            if not stdlib:
                seenlocal = name

            # Import of sibling modules should use relative imports.
            topname = name.split('.')[0]
            if topname == topmodule:
                yield msg('import should be relative: %s', name)

            if name in requirealias and asname != requirealias[name]:
                yield msg(
                    '%s module must be "as" aliased to %s',
                    name,
                    requirealias[name],
                )

        elif isinstance(node, ast.ImportFrom):
            # Resolve the full imported module name.
            if node.level > 0:
                # Relative import: strip `level` trailing components from
                # the current module's dotted path.
                fullname = '.'.join(module.split('.')[: -node.level])
                if node.module:
                    fullname += '.%s' % node.module
            else:
                assert node.module
                fullname = node.module

            topname = fullname.split('.')[0]
            if topname == topmodule:
                yield msg('import should be relative: %s', fullname)

            # __future__ is special since it needs to come first and use
            # symbol import.
            if fullname != '__future__':
                if not fullname or (
                    fullname in stdlib_modules
                    # allow standard 'from typing import ...' style
                    # NOTE(review): the startswith('.') guard looks odd here,
                    # since fullname was resolved to an absolute dotted name
                    # above -- confirm the intended condition.
                    and fullname.startswith('.')
                    and fullname not in localmods
                    and fullname + '.__init__' not in localmods
                ):
                    yield msg('relative import of stdlib module')
                else:
                    seenlocal = fullname

            # Direct symbol import is only allowed from certain modules and
            # must occur before non-symbol imports.
            found = fromlocal(node.module, node.level)
            if found and found[2]:  # node.module is a package
                prefix = found[0] + '.'
                # Anything resolvable as a local module is a submodule
                # import, not a symbol import.
                symbols = (
                    n.name for n in node.names if not fromlocal(prefix + n.name)
                )
            else:
                symbols = (n.name for n in node.names)
            symbols = [sym for sym in symbols if sym not in directsymbols]
            if node.module and node.col_offset == root_col_offset:
                if symbols and fullname not in allowsymbolimports:
                    yield msg(
                        'direct symbol import %s from %s',
                        ', '.join(symbols),
                        fullname,
                    )

                if symbols and seennonsymbollocal:
                    yield msg(
                        'symbol import follows non-symbol import: %s', fullname
                    )
            if not symbols and fullname not in stdlib_modules:
                seennonsymbollocal = True

            if not node.module:
                # "from . import foo" style: level must be set.
                assert node.level

                # Only allow 1 group per level.
                if (
                    node.level in seenlevels
                    and node.col_offset == root_col_offset
                ):
                    yield msg(
                        'multiple "from %s import" statements', '.' * node.level
                    )

                # Higher-level groups come before lower-level groups.
                if any(node.level > l for l in seenlevels):
                    yield msg(
                        'higher-level import should come first: %s', fullname
                    )

                seenlevels.add(node.level)

            # Entries in "from .X import ( ... )" lists must be lexically
            # sorted.
            lastentryname = None

            for n in node.names:
                if lastentryname and n.name < lastentryname:
                    yield msg(
                        'imports from %s not lexically sorted: %s < %s',
                        fullname,
                        n.name,
                        lastentryname,
                    )

                lastentryname = n.name

                if n.name in requirealias and n.asname != requirealias[n.name]:
                    yield msg(
                        '%s from %s must be "as" aliased to %s',
                        n.name,
                        fullname,
                        requirealias[n.name],
                    )
616 |
|
616 | |||
617 |
|
617 | |||
def verify_stdlib_on_own_line(root):
    """Check that no single import statement mixes stdlib modules with
    relative local module imports (the "legacy" convention).

    >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo')))
    [('mixed imports\\n stdlib: sys\\n relative: foo', 1)]
    >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os')))
    []
    >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar')))
    []
    """
    for node in ast.walk(root):
        if not isinstance(node, ast.Import):
            continue
        # Partition the imported names of this statement into stdlib
        # and non-stdlib (assumed relative/local) groups.
        stdlib_names = []
        local_names = []
        for alias in node.names:
            if alias.name in stdlib_modules:
                stdlib_names.append(alias.name)
            else:
                local_names.append(alias.name)
        if stdlib_names and local_names:
            yield (
                'mixed imports\n stdlib: %s\n relative: %s'
                % (
                    ', '.join(sorted(stdlib_names)),
                    ', '.join(sorted(local_names)),
                ),
                node.lineno,
            )
643 |
|
643 | |||
644 |
|
644 | |||
class CircularImport(Exception):
    """Raised by checkmod() when an import chain loops back to its start.

    args[0] is the list of module names forming the cycle path.
    """

    pass
647 |
|
647 | |||
648 |
|
648 | |||
def checkmod(mod, imports):
    """Breadth-first search of the import graph starting at ``mod``.

    ``imports`` maps a module name to the names it imports.  Raises
    ``CircularImport`` (carrying the offending path) when some chain of
    imports leads back to ``mod`` itself; otherwise returns None.
    """
    shortest = {}
    queue = [[mod]]
    while queue:
        chain = queue.pop(0)
        for dep in sorted(imports.get(chain[-1], [])):
            # Only pursue a dependency along a new shortest path to it.
            if len(chain) < shortest.get(dep, 1000):
                shortest[dep] = len(chain)
                if dep in chain:
                    if dep == chain[0]:
                        raise CircularImport(chain)
                    continue
                queue.append(chain + [dep])
662 |
|
662 | |||
663 |
|
663 | |||
def rotatecycle(cycle):
    """Rotate *cycle* so its lexicographically smallest module comes first.

    The first module is repeated at the end so the result reads as a
    closed loop.

    >>> rotatecycle(['foo', 'bar'])
    ['bar', 'foo', 'bar']
    """
    start = cycle.index(min(cycle))
    rotated = cycle[start:] + cycle[:start]
    return rotated + rotated[:1]
673 |
|
673 | |||
674 |
|
674 | |||
def find_cycles(imports):
    """Find cycles in an already-loaded import graph.

    All module names recorded in ``imports`` must be absolute.  Returns a
    set of " -> "-joined cycle strings, each rotated via ``rotatecycle``.

    >>> from __future__ import print_function
    >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'],
    ...            'top.bar': ['top.baz', 'sys'],
    ...            'top.baz': ['top.foo'],
    ...            'top.qux': ['top.foo']}
    >>> print('\\n'.join(sorted(find_cycles(imports))))
    top.bar -> top.baz -> top.foo -> top.bar
    top.foo -> top.qux -> top.foo
    """
    cycles = set()
    for modname in sorted(imports):
        try:
            checkmod(modname, imports)
        except CircularImport as exc:
            cycles.add(" -> ".join(rotatecycle(exc.args[0])))
    return cycles
697 |
|
697 | |||
698 |
|
698 | |||
699 | def _cycle_sortkey(c): |
|
699 | def _cycle_sortkey(c): | |
700 | return len(c), c |
|
700 | return len(c), c | |
701 |
|
701 | |||
702 |
|
702 | |||
def embedded(f, modname, src):
    """Extract embedded python code

    Yields a ``(code, modulename, filename, lineoffset)`` tuple for each
    embedded script found by ``testparseutil.pyembedded()``.

    >>> def _forcestr(thing):
    ...     if not isinstance(thing, str):
    ...         return thing.decode('ascii')
    ...     return thing
    >>> def test(fn, lines):
    ...     for s, m, f, l in embedded(fn, b"example", lines):
    ...         print("%s %s %d" % (_forcestr(m), _forcestr(f), l))
    ...         print(repr(_forcestr(s)))
    >>> lines = [
    ...   'comment',
    ...   '    >>> from __future__ import print_function',
    ...   "    >>> ' multiline",
    ...   "    ... string'",
    ...   '    ',
    ...   'comment',
    ...   '    $ cat > foo.py <<EOF',
    ...   '    > from __future__ import print_function',
    ...   '    > EOF',
    ... ]
    >>> test(b"example.t", lines)
    example[2] doctest.py 1
    "from __future__ import print_function\\n' multiline\\nstring'\\n\\n"
    example[8] foo.py 7
    'from __future__ import print_function\\n'
    """
    # Passed to pyembedded() to collect parse errors; not inspected here.
    errors = []
    for name, starts, ends, code in testparseutil.pyembedded(f, src, errors):
        if not name:
            # use 'doctest.py', in order to make already existing
            # doctest above pass instantly
            name = 'doctest.py'
        # "starts" is "line number" (1-origin), but embedded() is
        # expected to return "line offset" (0-origin). Therefore, this
        # yields "starts - 1".
        if not isinstance(modname, str):
            modname = modname.decode('utf8')
        yield code, "%s[%d]" % (modname, starts), name, starts - 1
743 |
|
743 | |||
744 |
|
744 | |||
def sources(f, modname):
    """Yield one or more sources from the file at path ``f``.

    input: filepath, modulename
    yields: script(string), modulename, filepath, linenumber

    A non-``.t`` file is first yielded whole.  Embedded scripts are then
    extracted (for both kinds of file); for those, the yielded modulename
    and filepath describe the embedded chunk, and linenumber is its
    offset within the input file.
    """
    is_python = False
    if not f.endswith('.t'):
        with open(f, 'rb') as fh:
            yield fh.read(), modname, f, 0
        is_python = True
    if is_python or f.endswith('.t'):
        # Strictly speaking we should sniff for the magic header that
        # denotes Python source file encoding, but in reality the code
        # base only uses ASCII (mainly) and UTF-8 (rarely), so
        # hard-coding UTF-8 keeps this simple.
        with io.open(f, 'r', encoding='utf-8') as fh:
            for script, embeddedname, t, line in embedded(f, modname, fh):
                yield script, embeddedname.encode('utf8'), t, line
768 |
|
768 | |||
769 |
|
769 | |||
770 | def main(argv): |
|
770 | def main(argv): | |
771 | if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2): |
|
771 | if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2): | |
772 | print('Usage: %s {-|file [file] [file] ...}') |
|
772 | print('Usage: %s {-|file [file] [file] ...}') | |
773 | return 1 |
|
773 | return 1 | |
774 | if argv[1] == '-': |
|
774 | if argv[1] == '-': | |
775 | argv = argv[:1] |
|
775 | argv = argv[:1] | |
776 | argv.extend(l.rstrip() for l in sys.stdin.readlines()) |
|
776 | argv.extend(l.rstrip() for l in sys.stdin.readlines()) | |
777 | localmodpaths = {} |
|
777 | localmodpaths = {} | |
778 | used_imports = {} |
|
778 | used_imports = {} | |
779 | any_errors = False |
|
779 | any_errors = False | |
780 | for source_path in argv[1:]: |
|
780 | for source_path in argv[1:]: | |
781 | modname = dotted_name_of_path(source_path) |
|
781 | modname = dotted_name_of_path(source_path) | |
782 | localmodpaths[modname] = source_path |
|
782 | localmodpaths[modname] = source_path | |
783 | localmods = populateextmods(localmodpaths) |
|
783 | localmods = populateextmods(localmodpaths) | |
784 | for localmodname, source_path in sorted(localmodpaths.items()): |
|
784 | for localmodname, source_path in sorted(localmodpaths.items()): | |
785 | if not isinstance(localmodname, bytes): |
|
785 | if not isinstance(localmodname, bytes): | |
786 | # This is only safe because all hg's files are ascii |
|
786 | # This is only safe because all hg's files are ascii | |
787 | localmodname = localmodname.encode('ascii') |
|
787 | localmodname = localmodname.encode('ascii') | |
788 | for src, modname, name, line in sources(source_path, localmodname): |
|
788 | for src, modname, name, line in sources(source_path, localmodname): | |
789 | try: |
|
789 | try: | |
790 | used_imports[modname] = sorted( |
|
790 | used_imports[modname] = sorted( | |
791 | imported_modules( |
|
791 | imported_modules( | |
792 | src, modname, name, localmods, ignore_nested=True |
|
792 | src, modname, name, localmods, ignore_nested=True | |
793 | ) |
|
793 | ) | |
794 | ) |
|
794 | ) | |
795 | for error, lineno in verify_import_convention( |
|
795 | for error, lineno in verify_import_convention( | |
796 | modname, src, localmods |
|
796 | modname, src, localmods | |
797 | ): |
|
797 | ): | |
798 | any_errors = True |
|
798 | any_errors = True | |
799 | print('%s:%d: %s' % (source_path, lineno + line, error)) |
|
799 | print('%s:%d: %s' % (source_path, lineno + line, error)) | |
800 | except SyntaxError as e: |
|
800 | except SyntaxError as e: | |
801 | print( |
|
801 | print( | |
802 | '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e) |
|
802 | '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e) | |
803 | ) |
|
803 | ) | |
804 | cycles = find_cycles(used_imports) |
|
804 | cycles = find_cycles(used_imports) | |
805 | if cycles: |
|
805 | if cycles: | |
806 | firstmods = set() |
|
806 | firstmods = set() | |
807 | for c in sorted(cycles, key=_cycle_sortkey): |
|
807 | for c in sorted(cycles, key=_cycle_sortkey): | |
808 | first = c.split()[0] |
|
808 | first = c.split()[0] | |
809 | # As a rough cut, ignore any cycle that starts with the |
|
809 | # As a rough cut, ignore any cycle that starts with the | |
810 | # same module as some other cycle. Otherwise we see lots |
|
810 | # same module as some other cycle. Otherwise we see lots | |
811 | # of cycles that are effectively duplicates. |
|
811 | # of cycles that are effectively duplicates. | |
812 | if first in firstmods: |
|
812 | if first in firstmods: | |
813 | continue |
|
813 | continue | |
814 | print('Import cycle:', c) |
|
814 | print('Import cycle:', c) | |
815 | firstmods.add(first) |
|
815 | firstmods.add(first) | |
816 | any_errors = True |
|
816 | any_errors = True | |
817 | return any_errors != 0 |
|
817 | return any_errors != 0 | |
818 |
|
818 | |||
819 |
|
819 | |||
820 | if __name__ == '__main__': |
|
820 | if __name__ == '__main__': | |
821 | sys.exit(int(main(sys.argv))) |
|
821 | sys.exit(int(main(sys.argv))) |
@@ -1,211 +1,215 b'' | |||||
1 | # install-dependencies.ps1 - Install Windows dependencies for building Mercurial |
|
1 | # install-dependencies.ps1 - Install Windows dependencies for building Mercurial | |
2 | # |
|
2 | # | |
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # This script can be used to bootstrap a Mercurial build environment on |
|
8 | # This script can be used to bootstrap a Mercurial build environment on | |
9 | # Windows. |
|
9 | # Windows. | |
10 | # |
|
10 | # | |
11 | # The script makes a lot of assumptions about how things should work. |
|
11 | # The script makes a lot of assumptions about how things should work. | |
12 | # For example, the install location of Python is hardcoded to c:\hgdev\*. |
|
12 | # For example, the install location of Python is hardcoded to c:\hgdev\*. | |
13 | # |
|
13 | # | |
14 | # The script should be executed from a PowerShell with elevated privileges |
|
14 | # The script should be executed from a PowerShell with elevated privileges | |
15 | # if you don't want to see a UAC prompt for various installers. |
|
15 | # if you don't want to see a UAC prompt for various installers. | |
16 | # |
|
16 | # | |
17 | # The script is tested on Windows 10 and Windows Server 2019 (in EC2). |
|
17 | # The script is tested on Windows 10 and Windows Server 2019 (in EC2). | |
18 |
|
18 | |||
19 | $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe" |
|
19 | $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe" | |
20 | $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139" |
|
20 | $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139" | |
21 |
|
21 | |||
22 | $VC9_PYTHON_URL = "https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi" |
|
22 | $VC9_PYTHON_URL = "https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi" | |
23 | $VC9_PYTHON_SHA256 = "070474db76a2e625513a5835df4595df9324d820f9cc97eab2a596dcbc2f5cbf" |
|
23 | $VC9_PYTHON_SHA256 = "070474db76a2e625513a5835df4595df9324d820f9cc97eab2a596dcbc2f5cbf" | |
24 |
|
24 | |||
25 | $PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.amd64.msi" |
|
25 | $PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.amd64.msi" | |
26 | $PYTHON27_x64_SHA256 = "b74a3afa1e0bf2a6fc566a7b70d15c9bfabba3756fb077797d16fffa27800c05" |
|
26 | $PYTHON27_x64_SHA256 = "b74a3afa1e0bf2a6fc566a7b70d15c9bfabba3756fb077797d16fffa27800c05" | |
27 | $PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.msi" |
|
27 | $PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.msi" | |
28 | $PYTHON27_X86_SHA256 = "d901802e90026e9bad76b8a81f8dd7e43c7d7e8269d9281c9e9df7a9c40480a9" |
|
28 | $PYTHON27_X86_SHA256 = "d901802e90026e9bad76b8a81f8dd7e43c7d7e8269d9281c9e9df7a9c40480a9" | |
29 |
|
29 | |||
30 | $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9.exe" |
|
30 | $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9.exe" | |
31 | $PYTHON37_x86_SHA256 = "769bb7c74ad1df6d7d74071cc16a984ff6182e4016e11b8949b93db487977220" |
|
31 | $PYTHON37_x86_SHA256 = "769bb7c74ad1df6d7d74071cc16a984ff6182e4016e11b8949b93db487977220" | |
32 | $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9-amd64.exe" |
|
32 | $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9-amd64.exe" | |
33 | $PYTHON37_x64_SHA256 = "e69ed52afb5a722e5c56f6c21d594e85c17cb29f12f18bb69751cf1714e0f987" |
|
33 | $PYTHON37_x64_SHA256 = "e69ed52afb5a722e5c56f6c21d594e85c17cb29f12f18bb69751cf1714e0f987" | |
34 |
|
34 | |||
35 |
$PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8. |
|
35 | $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10.exe" | |
36 | $PYTHON38_x86_SHA256 = "287d5df01ff22ff09e6a487ae018603ee19eade71d462ec703850c96f1d5e8a0" |
|
36 | $PYTHON38_x86_SHA256 = "ad07633a1f0cd795f3bf9da33729f662281df196b4567fa795829f3bb38a30ac" | |
37 |
$PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8. |
|
37 | $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe" | |
38 | $PYTHON38_x64_SHA256 = "328a257f189cb500606bb26ab0fbdd298ed0e05d8c36540a322a1744f489a0a0" |
|
38 | $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a" | |
39 |
|
39 | |||
40 |
$PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9. |
|
40 | $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5.exe" | |
41 | $PYTHON39_x86_SHA256 = "a4c65917f4225d1543959342f0615c813a4e9e7ff1137c4394ff6a5290ac1913" |
|
41 | $PYTHON39_x86_SHA256 = "505129081a839b699a6ab9064b441ad922ef03767b5dd4241fd0c2166baf64de" | |
42 |
$PYTHON39_x64_URL = "https://www.python.org/ftp/python/3.9. |
|
42 | $PYTHON39_x64_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5-amd64.exe" | |
43 | $PYTHON39_x64_SHA256 = "fd2e2c6612d43bb6b213b72fc53f07d73d99059fa72c96e44bde12e7815073ae" |
|
43 | $PYTHON39_x64_SHA256 = "84d5243088ba00c11e51905c704dbe041040dfff044f4e1ce5476844ee2e6eac" | |
44 |
|
44 | |||
45 | # PIP 19.2.3. |
|
45 | # PIP 19.2.3. | |
46 | $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py" |
|
46 | $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py" | |
47 | $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe" |
|
47 | $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe" | |
48 |
|
48 | |||
49 | $VIRTUALENV_URL = "https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/virtualenv-16.7.5.tar.gz" |
|
49 | $VIRTUALENV_URL = "https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/virtualenv-16.7.5.tar.gz" | |
50 | $VIRTUALENV_SHA256 = "f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2" |
|
50 | $VIRTUALENV_SHA256 = "f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2" | |
51 |
|
51 | |||
52 | $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe" |
|
52 | $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe" | |
53 | $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538" |
|
53 | $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538" | |
54 |
|
54 | |||
55 | $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip" |
|
55 | $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip" | |
56 | $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF" |
|
56 | $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF" | |
57 |
|
57 | |||
58 | $MERCURIAL_WHEEL_FILENAME = "mercurial-5.1.2-cp27-cp27m-win_amd64.whl" |
|
58 | $MERCURIAL_WHEEL_FILENAME = "mercurial-5.1.2-cp27-cp27m-win_amd64.whl" | |
59 | $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/6d/47/e031e47f7fe9b16e4e3383da47e2b0a7eae6e603996bc67a03ec4fa1b3f4/$MERCURIAL_WHEEL_FILENAME" |
|
59 | $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/6d/47/e031e47f7fe9b16e4e3383da47e2b0a7eae6e603996bc67a03ec4fa1b3f4/$MERCURIAL_WHEEL_FILENAME" | |
60 | $MERCURIAL_WHEEL_SHA256 = "1d18c7f6ca1456f0f62ee65c9a50c14cbba48ce6e924930cdb10537f5c9eaf5f" |
|
60 | $MERCURIAL_WHEEL_SHA256 = "1d18c7f6ca1456f0f62ee65c9a50c14cbba48ce6e924930cdb10537f5c9eaf5f" | |
61 |
|
61 | |||
62 | $RUSTUP_INIT_URL = "https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-pc-windows-gnu/rustup-init.exe" |
|
62 | $RUSTUP_INIT_URL = "https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-pc-windows-gnu/rustup-init.exe" | |
63 | $RUSTUP_INIT_SHA256 = "d17df34ba974b9b19cf5c75883a95475aa22ddc364591d75d174090d55711c72" |
|
63 | $RUSTUP_INIT_SHA256 = "d17df34ba974b9b19cf5c75883a95475aa22ddc364591d75d174090d55711c72" | |
64 |
|
64 | |||
|
65 | $PYOXIDIZER_URL = "https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F0.16.0/PyOxidizer-0.16.0-x64.msi" | |||
|
66 | $PYOXIDIZER_SHA256 = "2a9c58add9161c272c418d5e6dec13fbe648f624b5d26770190357e4d664f24e" | |||
|
67 | ||||
65 | # Writing progress slows down downloads substantially. So disable it. |
|
68 | # Writing progress slows down downloads substantially. So disable it. | |
66 | $progressPreference = 'silentlyContinue' |
|
69 | $progressPreference = 'silentlyContinue' | |
67 |
|
70 | |||
68 | function Secure-Download($url, $path, $sha256) { |
|
71 | function Secure-Download($url, $path, $sha256) { | |
69 | if (Test-Path -Path $path) { |
|
72 | if (Test-Path -Path $path) { | |
70 | Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash |
|
73 | Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash | |
71 |
|
74 | |||
72 | if ($hash.Hash -eq $sha256) { |
|
75 | if ($hash.Hash -eq $sha256) { | |
73 | Write-Output "SHA256 of $path verified as $sha256" |
|
76 | Write-Output "SHA256 of $path verified as $sha256" | |
74 | return |
|
77 | return | |
75 | } |
|
78 | } | |
76 |
|
79 | |||
77 | Write-Output "hash mismatch on $path; downloading again" |
|
80 | Write-Output "hash mismatch on $path; downloading again" | |
78 | } |
|
81 | } | |
79 |
|
82 | |||
80 | Write-Output "downloading $url to $path" |
|
83 | Write-Output "downloading $url to $path" | |
81 | Invoke-WebRequest -Uri $url -OutFile $path |
|
84 | Invoke-WebRequest -Uri $url -OutFile $path | |
82 | Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash |
|
85 | Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash | |
83 |
|
86 | |||
84 | if ($hash.Hash -ne $sha256) { |
|
87 | if ($hash.Hash -ne $sha256) { | |
85 | Remove-Item -Path $path |
|
88 | Remove-Item -Path $path | |
86 | throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256" |
|
89 | throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256" | |
87 | } |
|
90 | } | |
88 | } |
|
91 | } | |
89 |
|
92 | |||
90 | function Invoke-Process($path, $arguments) { |
|
93 | function Invoke-Process($path, $arguments) { | |
91 | $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden |
|
94 | $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden | |
92 |
|
95 | |||
93 | if ($p.ExitCode -ne 0) { |
|
96 | if ($p.ExitCode -ne 0) { | |
94 | throw "process exited non-0: $($p.ExitCode)" |
|
97 | throw "process exited non-0: $($p.ExitCode)" | |
95 | } |
|
98 | } | |
96 | } |
|
99 | } | |
97 |
|
100 | |||
98 | function Install-Python3($name, $installer, $dest, $pip) { |
|
101 | function Install-Python3($name, $installer, $dest, $pip) { | |
99 | Write-Output "installing $name" |
|
102 | Write-Output "installing $name" | |
100 |
|
103 | |||
101 | # We hit this when running the script as part of Simple Systems Manager in |
|
104 | # We hit this when running the script as part of Simple Systems Manager in | |
102 | # EC2. The Python 3 installer doesn't seem to like per-user installs |
|
105 | # EC2. The Python 3 installer doesn't seem to like per-user installs | |
103 | # when running as the SYSTEM user. So enable global installs if executed in |
|
106 | # when running as the SYSTEM user. So enable global installs if executed in | |
104 | # this mode. |
|
107 | # this mode. | |
105 | if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") { |
|
108 | if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") { | |
106 | Write-Output "running with SYSTEM account; installing for all users" |
|
109 | Write-Output "running with SYSTEM account; installing for all users" | |
107 | $allusers = "1" |
|
110 | $allusers = "1" | |
108 | } |
|
111 | } | |
109 | else { |
|
112 | else { | |
110 | $allusers = "0" |
|
113 | $allusers = "0" | |
111 | } |
|
114 | } | |
112 |
|
115 | |||
113 | Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0" |
|
116 | Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0" | |
114 | Invoke-Process ${dest}\python.exe $pip |
|
117 | Invoke-Process ${dest}\python.exe $pip | |
115 | } |
|
118 | } | |
116 |
|
119 | |||
117 | function Install-Rust($prefix) { |
|
120 | function Install-Rust($prefix) { | |
118 | Write-Output "installing Rust" |
|
121 | Write-Output "installing Rust" | |
119 | $Env:RUSTUP_HOME = "${prefix}\rustup" |
|
122 | $Env:RUSTUP_HOME = "${prefix}\rustup" | |
120 | $Env:CARGO_HOME = "${prefix}\cargo" |
|
123 | $Env:CARGO_HOME = "${prefix}\cargo" | |
121 |
|
124 | |||
122 | Invoke-Process "${prefix}\assets\rustup-init.exe" "-y --default-host x86_64-pc-windows-msvc" |
|
125 | Invoke-Process "${prefix}\assets\rustup-init.exe" "-y --default-host x86_64-pc-windows-msvc" | |
123 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "target add i686-pc-windows-msvc" |
|
126 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "target add i686-pc-windows-msvc" | |
124 |
Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1. |
|
127 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1.52.0" | |
125 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy" |
|
128 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy" | |
126 |
|
||||
127 | # Install PyOxidizer for packaging. |
|
|||
128 | Invoke-Process "${prefix}\cargo\bin\cargo.exe" "install --version 0.10.3 pyoxidizer" |
|
|||
129 | } |
|
129 | } | |
130 |
|
130 | |||
131 | function Install-Dependencies($prefix) { |
|
131 | function Install-Dependencies($prefix) { | |
132 | if (!(Test-Path -Path $prefix\assets)) { |
|
132 | if (!(Test-Path -Path $prefix\assets)) { | |
133 | New-Item -Path $prefix\assets -ItemType Directory |
|
133 | New-Item -Path $prefix\assets -ItemType Directory | |
134 | } |
|
134 | } | |
135 |
|
135 | |||
136 | $pip = "${prefix}\assets\get-pip.py" |
|
136 | $pip = "${prefix}\assets\get-pip.py" | |
137 |
|
137 | |||
138 | Secure-Download $VC9_PYTHON_URL ${prefix}\assets\VCForPython27.msi $VC9_PYTHON_SHA256 |
|
138 | Secure-Download $VC9_PYTHON_URL ${prefix}\assets\VCForPython27.msi $VC9_PYTHON_SHA256 | |
139 | Secure-Download $PYTHON27_x86_URL ${prefix}\assets\python27-x86.msi $PYTHON27_x86_SHA256 |
|
139 | Secure-Download $PYTHON27_x86_URL ${prefix}\assets\python27-x86.msi $PYTHON27_x86_SHA256 | |
140 | Secure-Download $PYTHON27_x64_URL ${prefix}\assets\python27-x64.msi $PYTHON27_x64_SHA256 |
|
140 | Secure-Download $PYTHON27_x64_URL ${prefix}\assets\python27-x64.msi $PYTHON27_x64_SHA256 | |
141 | Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256 |
|
141 | Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256 | |
142 | Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256 |
|
142 | Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256 | |
143 | Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256 |
|
143 | Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256 | |
144 | Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256 |
|
144 | Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256 | |
145 | Secure-Download $PYTHON39_x86_URL ${prefix}\assets\python39-x86.exe $PYTHON39_x86_SHA256 |
|
145 | Secure-Download $PYTHON39_x86_URL ${prefix}\assets\python39-x86.exe $PYTHON39_x86_SHA256 | |
146 | Secure-Download $PYTHON39_x64_URL ${prefix}\assets\python39-x64.exe $PYTHON39_x64_SHA256 |
|
146 | Secure-Download $PYTHON39_x64_URL ${prefix}\assets\python39-x64.exe $PYTHON39_x64_SHA256 | |
147 | Secure-Download $PIP_URL ${pip} $PIP_SHA256 |
|
147 | Secure-Download $PIP_URL ${pip} $PIP_SHA256 | |
148 | Secure-Download $VIRTUALENV_URL ${prefix}\assets\virtualenv.tar.gz $VIRTUALENV_SHA256 |
|
148 | Secure-Download $VIRTUALENV_URL ${prefix}\assets\virtualenv.tar.gz $VIRTUALENV_SHA256 | |
149 | Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256 |
|
149 | Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256 | |
150 | Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256 |
|
150 | Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256 | |
151 | Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256 |
|
151 | Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256 | |
152 | Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256 |
|
152 | Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256 | |
153 | Secure-Download $RUSTUP_INIT_URL ${prefix}\assets\rustup-init.exe $RUSTUP_INIT_SHA256 |
|
153 | Secure-Download $RUSTUP_INIT_URL ${prefix}\assets\rustup-init.exe $RUSTUP_INIT_SHA256 | |
|
154 | Secure-Download $PYOXIDIZER_URL ${prefix}\assets\PyOxidizer.msi $PYOXIDIZER_SHA256 | |||
154 |
|
155 | |||
155 | Write-Output "installing Python 2.7 32-bit" |
|
156 | Write-Output "installing Python 2.7 32-bit" | |
156 | Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x86.msi /l* ${prefix}\assets\python27-x86.log /q TARGETDIR=${prefix}\python27-x86 ALLUSERS=" |
|
157 | Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x86.msi /l* ${prefix}\assets\python27-x86.log /q TARGETDIR=${prefix}\python27-x86 ALLUSERS=" | |
157 | Invoke-Process ${prefix}\python27-x86\python.exe ${prefix}\assets\get-pip.py |
|
158 | Invoke-Process ${prefix}\python27-x86\python.exe ${prefix}\assets\get-pip.py | |
158 | Invoke-Process ${prefix}\python27-x86\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" |
|
159 | Invoke-Process ${prefix}\python27-x86\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" | |
159 |
|
160 | |||
160 | Write-Output "installing Python 2.7 64-bit" |
|
161 | Write-Output "installing Python 2.7 64-bit" | |
161 | Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x64.msi /l* ${prefix}\assets\python27-x64.log /q TARGETDIR=${prefix}\python27-x64 ALLUSERS=" |
|
162 | Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x64.msi /l* ${prefix}\assets\python27-x64.log /q TARGETDIR=${prefix}\python27-x64 ALLUSERS=" | |
162 | Invoke-Process ${prefix}\python27-x64\python.exe ${prefix}\assets\get-pip.py |
|
163 | Invoke-Process ${prefix}\python27-x64\python.exe ${prefix}\assets\get-pip.py | |
163 | Invoke-Process ${prefix}\python27-x64\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" |
|
164 | Invoke-Process ${prefix}\python27-x64\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" | |
164 |
|
165 | |||
165 | Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip} |
|
166 | Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip} | |
166 | Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip} |
|
167 | Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip} | |
167 | Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip} |
|
168 | Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip} | |
168 | Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip} |
|
169 | Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip} | |
169 | Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip} |
|
170 | Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip} | |
170 | Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip} |
|
171 | Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip} | |
171 |
|
172 | |||
172 | Write-Output "installing Visual Studio 2017 Build Tools and SDKs" |
|
173 | Write-Output "installing Visual Studio 2017 Build Tools and SDKs" | |
173 | Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140" |
|
174 | Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140" | |
174 |
|
175 | |||
|
176 | Write-Output "installing PyOxidizer" | |||
|
177 | Invoke-Process msiexec.exe "/i ${prefix}\assets\PyOxidizer.msi /l* ${prefix}\assets\PyOxidizer.log /quiet" | |||
|
178 | ||||
175 | Install-Rust ${prefix} |
|
179 | Install-Rust ${prefix} | |
176 |
|
180 | |||
177 | Write-Output "installing Visual C++ 9.0 for Python 2.7" |
|
181 | Write-Output "installing Visual C++ 9.0 for Python 2.7" | |
178 | Invoke-Process msiexec.exe "/i ${prefix}\assets\VCForPython27.msi /l* ${prefix}\assets\VCForPython27.log /q" |
|
182 | Invoke-Process msiexec.exe "/i ${prefix}\assets\VCForPython27.msi /l* ${prefix}\assets\VCForPython27.log /q" | |
179 |
|
183 | |||
180 | Write-Output "installing Inno Setup" |
|
184 | Write-Output "installing Inno Setup" | |
181 | Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES" |
|
185 | Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES" | |
182 |
|
186 | |||
183 | Write-Output "extracting MinGW base archive" |
|
187 | Write-Output "extracting MinGW base archive" | |
184 | Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force |
|
188 | Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force | |
185 |
|
189 | |||
186 | Write-Output "updating MinGW package catalogs" |
|
190 | Write-Output "updating MinGW package catalogs" | |
187 | Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update" |
|
191 | Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update" | |
188 |
|
192 | |||
189 | Write-Output "installing MinGW packages" |
|
193 | Write-Output "installing MinGW packages" | |
190 | Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip" |
|
194 | Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip" | |
191 |
|
195 | |||
192 | # Construct a virtualenv useful for bootstrapping. It conveniently contains a |
|
196 | # Construct a virtualenv useful for bootstrapping. It conveniently contains a | |
193 | # Mercurial install. |
|
197 | # Mercurial install. | |
194 | Write-Output "creating bootstrap virtualenv with Mercurial" |
|
198 | Write-Output "creating bootstrap virtualenv with Mercurial" | |
195 | Invoke-Process "$prefix\python27-x64\Scripts\virtualenv.exe" "${prefix}\venv-bootstrap" |
|
199 | Invoke-Process "$prefix\python27-x64\Scripts\virtualenv.exe" "${prefix}\venv-bootstrap" | |
196 | Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}" |
|
200 | Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}" | |
197 | } |
|
201 | } | |
198 |
|
202 | |||
199 | function Clone-Mercurial-Repo($prefix, $repo_url, $dest) { |
|
203 | function Clone-Mercurial-Repo($prefix, $repo_url, $dest) { | |
200 | Write-Output "cloning $repo_url to $dest" |
|
204 | Write-Output "cloning $repo_url to $dest" | |
201 | # TODO Figure out why CA verification isn't working in EC2 and remove |
|
205 | # TODO Figure out why CA verification isn't working in EC2 and remove | |
202 | # --insecure. |
|
206 | # --insecure. | |
203 | Invoke-Process "${prefix}\venv-bootstrap\Scripts\hg.exe" "clone --insecure $repo_url $dest" |
|
207 | Invoke-Process "${prefix}\venv-bootstrap\Scripts\hg.exe" "clone --insecure $repo_url $dest" | |
204 |
|
208 | |||
205 | # Mark repo as non-publishing by default for convenience. |
|
209 | # Mark repo as non-publishing by default for convenience. | |
206 | Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false" |
|
210 | Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false" | |
207 | } |
|
211 | } | |
208 |
|
212 | |||
209 | $prefix = "c:\hgdev" |
|
213 | $prefix = "c:\hgdev" | |
210 | Install-Dependencies $prefix |
|
214 | Install-Dependencies $prefix | |
211 | Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src |
|
215 | Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src |
@@ -1,187 +1,194 b'' | |||||
1 | # cli.py - Command line interface for automation |
|
1 | # cli.py - Command line interface for automation | |
2 | # |
|
2 | # | |
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # no-check-code because Python 3 native. |
|
8 | # no-check-code because Python 3 native. | |
9 |
|
9 | |||
10 | import argparse |
|
10 | import argparse | |
11 | import os |
|
11 | import os | |
12 | import pathlib |
|
12 | import pathlib | |
13 |
|
13 | |||
14 | from . import ( |
|
14 | from . import ( | |
15 | inno, |
|
15 | inno, | |
16 | wix, |
|
16 | wix, | |
17 | ) |
|
17 | ) | |
18 |
|
18 | |||
19 | HERE = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) |
|
19 | HERE = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) | |
20 | SOURCE_DIR = HERE.parent.parent.parent |
|
20 | SOURCE_DIR = HERE.parent.parent.parent | |
21 |
|
21 | |||
22 |
|
22 | |||
23 | def build_inno(pyoxidizer_target=None, python=None, iscc=None, version=None): |
|
23 | def build_inno(pyoxidizer_target=None, python=None, iscc=None, version=None): | |
24 | if not pyoxidizer_target and not python: |
|
24 | if not pyoxidizer_target and not python: | |
25 | raise Exception("--python required unless building with PyOxidizer") |
|
25 | raise Exception("--python required unless building with PyOxidizer") | |
26 |
|
26 | |||
27 | if python and not os.path.isabs(python): |
|
27 | if python and not os.path.isabs(python): | |
28 | raise Exception("--python arg must be an absolute path") |
|
28 | raise Exception("--python arg must be an absolute path") | |
29 |
|
29 | |||
30 | if iscc: |
|
30 | if iscc: | |
31 | iscc = pathlib.Path(iscc) |
|
31 | iscc = pathlib.Path(iscc) | |
32 | else: |
|
32 | else: | |
33 | iscc = ( |
|
33 | iscc = ( | |
34 | pathlib.Path(os.environ["ProgramFiles(x86)"]) |
|
34 | pathlib.Path(os.environ["ProgramFiles(x86)"]) | |
35 | / "Inno Setup 5" |
|
35 | / "Inno Setup 5" | |
36 | / "ISCC.exe" |
|
36 | / "ISCC.exe" | |
37 | ) |
|
37 | ) | |
38 |
|
38 | |||
39 | build_dir = SOURCE_DIR / "build" |
|
39 | build_dir = SOURCE_DIR / "build" | |
40 |
|
40 | |||
41 | if pyoxidizer_target: |
|
41 | if pyoxidizer_target: | |
42 | inno.build_with_pyoxidizer( |
|
42 | inno.build_with_pyoxidizer( | |
43 | SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version |
|
43 | SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version | |
44 | ) |
|
44 | ) | |
45 | else: |
|
45 | else: | |
46 | inno.build_with_py2exe( |
|
46 | inno.build_with_py2exe( | |
47 | SOURCE_DIR, |
|
47 | SOURCE_DIR, | |
48 | build_dir, |
|
48 | build_dir, | |
49 | pathlib.Path(python), |
|
49 | pathlib.Path(python), | |
50 | iscc, |
|
50 | iscc, | |
51 | version=version, |
|
51 | version=version, | |
52 | ) |
|
52 | ) | |
53 |
|
53 | |||
54 |
|
54 | |||
55 | def build_wix( |
|
55 | def build_wix( | |
56 | name=None, |
|
56 | name=None, | |
57 | pyoxidizer_target=None, |
|
57 | pyoxidizer_target=None, | |
58 | python=None, |
|
58 | python=None, | |
59 | version=None, |
|
59 | version=None, | |
60 | sign_sn=None, |
|
60 | sign_sn=None, | |
61 | sign_cert=None, |
|
61 | sign_cert=None, | |
62 | sign_password=None, |
|
62 | sign_password=None, | |
63 | sign_timestamp_url=None, |
|
63 | sign_timestamp_url=None, | |
64 | extra_packages_script=None, |
|
64 | extra_packages_script=None, | |
65 | extra_wxs=None, |
|
65 | extra_wxs=None, | |
66 | extra_features=None, |
|
66 | extra_features=None, | |
|
67 | extra_pyoxidizer_vars=None, | |||
67 | ): |
|
68 | ): | |
68 | if not pyoxidizer_target and not python: |
|
69 | if not pyoxidizer_target and not python: | |
69 | raise Exception("--python required unless building with PyOxidizer") |
|
70 | raise Exception("--python required unless building with PyOxidizer") | |
70 |
|
71 | |||
71 | if python and not os.path.isabs(python): |
|
72 | if python and not os.path.isabs(python): | |
72 | raise Exception("--python arg must be an absolute path") |
|
73 | raise Exception("--python arg must be an absolute path") | |
73 |
|
74 | |||
74 | kwargs = { |
|
75 | kwargs = { | |
75 | "source_dir": SOURCE_DIR, |
|
76 | "source_dir": SOURCE_DIR, | |
76 | "version": version, |
|
77 | "version": version, | |
77 | } |
|
78 | } | |
78 |
|
79 | |||
79 | if pyoxidizer_target: |
|
80 | if pyoxidizer_target: | |
80 | fn = wix.build_installer_pyoxidizer |
|
81 | fn = wix.build_installer_pyoxidizer | |
81 | kwargs["target_triple"] = pyoxidizer_target |
|
82 | kwargs["target_triple"] = pyoxidizer_target | |
82 | else: |
|
83 | else: | |
83 | fn = wix.build_installer_py2exe |
|
84 | fn = wix.build_installer_py2exe | |
84 | kwargs["python_exe"] = pathlib.Path(python) |
|
85 | kwargs["python_exe"] = pathlib.Path(python) | |
85 |
|
86 | |||
86 | if extra_packages_script: |
|
87 | if extra_packages_script: | |
87 | if pyoxidizer_target: |
|
88 | if pyoxidizer_target: | |
88 | raise Exception( |
|
89 | raise Exception( | |
89 | "pyoxidizer does not support --extra-packages-script" |
|
90 | "pyoxidizer does not support --extra-packages-script" | |
90 | ) |
|
91 | ) | |
91 | kwargs["extra_packages_script"] = extra_packages_script |
|
92 | kwargs["extra_packages_script"] = extra_packages_script | |
92 | if extra_wxs: |
|
93 | if extra_wxs: | |
93 | kwargs["extra_wxs"] = dict( |
|
94 | kwargs["extra_wxs"] = dict( | |
94 | thing.split("=") for thing in extra_wxs.split(",") |
|
95 | thing.split("=") for thing in extra_wxs.split(",") | |
95 | ) |
|
96 | ) | |
96 | if extra_features: |
|
97 | if extra_features: | |
97 | kwargs["extra_features"] = extra_features.split(",") |
|
98 | kwargs["extra_features"] = extra_features.split(",") | |
98 |
|
99 | |||
99 | if sign_sn or sign_cert: |
|
100 | if sign_sn or sign_cert: | |
100 | kwargs["signing_info"] = { |
|
101 | kwargs["signing_info"] = { | |
101 | "name": name, |
|
102 | "name": name, | |
102 | "subject_name": sign_sn, |
|
103 | "subject_name": sign_sn, | |
103 | "cert_path": sign_cert, |
|
104 | "cert_path": sign_cert, | |
104 | "cert_password": sign_password, |
|
105 | "cert_password": sign_password, | |
105 | "timestamp_url": sign_timestamp_url, |
|
106 | "timestamp_url": sign_timestamp_url, | |
106 | } |
|
107 | } | |
107 |
|
108 | |||
108 | fn(**kwargs) |
|
109 | fn(**kwargs, extra_pyoxidizer_vars=extra_pyoxidizer_vars) | |
109 |
|
110 | |||
110 |
|
111 | |||
111 | def get_parser(): |
|
112 | def get_parser(): | |
112 | parser = argparse.ArgumentParser() |
|
113 | parser = argparse.ArgumentParser() | |
113 |
|
114 | |||
114 | subparsers = parser.add_subparsers() |
|
115 | subparsers = parser.add_subparsers() | |
115 |
|
116 | |||
116 | sp = subparsers.add_parser("inno", help="Build Inno Setup installer") |
|
117 | sp = subparsers.add_parser("inno", help="Build Inno Setup installer") | |
117 | sp.add_argument( |
|
118 | sp.add_argument( | |
118 | "--pyoxidizer-target", |
|
119 | "--pyoxidizer-target", | |
119 | choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"}, |
|
120 | choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"}, | |
120 | help="Build with PyOxidizer targeting this host triple", |
|
121 | help="Build with PyOxidizer targeting this host triple", | |
121 | ) |
|
122 | ) | |
122 | sp.add_argument("--python", help="path to python.exe to use") |
|
123 | sp.add_argument("--python", help="path to python.exe to use") | |
123 | sp.add_argument("--iscc", help="path to iscc.exe to use") |
|
124 | sp.add_argument("--iscc", help="path to iscc.exe to use") | |
124 | sp.add_argument( |
|
125 | sp.add_argument( | |
125 | "--version", |
|
126 | "--version", | |
126 | help="Mercurial version string to use " |
|
127 | help="Mercurial version string to use " | |
127 | "(detected from __version__.py if not defined", |
|
128 | "(detected from __version__.py if not defined", | |
128 | ) |
|
129 | ) | |
129 | sp.set_defaults(func=build_inno) |
|
130 | sp.set_defaults(func=build_inno) | |
130 |
|
131 | |||
131 | sp = subparsers.add_parser( |
|
132 | sp = subparsers.add_parser( | |
132 | "wix", help="Build Windows installer with WiX Toolset" |
|
133 | "wix", help="Build Windows installer with WiX Toolset" | |
133 | ) |
|
134 | ) | |
134 | sp.add_argument("--name", help="Application name", default="Mercurial") |
|
135 | sp.add_argument("--name", help="Application name", default="Mercurial") | |
135 | sp.add_argument( |
|
136 | sp.add_argument( | |
136 | "--pyoxidizer-target", |
|
137 | "--pyoxidizer-target", | |
137 | choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"}, |
|
138 | choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"}, | |
138 | help="Build with PyOxidizer targeting this host triple", |
|
139 | help="Build with PyOxidizer targeting this host triple", | |
139 | ) |
|
140 | ) | |
140 | sp.add_argument("--python", help="Path to Python executable to use") |
|
141 | sp.add_argument("--python", help="Path to Python executable to use") | |
141 | sp.add_argument( |
|
142 | sp.add_argument( | |
142 | "--sign-sn", |
|
143 | "--sign-sn", | |
143 | help="Subject name (or fragment thereof) of certificate " |
|
144 | help="Subject name (or fragment thereof) of certificate " | |
144 | "to use for signing", |
|
145 | "to use for signing", | |
145 | ) |
|
146 | ) | |
146 | sp.add_argument( |
|
147 | sp.add_argument( | |
147 | "--sign-cert", help="Path to certificate to use for signing" |
|
148 | "--sign-cert", help="Path to certificate to use for signing" | |
148 | ) |
|
149 | ) | |
149 | sp.add_argument("--sign-password", help="Password for signing certificate") |
|
150 | sp.add_argument("--sign-password", help="Password for signing certificate") | |
150 | sp.add_argument( |
|
151 | sp.add_argument( | |
151 | "--sign-timestamp-url", |
|
152 | "--sign-timestamp-url", | |
152 | help="URL of timestamp server to use for signing", |
|
153 | help="URL of timestamp server to use for signing", | |
153 | ) |
|
154 | ) | |
154 | sp.add_argument("--version", help="Version string to use") |
|
155 | sp.add_argument("--version", help="Version string to use") | |
155 | sp.add_argument( |
|
156 | sp.add_argument( | |
156 | "--extra-packages-script", |
|
157 | "--extra-packages-script", | |
157 | help=( |
|
158 | help=( | |
158 | "Script to execute to include extra packages in " "py2exe binary." |
|
159 | "Script to execute to include extra packages in " "py2exe binary." | |
159 | ), |
|
160 | ), | |
160 | ) |
|
161 | ) | |
161 | sp.add_argument( |
|
162 | sp.add_argument( | |
162 | "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file" |
|
163 | "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file" | |
163 | ) |
|
164 | ) | |
164 | sp.add_argument( |
|
165 | sp.add_argument( | |
165 | "--extra-features", |
|
166 | "--extra-features", | |
166 | help=( |
|
167 | help=( | |
167 | "CSV of extra feature names to include " |
|
168 | "CSV of extra feature names to include " | |
168 | "in the installer from the extra wxs files" |
|
169 | "in the installer from the extra wxs files" | |
169 | ), |
|
170 | ), | |
170 | ) |
|
171 | ) | |
|
172 | ||||
|
173 | sp.add_argument( | |||
|
174 | "--extra-pyoxidizer-vars", | |||
|
175 | help="json map of extra variables to pass to pyoxidizer", | |||
|
176 | ) | |||
|
177 | ||||
171 | sp.set_defaults(func=build_wix) |
|
178 | sp.set_defaults(func=build_wix) | |
172 |
|
179 | |||
173 | return parser |
|
180 | return parser | |
174 |
|
181 | |||
175 |
|
182 | |||
176 | def main(): |
|
183 | def main(): | |
177 | parser = get_parser() |
|
184 | parser = get_parser() | |
178 | args = parser.parse_args() |
|
185 | args = parser.parse_args() | |
179 |
|
186 | |||
180 | if not hasattr(args, "func"): |
|
187 | if not hasattr(args, "func"): | |
181 | parser.print_help() |
|
188 | parser.print_help() | |
182 | return |
|
189 | return | |
183 |
|
190 | |||
184 | kwargs = dict(vars(args)) |
|
191 | kwargs = dict(vars(args)) | |
185 | del kwargs["func"] |
|
192 | del kwargs["func"] | |
186 |
|
193 | |||
187 | args.func(**kwargs) |
|
194 | args.func(**kwargs) |
@@ -1,242 +1,244 b'' | |||||
1 | # inno.py - Inno Setup functionality. |
|
1 | # inno.py - Inno Setup functionality. | |
2 | # |
|
2 | # | |
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # no-check-code because Python 3 native. |
|
8 | # no-check-code because Python 3 native. | |
9 |
|
9 | |||
10 | import os |
|
10 | import os | |
11 | import pathlib |
|
11 | import pathlib | |
12 | import shutil |
|
12 | import shutil | |
13 | import subprocess |
|
13 | import subprocess | |
14 |
|
14 | |||
15 | import jinja2 |
|
15 | import jinja2 | |
16 |
|
16 | |||
17 | from .py2exe import ( |
|
17 | from .py2exe import ( | |
18 | build_py2exe, |
|
18 | build_py2exe, | |
19 | stage_install, |
|
19 | stage_install, | |
20 | ) |
|
20 | ) | |
21 |
from .pyoxidizer import |
|
21 | from .pyoxidizer import create_pyoxidizer_install_layout | |
22 | from .util import ( |
|
22 | from .util import ( | |
23 | find_legacy_vc_runtime_files, |
|
23 | find_legacy_vc_runtime_files, | |
24 | normalize_windows_version, |
|
24 | normalize_windows_version, | |
25 | process_install_rules, |
|
25 | process_install_rules, | |
26 | read_version_py, |
|
26 | read_version_py, | |
27 | ) |
|
27 | ) | |
28 |
|
28 | |||
29 | EXTRA_PACKAGES = { |
|
29 | EXTRA_PACKAGES = { | |
30 | 'dulwich', |
|
30 | 'dulwich', | |
31 | 'keyring', |
|
31 | 'keyring', | |
32 | 'pygments', |
|
32 | 'pygments', | |
33 | 'win32ctypes', |
|
33 | 'win32ctypes', | |
34 | } |
|
34 | } | |
35 |
|
35 | |||
36 | EXTRA_INCLUDES = { |
|
36 | EXTRA_INCLUDES = { | |
37 | '_curses', |
|
37 | '_curses', | |
38 | '_curses_panel', |
|
38 | '_curses_panel', | |
39 | } |
|
39 | } | |
40 |
|
40 | |||
41 | EXTRA_INSTALL_RULES = [ |
|
41 | EXTRA_INSTALL_RULES = [ | |
42 | ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'), |
|
42 | ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'), | |
43 | ] |
|
43 | ] | |
44 |
|
44 | |||
45 | PACKAGE_FILES_METADATA = { |
|
45 | PACKAGE_FILES_METADATA = { | |
46 | 'ReadMe.html': 'Flags: isreadme', |
|
46 | 'ReadMe.html': 'Flags: isreadme', | |
47 | } |
|
47 | } | |
48 |
|
48 | |||
49 |
|
49 | |||
50 | def build_with_py2exe( |
|
50 | def build_with_py2exe( | |
51 | source_dir: pathlib.Path, |
|
51 | source_dir: pathlib.Path, | |
52 | build_dir: pathlib.Path, |
|
52 | build_dir: pathlib.Path, | |
53 | python_exe: pathlib.Path, |
|
53 | python_exe: pathlib.Path, | |
54 | iscc_exe: pathlib.Path, |
|
54 | iscc_exe: pathlib.Path, | |
55 | version=None, |
|
55 | version=None, | |
56 | ): |
|
56 | ): | |
57 | """Build the Inno installer using py2exe. |
|
57 | """Build the Inno installer using py2exe. | |
58 |
|
58 | |||
59 | Build files will be placed in ``build_dir``. |
|
59 | Build files will be placed in ``build_dir``. | |
60 |
|
60 | |||
61 | py2exe's setup.py doesn't use setuptools. It doesn't have modern logic |
|
61 | py2exe's setup.py doesn't use setuptools. It doesn't have modern logic | |
62 | for finding the Python 2.7 toolchain. So, we require the environment |
|
62 | for finding the Python 2.7 toolchain. So, we require the environment | |
63 | to already be configured with an active toolchain. |
|
63 | to already be configured with an active toolchain. | |
64 | """ |
|
64 | """ | |
65 | if not iscc_exe.exists(): |
|
65 | if not iscc_exe.exists(): | |
66 | raise Exception('%s does not exist' % iscc_exe) |
|
66 | raise Exception('%s does not exist' % iscc_exe) | |
67 |
|
67 | |||
68 | vc_x64 = r'\x64' in os.environ.get('LIB', '') |
|
68 | vc_x64 = r'\x64' in os.environ.get('LIB', '') | |
69 | arch = 'x64' if vc_x64 else 'x86' |
|
69 | arch = 'x64' if vc_x64 else 'x86' | |
70 | inno_build_dir = build_dir / ('inno-py2exe-%s' % arch) |
|
70 | inno_build_dir = build_dir / ('inno-py2exe-%s' % arch) | |
71 | staging_dir = inno_build_dir / 'stage' |
|
71 | staging_dir = inno_build_dir / 'stage' | |
72 |
|
72 | |||
73 | requirements_txt = ( |
|
73 | requirements_txt = ( | |
74 | source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt' |
|
74 | source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt' | |
75 | ) |
|
75 | ) | |
76 |
|
76 | |||
77 | inno_build_dir.mkdir(parents=True, exist_ok=True) |
|
77 | inno_build_dir.mkdir(parents=True, exist_ok=True) | |
78 |
|
78 | |||
79 | build_py2exe( |
|
79 | build_py2exe( | |
80 | source_dir, |
|
80 | source_dir, | |
81 | build_dir, |
|
81 | build_dir, | |
82 | python_exe, |
|
82 | python_exe, | |
83 | 'inno', |
|
83 | 'inno', | |
84 | requirements_txt, |
|
84 | requirements_txt, | |
85 | extra_packages=EXTRA_PACKAGES, |
|
85 | extra_packages=EXTRA_PACKAGES, | |
86 | extra_includes=EXTRA_INCLUDES, |
|
86 | extra_includes=EXTRA_INCLUDES, | |
87 | ) |
|
87 | ) | |
88 |
|
88 | |||
89 | # Purge the staging directory for every build so packaging is |
|
89 | # Purge the staging directory for every build so packaging is | |
90 | # pristine. |
|
90 | # pristine. | |
91 | if staging_dir.exists(): |
|
91 | if staging_dir.exists(): | |
92 | print('purging %s' % staging_dir) |
|
92 | print('purging %s' % staging_dir) | |
93 | shutil.rmtree(staging_dir) |
|
93 | shutil.rmtree(staging_dir) | |
94 |
|
94 | |||
95 | # Now assemble all the packaged files into the staging directory. |
|
95 | # Now assemble all the packaged files into the staging directory. | |
96 | stage_install(source_dir, staging_dir) |
|
96 | stage_install(source_dir, staging_dir) | |
97 |
|
97 | |||
98 | # We also install some extra files. |
|
98 | # We also install some extra files. | |
99 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) |
|
99 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) | |
100 |
|
100 | |||
101 | # hg.exe depends on VC9 runtime DLLs. Copy those into place. |
|
101 | # hg.exe depends on VC9 runtime DLLs. Copy those into place. | |
102 | for f in find_legacy_vc_runtime_files(vc_x64): |
|
102 | for f in find_legacy_vc_runtime_files(vc_x64): | |
103 | if f.name.endswith('.manifest'): |
|
103 | if f.name.endswith('.manifest'): | |
104 | basename = 'Microsoft.VC90.CRT.manifest' |
|
104 | basename = 'Microsoft.VC90.CRT.manifest' | |
105 | else: |
|
105 | else: | |
106 | basename = f.name |
|
106 | basename = f.name | |
107 |
|
107 | |||
108 | dest_path = staging_dir / basename |
|
108 | dest_path = staging_dir / basename | |
109 |
|
109 | |||
110 | print('copying %s to %s' % (f, dest_path)) |
|
110 | print('copying %s to %s' % (f, dest_path)) | |
111 | shutil.copyfile(f, dest_path) |
|
111 | shutil.copyfile(f, dest_path) | |
112 |
|
112 | |||
113 | build_installer( |
|
113 | build_installer( | |
114 | source_dir, |
|
114 | source_dir, | |
115 | inno_build_dir, |
|
115 | inno_build_dir, | |
116 | staging_dir, |
|
116 | staging_dir, | |
117 | iscc_exe, |
|
117 | iscc_exe, | |
118 | version, |
|
118 | version, | |
119 | arch="x64" if vc_x64 else None, |
|
119 | arch="x64" if vc_x64 else None, | |
120 | suffix="-python2", |
|
120 | suffix="-python2", | |
121 | ) |
|
121 | ) | |
122 |
|
122 | |||
123 |
|
123 | |||
124 | def build_with_pyoxidizer( |
|
124 | def build_with_pyoxidizer( | |
125 | source_dir: pathlib.Path, |
|
125 | source_dir: pathlib.Path, | |
126 | build_dir: pathlib.Path, |
|
126 | build_dir: pathlib.Path, | |
127 | target_triple: str, |
|
127 | target_triple: str, | |
128 | iscc_exe: pathlib.Path, |
|
128 | iscc_exe: pathlib.Path, | |
129 | version=None, |
|
129 | version=None, | |
130 | ): |
|
130 | ): | |
131 | """Build the Inno installer using PyOxidizer.""" |
|
131 | """Build the Inno installer using PyOxidizer.""" | |
132 | if not iscc_exe.exists(): |
|
132 | if not iscc_exe.exists(): | |
133 | raise Exception("%s does not exist" % iscc_exe) |
|
133 | raise Exception("%s does not exist" % iscc_exe) | |
134 |
|
134 | |||
135 | inno_build_dir = build_dir / ("inno-pyoxidizer-%s" % target_triple) |
|
135 | inno_build_dir = build_dir / ("inno-pyoxidizer-%s" % target_triple) | |
136 | staging_dir = inno_build_dir / "stage" |
|
136 | staging_dir = inno_build_dir / "stage" | |
137 |
|
137 | |||
138 | inno_build_dir.mkdir(parents=True, exist_ok=True) |
|
138 | inno_build_dir.mkdir(parents=True, exist_ok=True) | |
139 | run_pyoxidizer(source_dir, inno_build_dir, staging_dir, target_triple) |
|
139 | create_pyoxidizer_install_layout( | |
|
140 | source_dir, inno_build_dir, staging_dir, target_triple | |||
|
141 | ) | |||
140 |
|
142 | |||
141 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) |
|
143 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) | |
142 |
|
144 | |||
143 | build_installer( |
|
145 | build_installer( | |
144 | source_dir, |
|
146 | source_dir, | |
145 | inno_build_dir, |
|
147 | inno_build_dir, | |
146 | staging_dir, |
|
148 | staging_dir, | |
147 | iscc_exe, |
|
149 | iscc_exe, | |
148 | version, |
|
150 | version, | |
149 | arch="x64" if "x86_64" in target_triple else None, |
|
151 | arch="x64" if "x86_64" in target_triple else None, | |
150 | ) |
|
152 | ) | |
151 |
|
153 | |||
152 |
|
154 | |||
153 | def build_installer( |
|
155 | def build_installer( | |
154 | source_dir: pathlib.Path, |
|
156 | source_dir: pathlib.Path, | |
155 | inno_build_dir: pathlib.Path, |
|
157 | inno_build_dir: pathlib.Path, | |
156 | staging_dir: pathlib.Path, |
|
158 | staging_dir: pathlib.Path, | |
157 | iscc_exe: pathlib.Path, |
|
159 | iscc_exe: pathlib.Path, | |
158 | version, |
|
160 | version, | |
159 | arch=None, |
|
161 | arch=None, | |
160 | suffix="", |
|
162 | suffix="", | |
161 | ): |
|
163 | ): | |
162 | """Build an Inno installer from staged Mercurial files. |
|
164 | """Build an Inno installer from staged Mercurial files. | |
163 |
|
165 | |||
164 | This function is agnostic about how to build Mercurial. It just |
|
166 | This function is agnostic about how to build Mercurial. It just | |
165 | cares that Mercurial files are in ``staging_dir``. |
|
167 | cares that Mercurial files are in ``staging_dir``. | |
166 | """ |
|
168 | """ | |
167 | inno_source_dir = source_dir / "contrib" / "packaging" / "inno" |
|
169 | inno_source_dir = source_dir / "contrib" / "packaging" / "inno" | |
168 |
|
170 | |||
169 | # The final package layout is simply a mirror of the staging directory. |
|
171 | # The final package layout is simply a mirror of the staging directory. | |
170 | package_files = [] |
|
172 | package_files = [] | |
171 | for root, dirs, files in os.walk(staging_dir): |
|
173 | for root, dirs, files in os.walk(staging_dir): | |
172 | dirs.sort() |
|
174 | dirs.sort() | |
173 |
|
175 | |||
174 | root = pathlib.Path(root) |
|
176 | root = pathlib.Path(root) | |
175 |
|
177 | |||
176 | for f in sorted(files): |
|
178 | for f in sorted(files): | |
177 | full = root / f |
|
179 | full = root / f | |
178 | rel = full.relative_to(staging_dir) |
|
180 | rel = full.relative_to(staging_dir) | |
179 | if str(rel.parent) == '.': |
|
181 | if str(rel.parent) == '.': | |
180 | dest_dir = '{app}' |
|
182 | dest_dir = '{app}' | |
181 | else: |
|
183 | else: | |
182 | dest_dir = '{app}\\%s' % rel.parent |
|
184 | dest_dir = '{app}\\%s' % rel.parent | |
183 |
|
185 | |||
184 | package_files.append( |
|
186 | package_files.append( | |
185 | { |
|
187 | { | |
186 | 'source': rel, |
|
188 | 'source': rel, | |
187 | 'dest_dir': dest_dir, |
|
189 | 'dest_dir': dest_dir, | |
188 | 'metadata': PACKAGE_FILES_METADATA.get(str(rel), None), |
|
190 | 'metadata': PACKAGE_FILES_METADATA.get(str(rel), None), | |
189 | } |
|
191 | } | |
190 | ) |
|
192 | ) | |
191 |
|
193 | |||
192 | print('creating installer') |
|
194 | print('creating installer') | |
193 |
|
195 | |||
194 | # Install Inno files by rendering a template. |
|
196 | # Install Inno files by rendering a template. | |
195 | jinja_env = jinja2.Environment( |
|
197 | jinja_env = jinja2.Environment( | |
196 | loader=jinja2.FileSystemLoader(str(inno_source_dir)), |
|
198 | loader=jinja2.FileSystemLoader(str(inno_source_dir)), | |
197 | # Need to change these to prevent conflict with Inno Setup. |
|
199 | # Need to change these to prevent conflict with Inno Setup. | |
198 | comment_start_string='{##', |
|
200 | comment_start_string='{##', | |
199 | comment_end_string='##}', |
|
201 | comment_end_string='##}', | |
200 | ) |
|
202 | ) | |
201 |
|
203 | |||
202 | try: |
|
204 | try: | |
203 | template = jinja_env.get_template('mercurial.iss') |
|
205 | template = jinja_env.get_template('mercurial.iss') | |
204 | except jinja2.TemplateSyntaxError as e: |
|
206 | except jinja2.TemplateSyntaxError as e: | |
205 | raise Exception( |
|
207 | raise Exception( | |
206 | 'template syntax error at %s:%d: %s' |
|
208 | 'template syntax error at %s:%d: %s' | |
207 | % ( |
|
209 | % ( | |
208 | e.name, |
|
210 | e.name, | |
209 | e.lineno, |
|
211 | e.lineno, | |
210 | e.message, |
|
212 | e.message, | |
211 | ) |
|
213 | ) | |
212 | ) |
|
214 | ) | |
213 |
|
215 | |||
214 | content = template.render(package_files=package_files) |
|
216 | content = template.render(package_files=package_files) | |
215 |
|
217 | |||
216 | with (inno_build_dir / 'mercurial.iss').open('w', encoding='utf-8') as fh: |
|
218 | with (inno_build_dir / 'mercurial.iss').open('w', encoding='utf-8') as fh: | |
217 | fh.write(content) |
|
219 | fh.write(content) | |
218 |
|
220 | |||
219 | # Copy additional files used by Inno. |
|
221 | # Copy additional files used by Inno. | |
220 | for p in ('mercurial.ico', 'postinstall.txt'): |
|
222 | for p in ('mercurial.ico', 'postinstall.txt'): | |
221 | shutil.copyfile( |
|
223 | shutil.copyfile( | |
222 | source_dir / 'contrib' / 'win32' / p, inno_build_dir / p |
|
224 | source_dir / 'contrib' / 'win32' / p, inno_build_dir / p | |
223 | ) |
|
225 | ) | |
224 |
|
226 | |||
225 | args = [str(iscc_exe)] |
|
227 | args = [str(iscc_exe)] | |
226 |
|
228 | |||
227 | if arch: |
|
229 | if arch: | |
228 | args.append('/dARCH=%s' % arch) |
|
230 | args.append('/dARCH=%s' % arch) | |
229 | args.append('/dSUFFIX=-%s%s' % (arch, suffix)) |
|
231 | args.append('/dSUFFIX=-%s%s' % (arch, suffix)) | |
230 | else: |
|
232 | else: | |
231 | args.append('/dSUFFIX=-x86%s' % suffix) |
|
233 | args.append('/dSUFFIX=-x86%s' % suffix) | |
232 |
|
234 | |||
233 | if not version: |
|
235 | if not version: | |
234 | version = read_version_py(source_dir) |
|
236 | version = read_version_py(source_dir) | |
235 |
|
237 | |||
236 | args.append('/dVERSION=%s' % version) |
|
238 | args.append('/dVERSION=%s' % version) | |
237 | args.append('/dQUAD_VERSION=%s' % normalize_windows_version(version)) |
|
239 | args.append('/dQUAD_VERSION=%s' % normalize_windows_version(version)) | |
238 |
|
240 | |||
239 | args.append('/Odist') |
|
241 | args.append('/Odist') | |
240 | args.append(str(inno_build_dir / 'mercurial.iss')) |
|
242 | args.append(str(inno_build_dir / 'mercurial.iss')) | |
241 |
|
243 | |||
242 | subprocess.run(args, cwd=str(source_dir), check=True) |
|
244 | subprocess.run(args, cwd=str(source_dir), check=True) |
@@ -1,144 +1,180 b'' | |||||
1 | # pyoxidizer.py - Packaging support for PyOxidizer |
|
1 | # pyoxidizer.py - Packaging support for PyOxidizer | |
2 | # |
|
2 | # | |
3 | # Copyright 2020 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2020 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # no-check-code because Python 3 native. |
|
8 | # no-check-code because Python 3 native. | |
9 |
|
9 | |||
10 | import os |
|
10 | import os | |
11 | import pathlib |
|
11 | import pathlib | |
12 | import shutil |
|
12 | import shutil | |
13 | import subprocess |
|
13 | import subprocess | |
14 | import sys |
|
14 | import sys | |
|
15 | import typing | |||
15 |
|
16 | |||
16 | from .downloads import download_entry |
|
17 | from .downloads import download_entry | |
17 | from .util import ( |
|
18 | from .util import ( | |
18 | extract_zip_to_directory, |
|
19 | extract_zip_to_directory, | |
19 | process_install_rules, |
|
20 | process_install_rules, | |
20 | find_vc_runtime_dll, |
|
21 | find_vc_runtime_dll, | |
21 | ) |
|
22 | ) | |
22 |
|
23 | |||
23 |
|
24 | |||
24 | STAGING_RULES_WINDOWS = [ |
|
25 | STAGING_RULES_WINDOWS = [ | |
25 | ('contrib/bash_completion', 'contrib/'), |
|
26 | ('contrib/bash_completion', 'contrib/'), | |
26 | ('contrib/hgk', 'contrib/hgk.tcl'), |
|
27 | ('contrib/hgk', 'contrib/hgk.tcl'), | |
27 | ('contrib/hgweb.fcgi', 'contrib/'), |
|
28 | ('contrib/hgweb.fcgi', 'contrib/'), | |
28 | ('contrib/hgweb.wsgi', 'contrib/'), |
|
29 | ('contrib/hgweb.wsgi', 'contrib/'), | |
29 | ('contrib/logo-droplets.svg', 'contrib/'), |
|
30 | ('contrib/logo-droplets.svg', 'contrib/'), | |
30 | ('contrib/mercurial.el', 'contrib/'), |
|
31 | ('contrib/mercurial.el', 'contrib/'), | |
31 | ('contrib/mq.el', 'contrib/'), |
|
32 | ('contrib/mq.el', 'contrib/'), | |
32 | ('contrib/tcsh_completion', 'contrib/'), |
|
33 | ('contrib/tcsh_completion', 'contrib/'), | |
33 | ('contrib/tcsh_completion_build.sh', 'contrib/'), |
|
34 | ('contrib/tcsh_completion_build.sh', 'contrib/'), | |
34 | ('contrib/vim/*', 'contrib/vim/'), |
|
35 | ('contrib/vim/*', 'contrib/vim/'), | |
35 | ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'), |
|
36 | ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'), | |
36 | ('contrib/win32/ReadMe.html', 'ReadMe.html'), |
|
37 | ('contrib/win32/ReadMe.html', 'ReadMe.html'), | |
37 | ('contrib/xml.rnc', 'contrib/'), |
|
38 | ('contrib/xml.rnc', 'contrib/'), | |
38 | ('contrib/zsh_completion', 'contrib/'), |
|
39 | ('contrib/zsh_completion', 'contrib/'), | |
39 | ('doc/*.html', 'doc/'), |
|
40 | ('doc/*.html', 'doc/'), | |
40 | ('doc/style.css', 'doc/'), |
|
41 | ('doc/style.css', 'doc/'), | |
41 | ('COPYING', 'Copying.txt'), |
|
42 | ('COPYING', 'Copying.txt'), | |
42 | ] |
|
43 | ] | |
43 |
|
44 | |||
44 | STAGING_RULES_APP = [ |
|
45 | STAGING_RULES_APP = [ | |
45 | ('lib/mercurial/helptext/**/*.txt', 'helptext/'), |
|
46 | ('lib/mercurial/helptext/**/*.txt', 'helptext/'), | |
46 | ('lib/mercurial/defaultrc/*.rc', 'defaultrc/'), |
|
47 | ('lib/mercurial/defaultrc/*.rc', 'defaultrc/'), | |
47 | ('lib/mercurial/locale/**/*', 'locale/'), |
|
48 | ('lib/mercurial/locale/**/*', 'locale/'), | |
48 | ('lib/mercurial/templates/**/*', 'templates/'), |
|
49 | ('lib/mercurial/templates/**/*', 'templates/'), | |
49 | ] |
|
50 | ] | |
50 |
|
51 | |||
51 | STAGING_EXCLUDES_WINDOWS = [ |
|
52 | STAGING_EXCLUDES_WINDOWS = [ | |
52 | "doc/hg-ssh.8.html", |
|
53 | "doc/hg-ssh.8.html", | |
53 | ] |
|
54 | ] | |
54 |
|
55 | |||
55 |
|
56 | |||
|
57 | def build_docs_html(source_dir: pathlib.Path): | |||
|
58 | """Ensures HTML documentation is built. | |||
|
59 | ||||
|
60 | This will fail if docutils isn't available. | |||
|
61 | ||||
|
62 | (The HTML docs aren't built as part of `pip install` so we need to build them | |||
|
63 | out of band.) | |||
|
64 | """ | |||
|
65 | subprocess.run( | |||
|
66 | [sys.executable, str(source_dir / "setup.py"), "build_doc", "--html"], | |||
|
67 | cwd=str(source_dir), | |||
|
68 | check=True, | |||
|
69 | ) | |||
|
70 | ||||
|
71 | ||||
56 | def run_pyoxidizer( |
|
72 | def run_pyoxidizer( | |
57 | source_dir: pathlib.Path, |
|
73 | source_dir: pathlib.Path, | |
58 | build_dir: pathlib.Path, |
|
74 | build_dir: pathlib.Path, | |
59 | out_dir: pathlib.Path, |
|
|||
60 | target_triple: str, |
|
75 | target_triple: str, | |
61 | ): |
|
76 | build_vars: typing.Optional[typing.Dict[str, str]] = None, | |
62 | """Build Mercurial with PyOxidizer and copy additional files into place. |
|
77 | target: typing.Optional[str] = None, | |
|
78 | ) -> pathlib.Path: | |||
|
79 | """Run `pyoxidizer` in an environment with access to build dependencies. | |||
63 |
|
80 | |||
64 | After successful completion, ``out_dir`` contains files constituting a |
|
81 | Returns the output directory that pyoxidizer would have used for build | |
65 | Mercurial install. |
|
82 | artifacts. Actual build artifacts are likely in a sub-directory with the | |
|
83 | name of the pyoxidizer build target that was built. | |||
66 | """ |
|
84 | """ | |
|
85 | build_vars = build_vars or {} | |||
|
86 | ||||
67 | # We need to make gettext binaries available for compiling i18n files. |
|
87 | # We need to make gettext binaries available for compiling i18n files. | |
68 | gettext_pkg, gettext_entry = download_entry('gettext', build_dir) |
|
88 | gettext_pkg, gettext_entry = download_entry('gettext', build_dir) | |
69 | gettext_dep_pkg = download_entry('gettext-dep', build_dir)[0] |
|
89 | gettext_dep_pkg = download_entry('gettext-dep', build_dir)[0] | |
70 |
|
90 | |||
71 | gettext_root = build_dir / ('gettext-win-%s' % gettext_entry['version']) |
|
91 | gettext_root = build_dir / ('gettext-win-%s' % gettext_entry['version']) | |
72 |
|
92 | |||
73 | if not gettext_root.exists(): |
|
93 | if not gettext_root.exists(): | |
74 | extract_zip_to_directory(gettext_pkg, gettext_root) |
|
94 | extract_zip_to_directory(gettext_pkg, gettext_root) | |
75 | extract_zip_to_directory(gettext_dep_pkg, gettext_root) |
|
95 | extract_zip_to_directory(gettext_dep_pkg, gettext_root) | |
76 |
|
96 | |||
77 | env = dict(os.environ) |
|
97 | env = dict(os.environ) | |
78 | env["PATH"] = "%s%s%s" % ( |
|
98 | env["PATH"] = "%s%s%s" % ( | |
79 | env["PATH"], |
|
99 | env["PATH"], | |
80 | os.pathsep, |
|
100 | os.pathsep, | |
81 | str(gettext_root / "bin"), |
|
101 | str(gettext_root / "bin"), | |
82 | ) |
|
102 | ) | |
83 |
|
103 | |||
84 | args = [ |
|
104 | args = [ | |
85 | "pyoxidizer", |
|
105 | "pyoxidizer", | |
86 | "build", |
|
106 | "build", | |
87 | "--path", |
|
107 | "--path", | |
88 | str(source_dir / "rust" / "hgcli"), |
|
108 | str(source_dir / "rust" / "hgcli"), | |
89 | "--release", |
|
109 | "--release", | |
90 | "--target-triple", |
|
110 | "--target-triple", | |
91 | target_triple, |
|
111 | target_triple, | |
92 | ] |
|
112 | ] | |
93 |
|
113 | |||
|
114 | for k, v in sorted(build_vars.items()): | |||
|
115 | args.extend(["--var", k, v]) | |||
|
116 | ||||
|
117 | if target: | |||
|
118 | args.append(target) | |||
|
119 | ||||
94 | subprocess.run(args, env=env, check=True) |
|
120 | subprocess.run(args, env=env, check=True) | |
95 |
|
121 | |||
|
122 | return source_dir / "build" / "pyoxidizer" / target_triple / "release" | |||
|
123 | ||||
|
124 | ||||
|
125 | def create_pyoxidizer_install_layout( | |||
|
126 | source_dir: pathlib.Path, | |||
|
127 | build_dir: pathlib.Path, | |||
|
128 | out_dir: pathlib.Path, | |||
|
129 | target_triple: str, | |||
|
130 | ): | |||
|
131 | """Build Mercurial with PyOxidizer and copy additional files into place. | |||
|
132 | ||||
|
133 | After successful completion, ``out_dir`` contains files constituting a | |||
|
134 | Mercurial install. | |||
|
135 | """ | |||
|
136 | ||||
|
137 | run_pyoxidizer(source_dir, build_dir, target_triple) | |||
|
138 | ||||
96 | if "windows" in target_triple: |
|
139 | if "windows" in target_triple: | |
97 | target = "app_windows" |
|
140 | target = "app_windows" | |
98 | else: |
|
141 | else: | |
99 | target = "app_posix" |
|
142 | target = "app_posix" | |
100 |
|
143 | |||
101 | build_dir = ( |
|
144 | build_dir = ( | |
102 | source_dir / "build" / "pyoxidizer" / target_triple / "release" / target |
|
145 | source_dir / "build" / "pyoxidizer" / target_triple / "release" / target | |
103 | ) |
|
146 | ) | |
104 |
|
147 | |||
105 | if out_dir.exists(): |
|
148 | if out_dir.exists(): | |
106 | print("purging %s" % out_dir) |
|
149 | print("purging %s" % out_dir) | |
107 | shutil.rmtree(out_dir) |
|
150 | shutil.rmtree(out_dir) | |
108 |
|
151 | |||
109 | # Now assemble all the files from PyOxidizer into the staging directory. |
|
152 | # Now assemble all the files from PyOxidizer into the staging directory. | |
110 | shutil.copytree(build_dir, out_dir) |
|
153 | shutil.copytree(build_dir, out_dir) | |
111 |
|
154 | |||
112 | # Move some of those files around. We can get rid of this once Mercurial |
|
155 | # Move some of those files around. We can get rid of this once Mercurial | |
113 | # is taught to use the importlib APIs for reading resources. |
|
156 | # is taught to use the importlib APIs for reading resources. | |
114 | process_install_rules(STAGING_RULES_APP, build_dir, out_dir) |
|
157 | process_install_rules(STAGING_RULES_APP, build_dir, out_dir) | |
115 |
|
158 | |||
116 | # We also need to run setup.py build_doc to produce html files, |
|
159 | build_docs_html(source_dir) | |
117 | # as they aren't built as part of ``pip install``. |
|
|||
118 | # This will fail if docutils isn't installed. |
|
|||
119 | subprocess.run( |
|
|||
120 | [sys.executable, str(source_dir / "setup.py"), "build_doc", "--html"], |
|
|||
121 | cwd=str(source_dir), |
|
|||
122 | check=True, |
|
|||
123 | ) |
|
|||
124 |
|
160 | |||
125 | if "windows" in target_triple: |
|
161 | if "windows" in target_triple: | |
126 | process_install_rules(STAGING_RULES_WINDOWS, source_dir, out_dir) |
|
162 | process_install_rules(STAGING_RULES_WINDOWS, source_dir, out_dir) | |
127 |
|
163 | |||
128 | # Write out a default editor.rc file to configure notepad as the |
|
164 | # Write out a default editor.rc file to configure notepad as the | |
129 | # default editor. |
|
165 | # default editor. | |
130 | os.makedirs(out_dir / "defaultrc", exist_ok=True) |
|
166 | os.makedirs(out_dir / "defaultrc", exist_ok=True) | |
131 | with (out_dir / "defaultrc" / "editor.rc").open( |
|
167 | with (out_dir / "defaultrc" / "editor.rc").open( | |
132 | "w", encoding="utf-8" |
|
168 | "w", encoding="utf-8" | |
133 | ) as fh: |
|
169 | ) as fh: | |
134 | fh.write("[ui]\neditor = notepad\n") |
|
170 | fh.write("[ui]\neditor = notepad\n") | |
135 |
|
171 | |||
136 | for f in STAGING_EXCLUDES_WINDOWS: |
|
172 | for f in STAGING_EXCLUDES_WINDOWS: | |
137 | p = out_dir / f |
|
173 | p = out_dir / f | |
138 | if p.exists(): |
|
174 | if p.exists(): | |
139 | print("removing %s" % p) |
|
175 | print("removing %s" % p) | |
140 | p.unlink() |
|
176 | p.unlink() | |
141 |
|
177 | |||
142 | # Add vcruntimeXXX.dll next to executable. |
|
178 | # Add vcruntimeXXX.dll next to executable. | |
143 | vc_runtime_dll = find_vc_runtime_dll(x64="x86_64" in target_triple) |
|
179 | vc_runtime_dll = find_vc_runtime_dll(x64="x86_64" in target_triple) | |
144 | shutil.copy(vc_runtime_dll, out_dir / vc_runtime_dll.name) |
|
180 | shutil.copy(vc_runtime_dll, out_dir / vc_runtime_dll.name) |
@@ -1,547 +1,586 b'' | |||||
1 | # wix.py - WiX installer functionality |
|
1 | # wix.py - WiX installer functionality | |
2 | # |
|
2 | # | |
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # no-check-code because Python 3 native. |
|
8 | # no-check-code because Python 3 native. | |
9 |
|
9 | |||
10 | import collections |
|
10 | import collections | |
|
11 | import json | |||
11 | import os |
|
12 | import os | |
12 | import pathlib |
|
13 | import pathlib | |
13 | import re |
|
14 | import re | |
14 | import shutil |
|
15 | import shutil | |
15 | import subprocess |
|
16 | import subprocess | |
16 | import typing |
|
17 | import typing | |
17 | import uuid |
|
18 | import uuid | |
18 | import xml.dom.minidom |
|
19 | import xml.dom.minidom | |
19 |
|
20 | |||
20 | from .downloads import download_entry |
|
21 | from .downloads import download_entry | |
21 | from .py2exe import ( |
|
22 | from .py2exe import ( | |
22 | build_py2exe, |
|
23 | build_py2exe, | |
23 | stage_install, |
|
24 | stage_install, | |
24 | ) |
|
25 | ) | |
25 |
from .pyoxidizer import |
|
26 | from .pyoxidizer import ( | |
|
27 | build_docs_html, | |||
|
28 | create_pyoxidizer_install_layout, | |||
|
29 | run_pyoxidizer, | |||
|
30 | ) | |||
26 | from .util import ( |
|
31 | from .util import ( | |
27 | extract_zip_to_directory, |
|
32 | extract_zip_to_directory, | |
28 | normalize_windows_version, |
|
33 | normalize_windows_version, | |
29 | process_install_rules, |
|
34 | process_install_rules, | |
30 | sign_with_signtool, |
|
35 | sign_with_signtool, | |
31 | ) |
|
36 | ) | |
32 |
|
37 | |||
33 |
|
38 | |||
34 | EXTRA_PACKAGES = { |
|
39 | EXTRA_PACKAGES = { | |
35 | 'dulwich', |
|
40 | 'dulwich', | |
36 | 'distutils', |
|
41 | 'distutils', | |
37 | 'keyring', |
|
42 | 'keyring', | |
38 | 'pygments', |
|
43 | 'pygments', | |
39 | 'win32ctypes', |
|
44 | 'win32ctypes', | |
40 | } |
|
45 | } | |
41 |
|
46 | |||
42 | EXTRA_INCLUDES = { |
|
47 | EXTRA_INCLUDES = { | |
43 | '_curses', |
|
48 | '_curses', | |
44 | '_curses_panel', |
|
49 | '_curses_panel', | |
45 | } |
|
50 | } | |
46 |
|
51 | |||
47 | EXTRA_INSTALL_RULES = [ |
|
52 | EXTRA_INSTALL_RULES = [ | |
48 | ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'), |
|
53 | ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'), | |
49 | ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'), |
|
54 | ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'), | |
50 | ] |
|
55 | ] | |
51 |
|
56 | |||
52 | STAGING_REMOVE_FILES = [ |
|
57 | STAGING_REMOVE_FILES = [ | |
53 | # We use the RTF variant. |
|
58 | # We use the RTF variant. | |
54 | 'copying.txt', |
|
59 | 'copying.txt', | |
55 | ] |
|
60 | ] | |
56 |
|
61 | |||
57 | SHORTCUTS = { |
|
62 | SHORTCUTS = { | |
58 | # hg.1.html' |
|
63 | # hg.1.html' | |
59 | 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': { |
|
64 | 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': { | |
60 | 'Name': 'Mercurial Command Reference', |
|
65 | 'Name': 'Mercurial Command Reference', | |
61 | }, |
|
66 | }, | |
62 | # hgignore.5.html |
|
67 | # hgignore.5.html | |
63 | 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': { |
|
68 | 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': { | |
64 | 'Name': 'Mercurial Ignore Files', |
|
69 | 'Name': 'Mercurial Ignore Files', | |
65 | }, |
|
70 | }, | |
66 | # hgrc.5.html |
|
71 | # hgrc.5.html | |
67 | 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': { |
|
72 | 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': { | |
68 | 'Name': 'Mercurial Configuration Files', |
|
73 | 'Name': 'Mercurial Configuration Files', | |
69 | }, |
|
74 | }, | |
70 | } |
|
75 | } | |
71 |
|
76 | |||
72 |
|
77 | |||
73 | def find_version(source_dir: pathlib.Path): |
|
78 | def find_version(source_dir: pathlib.Path): | |
74 | version_py = source_dir / 'mercurial' / '__version__.py' |
|
79 | version_py = source_dir / 'mercurial' / '__version__.py' | |
75 |
|
80 | |||
76 | with version_py.open('r', encoding='utf-8') as fh: |
|
81 | with version_py.open('r', encoding='utf-8') as fh: | |
77 | source = fh.read().strip() |
|
82 | source = fh.read().strip() | |
78 |
|
83 | |||
79 | m = re.search('version = b"(.*)"', source) |
|
84 | m = re.search('version = b"(.*)"', source) | |
80 | return m.group(1) |
|
85 | return m.group(1) | |
81 |
|
86 | |||
82 |
|
87 | |||
83 | def ensure_vc90_merge_modules(build_dir): |
|
88 | def ensure_vc90_merge_modules(build_dir): | |
84 | x86 = ( |
|
89 | x86 = ( | |
85 | download_entry( |
|
90 | download_entry( | |
86 | 'vc9-crt-x86-msm', |
|
91 | 'vc9-crt-x86-msm', | |
87 | build_dir, |
|
92 | build_dir, | |
88 | local_name='microsoft.vcxx.crt.x86_msm.msm', |
|
93 | local_name='microsoft.vcxx.crt.x86_msm.msm', | |
89 | )[0], |
|
94 | )[0], | |
90 | download_entry( |
|
95 | download_entry( | |
91 | 'vc9-crt-x86-msm-policy', |
|
96 | 'vc9-crt-x86-msm-policy', | |
92 | build_dir, |
|
97 | build_dir, | |
93 | local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm', |
|
98 | local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm', | |
94 | )[0], |
|
99 | )[0], | |
95 | ) |
|
100 | ) | |
96 |
|
101 | |||
97 | x64 = ( |
|
102 | x64 = ( | |
98 | download_entry( |
|
103 | download_entry( | |
99 | 'vc9-crt-x64-msm', |
|
104 | 'vc9-crt-x64-msm', | |
100 | build_dir, |
|
105 | build_dir, | |
101 | local_name='microsoft.vcxx.crt.x64_msm.msm', |
|
106 | local_name='microsoft.vcxx.crt.x64_msm.msm', | |
102 | )[0], |
|
107 | )[0], | |
103 | download_entry( |
|
108 | download_entry( | |
104 | 'vc9-crt-x64-msm-policy', |
|
109 | 'vc9-crt-x64-msm-policy', | |
105 | build_dir, |
|
110 | build_dir, | |
106 | local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm', |
|
111 | local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm', | |
107 | )[0], |
|
112 | )[0], | |
108 | ) |
|
113 | ) | |
109 | return { |
|
114 | return { | |
110 | 'x86': x86, |
|
115 | 'x86': x86, | |
111 | 'x64': x64, |
|
116 | 'x64': x64, | |
112 | } |
|
117 | } | |
113 |
|
118 | |||
114 |
|
119 | |||
115 | def run_candle(wix, cwd, wxs, source_dir, defines=None): |
|
120 | def run_candle(wix, cwd, wxs, source_dir, defines=None): | |
116 | args = [ |
|
121 | args = [ | |
117 | str(wix / 'candle.exe'), |
|
122 | str(wix / 'candle.exe'), | |
118 | '-nologo', |
|
123 | '-nologo', | |
119 | str(wxs), |
|
124 | str(wxs), | |
120 | '-dSourceDir=%s' % source_dir, |
|
125 | '-dSourceDir=%s' % source_dir, | |
121 | ] |
|
126 | ] | |
122 |
|
127 | |||
123 | if defines: |
|
128 | if defines: | |
124 | args.extend('-d%s=%s' % define for define in sorted(defines.items())) |
|
129 | args.extend('-d%s=%s' % define for define in sorted(defines.items())) | |
125 |
|
130 | |||
126 | subprocess.run(args, cwd=str(cwd), check=True) |
|
131 | subprocess.run(args, cwd=str(cwd), check=True) | |
127 |
|
132 | |||
128 |
|
133 | |||
129 | def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str: |
|
134 | def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str: | |
130 | """Create XML string listing every file to be installed.""" |
|
135 | """Create XML string listing every file to be installed.""" | |
131 |
|
136 | |||
132 | # We derive GUIDs from a deterministic file path identifier. |
|
137 | # We derive GUIDs from a deterministic file path identifier. | |
133 | # We shoehorn the name into something that looks like a URL because |
|
138 | # We shoehorn the name into something that looks like a URL because | |
134 | # the UUID namespaces are supposed to work that way (even though |
|
139 | # the UUID namespaces are supposed to work that way (even though | |
135 | # the input data probably is never validated). |
|
140 | # the input data probably is never validated). | |
136 |
|
141 | |||
137 | doc = xml.dom.minidom.parseString( |
|
142 | doc = xml.dom.minidom.parseString( | |
138 | '<?xml version="1.0" encoding="utf-8"?>' |
|
143 | '<?xml version="1.0" encoding="utf-8"?>' | |
139 | '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">' |
|
144 | '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">' | |
140 | '</Wix>' |
|
145 | '</Wix>' | |
141 | ) |
|
146 | ) | |
142 |
|
147 | |||
143 | # Assemble the install layout by directory. This makes it easier to |
|
148 | # Assemble the install layout by directory. This makes it easier to | |
144 | # emit XML, since each directory has separate entities. |
|
149 | # emit XML, since each directory has separate entities. | |
145 | manifest = collections.defaultdict(dict) |
|
150 | manifest = collections.defaultdict(dict) | |
146 |
|
151 | |||
147 | for root, dirs, files in os.walk(staging_dir): |
|
152 | for root, dirs, files in os.walk(staging_dir): | |
148 | dirs.sort() |
|
153 | dirs.sort() | |
149 |
|
154 | |||
150 | root = pathlib.Path(root) |
|
155 | root = pathlib.Path(root) | |
151 | rel_dir = root.relative_to(staging_dir) |
|
156 | rel_dir = root.relative_to(staging_dir) | |
152 |
|
157 | |||
153 | for i in range(len(rel_dir.parts)): |
|
158 | for i in range(len(rel_dir.parts)): | |
154 | parent = '/'.join(rel_dir.parts[0 : i + 1]) |
|
159 | parent = '/'.join(rel_dir.parts[0 : i + 1]) | |
155 | manifest.setdefault(parent, {}) |
|
160 | manifest.setdefault(parent, {}) | |
156 |
|
161 | |||
157 | for f in sorted(files): |
|
162 | for f in sorted(files): | |
158 | full = root / f |
|
163 | full = root / f | |
159 | manifest[str(rel_dir).replace('\\', '/')][full.name] = full |
|
164 | manifest[str(rel_dir).replace('\\', '/')][full.name] = full | |
160 |
|
165 | |||
161 | component_groups = collections.defaultdict(list) |
|
166 | component_groups = collections.defaultdict(list) | |
162 |
|
167 | |||
163 | # Now emit a <Fragment> for each directory. |
|
168 | # Now emit a <Fragment> for each directory. | |
164 | # Each directory is composed of a <DirectoryRef> pointing to its parent |
|
169 | # Each directory is composed of a <DirectoryRef> pointing to its parent | |
165 | # and defines child <Directory>'s and a <Component> with all the files. |
|
170 | # and defines child <Directory>'s and a <Component> with all the files. | |
166 | for dir_name, entries in sorted(manifest.items()): |
|
171 | for dir_name, entries in sorted(manifest.items()): | |
167 | # The directory id is derived from the path. But the root directory |
|
172 | # The directory id is derived from the path. But the root directory | |
168 | # is special. |
|
173 | # is special. | |
169 | if dir_name == '.': |
|
174 | if dir_name == '.': | |
170 | parent_directory_id = 'INSTALLDIR' |
|
175 | parent_directory_id = 'INSTALLDIR' | |
171 | else: |
|
176 | else: | |
172 | parent_directory_id = 'hg.dir.%s' % dir_name.replace( |
|
177 | parent_directory_id = 'hg.dir.%s' % dir_name.replace( | |
173 | '/', '.' |
|
178 | '/', '.' | |
174 | ).replace('-', '_') |
|
179 | ).replace('-', '_') | |
175 |
|
180 | |||
176 | fragment = doc.createElement('Fragment') |
|
181 | fragment = doc.createElement('Fragment') | |
177 | directory_ref = doc.createElement('DirectoryRef') |
|
182 | directory_ref = doc.createElement('DirectoryRef') | |
178 | directory_ref.setAttribute('Id', parent_directory_id) |
|
183 | directory_ref.setAttribute('Id', parent_directory_id) | |
179 |
|
184 | |||
180 | # Add <Directory> entries for immediate children directories. |
|
185 | # Add <Directory> entries for immediate children directories. | |
181 | for possible_child in sorted(manifest.keys()): |
|
186 | for possible_child in sorted(manifest.keys()): | |
182 | if ( |
|
187 | if ( | |
183 | dir_name == '.' |
|
188 | dir_name == '.' | |
184 | and '/' not in possible_child |
|
189 | and '/' not in possible_child | |
185 | and possible_child != '.' |
|
190 | and possible_child != '.' | |
186 | ): |
|
191 | ): | |
187 | child_directory_id = ('hg.dir.%s' % possible_child).replace( |
|
192 | child_directory_id = ('hg.dir.%s' % possible_child).replace( | |
188 | '-', '_' |
|
193 | '-', '_' | |
189 | ) |
|
194 | ) | |
190 | name = possible_child |
|
195 | name = possible_child | |
191 | else: |
|
196 | else: | |
192 | if not possible_child.startswith('%s/' % dir_name): |
|
197 | if not possible_child.startswith('%s/' % dir_name): | |
193 | continue |
|
198 | continue | |
194 | name = possible_child[len(dir_name) + 1 :] |
|
199 | name = possible_child[len(dir_name) + 1 :] | |
195 | if '/' in name: |
|
200 | if '/' in name: | |
196 | continue |
|
201 | continue | |
197 |
|
202 | |||
198 | child_directory_id = 'hg.dir.%s' % possible_child.replace( |
|
203 | child_directory_id = 'hg.dir.%s' % possible_child.replace( | |
199 | '/', '.' |
|
204 | '/', '.' | |
200 | ).replace('-', '_') |
|
205 | ).replace('-', '_') | |
201 |
|
206 | |||
202 | directory = doc.createElement('Directory') |
|
207 | directory = doc.createElement('Directory') | |
203 | directory.setAttribute('Id', child_directory_id) |
|
208 | directory.setAttribute('Id', child_directory_id) | |
204 | directory.setAttribute('Name', name) |
|
209 | directory.setAttribute('Name', name) | |
205 | directory_ref.appendChild(directory) |
|
210 | directory_ref.appendChild(directory) | |
206 |
|
211 | |||
207 | # Add <Component>s for files in this directory. |
|
212 | # Add <Component>s for files in this directory. | |
208 | for rel, source_path in sorted(entries.items()): |
|
213 | for rel, source_path in sorted(entries.items()): | |
209 | if dir_name == '.': |
|
214 | if dir_name == '.': | |
210 | full_rel = rel |
|
215 | full_rel = rel | |
211 | else: |
|
216 | else: | |
212 | full_rel = '%s/%s' % (dir_name, rel) |
|
217 | full_rel = '%s/%s' % (dir_name, rel) | |
213 |
|
218 | |||
214 | component_unique_id = ( |
|
219 | component_unique_id = ( | |
215 | 'https://www.mercurial-scm.org/wix-installer/0/component/%s' |
|
220 | 'https://www.mercurial-scm.org/wix-installer/0/component/%s' | |
216 | % full_rel |
|
221 | % full_rel | |
217 | ) |
|
222 | ) | |
218 | component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id) |
|
223 | component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id) | |
219 | component_id = 'hg.component.%s' % str(component_guid).replace( |
|
224 | component_id = 'hg.component.%s' % str(component_guid).replace( | |
220 | '-', '_' |
|
225 | '-', '_' | |
221 | ) |
|
226 | ) | |
222 |
|
227 | |||
223 | component = doc.createElement('Component') |
|
228 | component = doc.createElement('Component') | |
224 |
|
229 | |||
225 | component.setAttribute('Id', component_id) |
|
230 | component.setAttribute('Id', component_id) | |
226 | component.setAttribute('Guid', str(component_guid).upper()) |
|
231 | component.setAttribute('Guid', str(component_guid).upper()) | |
227 | component.setAttribute('Win64', 'yes' if is_x64 else 'no') |
|
232 | component.setAttribute('Win64', 'yes' if is_x64 else 'no') | |
228 |
|
233 | |||
229 | # Assign this component to a top-level group. |
|
234 | # Assign this component to a top-level group. | |
230 | if dir_name == '.': |
|
235 | if dir_name == '.': | |
231 | component_groups['ROOT'].append(component_id) |
|
236 | component_groups['ROOT'].append(component_id) | |
232 | elif '/' in dir_name: |
|
237 | elif '/' in dir_name: | |
233 | component_groups[dir_name[0 : dir_name.index('/')]].append( |
|
238 | component_groups[dir_name[0 : dir_name.index('/')]].append( | |
234 | component_id |
|
239 | component_id | |
235 | ) |
|
240 | ) | |
236 | else: |
|
241 | else: | |
237 | component_groups[dir_name].append(component_id) |
|
242 | component_groups[dir_name].append(component_id) | |
238 |
|
243 | |||
239 | unique_id = ( |
|
244 | unique_id = ( | |
240 | 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel |
|
245 | 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel | |
241 | ) |
|
246 | ) | |
242 | file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id) |
|
247 | file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id) | |
243 |
|
248 | |||
244 | # IDs have length limits. So use GUID to derive them. |
|
249 | # IDs have length limits. So use GUID to derive them. | |
245 | file_guid_normalized = str(file_guid).replace('-', '_') |
|
250 | file_guid_normalized = str(file_guid).replace('-', '_') | |
246 | file_id = 'hg.file.%s' % file_guid_normalized |
|
251 | file_id = 'hg.file.%s' % file_guid_normalized | |
247 |
|
252 | |||
248 | file_element = doc.createElement('File') |
|
253 | file_element = doc.createElement('File') | |
249 | file_element.setAttribute('Id', file_id) |
|
254 | file_element.setAttribute('Id', file_id) | |
250 | file_element.setAttribute('Source', str(source_path)) |
|
255 | file_element.setAttribute('Source', str(source_path)) | |
251 | file_element.setAttribute('KeyPath', 'yes') |
|
256 | file_element.setAttribute('KeyPath', 'yes') | |
252 | file_element.setAttribute('ReadOnly', 'yes') |
|
257 | file_element.setAttribute('ReadOnly', 'yes') | |
253 |
|
258 | |||
254 | component.appendChild(file_element) |
|
259 | component.appendChild(file_element) | |
255 | directory_ref.appendChild(component) |
|
260 | directory_ref.appendChild(component) | |
256 |
|
261 | |||
257 | fragment.appendChild(directory_ref) |
|
262 | fragment.appendChild(directory_ref) | |
258 | doc.documentElement.appendChild(fragment) |
|
263 | doc.documentElement.appendChild(fragment) | |
259 |
|
264 | |||
260 | for group, component_ids in sorted(component_groups.items()): |
|
265 | for group, component_ids in sorted(component_groups.items()): | |
261 | fragment = doc.createElement('Fragment') |
|
266 | fragment = doc.createElement('Fragment') | |
262 | component_group = doc.createElement('ComponentGroup') |
|
267 | component_group = doc.createElement('ComponentGroup') | |
263 | component_group.setAttribute('Id', 'hg.group.%s' % group) |
|
268 | component_group.setAttribute('Id', 'hg.group.%s' % group) | |
264 |
|
269 | |||
265 | for component_id in component_ids: |
|
270 | for component_id in component_ids: | |
266 | component_ref = doc.createElement('ComponentRef') |
|
271 | component_ref = doc.createElement('ComponentRef') | |
267 | component_ref.setAttribute('Id', component_id) |
|
272 | component_ref.setAttribute('Id', component_id) | |
268 | component_group.appendChild(component_ref) |
|
273 | component_group.appendChild(component_ref) | |
269 |
|
274 | |||
270 | fragment.appendChild(component_group) |
|
275 | fragment.appendChild(component_group) | |
271 | doc.documentElement.appendChild(fragment) |
|
276 | doc.documentElement.appendChild(fragment) | |
272 |
|
277 | |||
273 | # Add <Shortcut> to files that have it defined. |
|
278 | # Add <Shortcut> to files that have it defined. | |
274 | for file_id, metadata in sorted(SHORTCUTS.items()): |
|
279 | for file_id, metadata in sorted(SHORTCUTS.items()): | |
275 | els = doc.getElementsByTagName('File') |
|
280 | els = doc.getElementsByTagName('File') | |
276 | els = [el for el in els if el.getAttribute('Id') == file_id] |
|
281 | els = [el for el in els if el.getAttribute('Id') == file_id] | |
277 |
|
282 | |||
278 | if not els: |
|
283 | if not els: | |
279 | raise Exception('could not find File[Id=%s]' % file_id) |
|
284 | raise Exception('could not find File[Id=%s]' % file_id) | |
280 |
|
285 | |||
281 | for el in els: |
|
286 | for el in els: | |
282 | shortcut = doc.createElement('Shortcut') |
|
287 | shortcut = doc.createElement('Shortcut') | |
283 | shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id) |
|
288 | shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id) | |
284 | shortcut.setAttribute('Directory', 'ProgramMenuDir') |
|
289 | shortcut.setAttribute('Directory', 'ProgramMenuDir') | |
285 | shortcut.setAttribute('Icon', 'hgIcon.ico') |
|
290 | shortcut.setAttribute('Icon', 'hgIcon.ico') | |
286 | shortcut.setAttribute('IconIndex', '0') |
|
291 | shortcut.setAttribute('IconIndex', '0') | |
287 | shortcut.setAttribute('Advertise', 'yes') |
|
292 | shortcut.setAttribute('Advertise', 'yes') | |
288 | for k, v in sorted(metadata.items()): |
|
293 | for k, v in sorted(metadata.items()): | |
289 | shortcut.setAttribute(k, v) |
|
294 | shortcut.setAttribute(k, v) | |
290 |
|
295 | |||
291 | el.appendChild(shortcut) |
|
296 | el.appendChild(shortcut) | |
292 |
|
297 | |||
293 | return doc.toprettyxml() |
|
298 | return doc.toprettyxml() | |
294 |
|
299 | |||
295 |
|
300 | |||
296 | def build_installer_py2exe( |
|
301 | def build_installer_py2exe( | |
297 | source_dir: pathlib.Path, |
|
302 | source_dir: pathlib.Path, | |
298 | python_exe: pathlib.Path, |
|
303 | python_exe: pathlib.Path, | |
299 | msi_name='mercurial', |
|
304 | msi_name='mercurial', | |
300 | version=None, |
|
305 | version=None, | |
301 | extra_packages_script=None, |
|
306 | extra_packages_script=None, | |
302 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, |
|
307 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, | |
303 | extra_features: typing.Optional[typing.List[str]] = None, |
|
308 | extra_features: typing.Optional[typing.List[str]] = None, | |
304 | signing_info: typing.Optional[typing.Dict[str, str]] = None, |
|
309 | signing_info: typing.Optional[typing.Dict[str, str]] = None, | |
305 | ): |
|
310 | ): | |
306 | """Build a WiX MSI installer using py2exe. |
|
311 | """Build a WiX MSI installer using py2exe. | |
307 |
|
312 | |||
308 | ``source_dir`` is the path to the Mercurial source tree to use. |
|
313 | ``source_dir`` is the path to the Mercurial source tree to use. | |
309 | ``arch`` is the target architecture. either ``x86`` or ``x64``. |
|
314 | ``arch`` is the target architecture. either ``x86`` or ``x64``. | |
310 | ``python_exe`` is the path to the Python executable to use/bundle. |
|
315 | ``python_exe`` is the path to the Python executable to use/bundle. | |
311 | ``version`` is the Mercurial version string. If not defined, |
|
316 | ``version`` is the Mercurial version string. If not defined, | |
312 | ``mercurial/__version__.py`` will be consulted. |
|
317 | ``mercurial/__version__.py`` will be consulted. | |
313 | ``extra_packages_script`` is a command to be run to inject extra packages |
|
318 | ``extra_packages_script`` is a command to be run to inject extra packages | |
314 | into the py2exe binary. It should stage packages into the virtualenv and |
|
319 | into the py2exe binary. It should stage packages into the virtualenv and | |
315 | print a null byte followed by a newline-separated list of packages that |
|
320 | print a null byte followed by a newline-separated list of packages that | |
316 | should be included in the exe. |
|
321 | should be included in the exe. | |
317 | ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}. |
|
322 | ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}. | |
318 | ``extra_features`` is a list of additional named Features to include in |
|
323 | ``extra_features`` is a list of additional named Features to include in | |
319 | the build. These must match Feature names in one of the wxs scripts. |
|
324 | the build. These must match Feature names in one of the wxs scripts. | |
320 | """ |
|
325 | """ | |
321 | arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86' |
|
326 | arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86' | |
322 |
|
327 | |||
323 | hg_build_dir = source_dir / 'build' |
|
328 | hg_build_dir = source_dir / 'build' | |
324 |
|
329 | |||
325 | requirements_txt = ( |
|
330 | requirements_txt = ( | |
326 | source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt' |
|
331 | source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt' | |
327 | ) |
|
332 | ) | |
328 |
|
333 | |||
329 | build_py2exe( |
|
334 | build_py2exe( | |
330 | source_dir, |
|
335 | source_dir, | |
331 | hg_build_dir, |
|
336 | hg_build_dir, | |
332 | python_exe, |
|
337 | python_exe, | |
333 | 'wix', |
|
338 | 'wix', | |
334 | requirements_txt, |
|
339 | requirements_txt, | |
335 | extra_packages=EXTRA_PACKAGES, |
|
340 | extra_packages=EXTRA_PACKAGES, | |
336 | extra_packages_script=extra_packages_script, |
|
341 | extra_packages_script=extra_packages_script, | |
337 | extra_includes=EXTRA_INCLUDES, |
|
342 | extra_includes=EXTRA_INCLUDES, | |
338 | ) |
|
343 | ) | |
339 |
|
344 | |||
340 | build_dir = hg_build_dir / ('wix-%s' % arch) |
|
345 | build_dir = hg_build_dir / ('wix-%s' % arch) | |
341 | staging_dir = build_dir / 'stage' |
|
346 | staging_dir = build_dir / 'stage' | |
342 |
|
347 | |||
343 | build_dir.mkdir(exist_ok=True) |
|
348 | build_dir.mkdir(exist_ok=True) | |
344 |
|
349 | |||
345 | # Purge the staging directory for every build so packaging is pristine. |
|
350 | # Purge the staging directory for every build so packaging is pristine. | |
346 | if staging_dir.exists(): |
|
351 | if staging_dir.exists(): | |
347 | print('purging %s' % staging_dir) |
|
352 | print('purging %s' % staging_dir) | |
348 | shutil.rmtree(staging_dir) |
|
353 | shutil.rmtree(staging_dir) | |
349 |
|
354 | |||
350 | stage_install(source_dir, staging_dir, lower_case=True) |
|
355 | stage_install(source_dir, staging_dir, lower_case=True) | |
351 |
|
356 | |||
352 | # We also install some extra files. |
|
357 | # We also install some extra files. | |
353 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) |
|
358 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) | |
354 |
|
359 | |||
355 | # And remove some files we don't want. |
|
360 | # And remove some files we don't want. | |
356 | for f in STAGING_REMOVE_FILES: |
|
361 | for f in STAGING_REMOVE_FILES: | |
357 | p = staging_dir / f |
|
362 | p = staging_dir / f | |
358 | if p.exists(): |
|
363 | if p.exists(): | |
359 | print('removing %s' % p) |
|
364 | print('removing %s' % p) | |
360 | p.unlink() |
|
365 | p.unlink() | |
361 |
|
366 | |||
362 | return run_wix_packaging( |
|
367 | return run_wix_packaging( | |
363 | source_dir, |
|
368 | source_dir, | |
364 | build_dir, |
|
369 | build_dir, | |
365 | staging_dir, |
|
370 | staging_dir, | |
366 | arch, |
|
371 | arch, | |
367 | version=version, |
|
372 | version=version, | |
368 | python2=True, |
|
373 | python2=True, | |
369 | msi_name=msi_name, |
|
374 | msi_name=msi_name, | |
370 | suffix="-python2", |
|
375 | suffix="-python2", | |
371 | extra_wxs=extra_wxs, |
|
376 | extra_wxs=extra_wxs, | |
372 | extra_features=extra_features, |
|
377 | extra_features=extra_features, | |
373 | signing_info=signing_info, |
|
378 | signing_info=signing_info, | |
374 | ) |
|
379 | ) | |
375 |
|
380 | |||
376 |
|
381 | |||
377 | def build_installer_pyoxidizer( |
|
382 | def build_installer_pyoxidizer( | |
378 | source_dir: pathlib.Path, |
|
383 | source_dir: pathlib.Path, | |
379 | target_triple: str, |
|
384 | target_triple: str, | |
380 | msi_name='mercurial', |
|
385 | msi_name='mercurial', | |
381 | version=None, |
|
386 | version=None, | |
382 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, |
|
387 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, | |
383 | extra_features: typing.Optional[typing.List[str]] = None, |
|
388 | extra_features: typing.Optional[typing.List[str]] = None, | |
384 | signing_info: typing.Optional[typing.Dict[str, str]] = None, |
|
389 | signing_info: typing.Optional[typing.Dict[str, str]] = None, | |
|
390 | extra_pyoxidizer_vars=None, | |||
385 | ): |
|
391 | ): | |
386 | """Build a WiX MSI installer using PyOxidizer.""" |
|
392 | """Build a WiX MSI installer using PyOxidizer.""" | |
387 | hg_build_dir = source_dir / "build" |
|
393 | hg_build_dir = source_dir / "build" | |
388 | build_dir = hg_build_dir / ("wix-%s" % target_triple) |
|
394 | build_dir = hg_build_dir / ("wix-%s" % target_triple) | |
389 | staging_dir = build_dir / "stage" |
|
|||
390 |
|
||||
391 | arch = "x64" if "x86_64" in target_triple else "x86" |
|
|||
392 |
|
395 | |||
393 | build_dir.mkdir(parents=True, exist_ok=True) |
|
396 | build_dir.mkdir(parents=True, exist_ok=True) | |
394 | run_pyoxidizer(source_dir, build_dir, staging_dir, target_triple) |
|
397 | ||
|
398 | # Need to ensure docs HTML is built because this isn't done as part of | |||
|
399 | # `pip install Mercurial`. | |||
|
400 | build_docs_html(source_dir) | |||
|
401 | ||||
|
402 | build_vars = {} | |||
395 |
|
403 | |||
396 | # We also install some extra files. |
|
404 | if msi_name: | |
397 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) |
|
405 | build_vars["MSI_NAME"] = msi_name | |
|
406 | ||||
|
407 | if version: | |||
|
408 | build_vars["VERSION"] = version | |||
|
409 | ||||
|
410 | if extra_features: | |||
|
411 | build_vars["EXTRA_MSI_FEATURES"] = ";".join(extra_features) | |||
398 |
|
412 | |||
399 | # And remove some files we don't want. |
|
413 | if signing_info: | |
400 | for f in STAGING_REMOVE_FILES: |
|
414 | if signing_info["cert_path"]: | |
401 | p = staging_dir / f |
|
415 | build_vars["SIGNING_PFX_PATH"] = signing_info["cert_path"] | |
402 | if p.exists(): |
|
416 | if signing_info["cert_password"]: | |
403 | print('removing %s' % p) |
|
417 | build_vars["SIGNING_PFX_PASSWORD"] = signing_info["cert_password"] | |
404 | p.unlink() |
|
418 | if signing_info["subject_name"]: | |
|
419 | build_vars["SIGNING_SUBJECT_NAME"] = signing_info["subject_name"] | |||
|
420 | if signing_info["timestamp_url"]: | |||
|
421 | build_vars["TIME_STAMP_SERVER_URL"] = signing_info["timestamp_url"] | |||
405 |
|
422 | |||
406 | return run_wix_packaging( |
|
423 | if extra_pyoxidizer_vars: | |
|
424 | build_vars.update(json.loads(extra_pyoxidizer_vars)) | |||
|
425 | ||||
|
426 | if extra_wxs: | |||
|
427 | raise Exception( | |||
|
428 | "support for extra .wxs files has been temporarily dropped" | |||
|
429 | ) | |||
|
430 | ||||
|
431 | out_dir = run_pyoxidizer( | |||
407 | source_dir, |
|
432 | source_dir, | |
408 | build_dir, |
|
433 | build_dir, | |
409 |
|
|
434 | target_triple, | |
410 | arch, |
|
435 | build_vars=build_vars, | |
411 |
|
|
436 | target="msi", | |
412 | python2=False, |
|
|||
413 | msi_name=msi_name, |
|
|||
414 | extra_wxs=extra_wxs, |
|
|||
415 | extra_features=extra_features, |
|
|||
416 | signing_info=signing_info, |
|
|||
417 | ) |
|
437 | ) | |
418 |
|
438 | |||
|
439 | msi_dir = out_dir / "msi" | |||
|
440 | msi_files = [f for f in os.listdir(msi_dir) if f.endswith(".msi")] | |||
|
441 | ||||
|
442 | if len(msi_files) != 1: | |||
|
443 | raise Exception("expected exactly 1 .msi file; got %d" % len(msi_files)) | |||
|
444 | ||||
|
445 | msi_filename = msi_files[0] | |||
|
446 | ||||
|
447 | msi_path = msi_dir / msi_filename | |||
|
448 | dist_path = source_dir / "dist" / msi_filename | |||
|
449 | ||||
|
450 | dist_path.parent.mkdir(parents=True, exist_ok=True) | |||
|
451 | ||||
|
452 | shutil.copyfile(msi_path, dist_path) | |||
|
453 | ||||
|
454 | return { | |||
|
455 | "msi_path": dist_path, | |||
|
456 | } | |||
|
457 | ||||
419 |
|
458 | |||
420 | def run_wix_packaging( |
|
459 | def run_wix_packaging( | |
421 | source_dir: pathlib.Path, |
|
460 | source_dir: pathlib.Path, | |
422 | build_dir: pathlib.Path, |
|
461 | build_dir: pathlib.Path, | |
423 | staging_dir: pathlib.Path, |
|
462 | staging_dir: pathlib.Path, | |
424 | arch: str, |
|
463 | arch: str, | |
425 | version: str, |
|
464 | version: str, | |
426 | python2: bool, |
|
465 | python2: bool, | |
427 | msi_name: typing.Optional[str] = "mercurial", |
|
466 | msi_name: typing.Optional[str] = "mercurial", | |
428 | suffix: str = "", |
|
467 | suffix: str = "", | |
429 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, |
|
468 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, | |
430 | extra_features: typing.Optional[typing.List[str]] = None, |
|
469 | extra_features: typing.Optional[typing.List[str]] = None, | |
431 | signing_info: typing.Optional[typing.Dict[str, str]] = None, |
|
470 | signing_info: typing.Optional[typing.Dict[str, str]] = None, | |
432 | ): |
|
471 | ): | |
433 | """Invokes WiX to package up a built Mercurial. |
|
472 | """Invokes WiX to package up a built Mercurial. | |
434 |
|
473 | |||
435 | ``signing_info`` is a dict defining properties to facilitate signing the |
|
474 | ``signing_info`` is a dict defining properties to facilitate signing the | |
436 | installer. Recognized keys include ``name``, ``subject_name``, |
|
475 | installer. Recognized keys include ``name``, ``subject_name``, | |
437 | ``cert_path``, ``cert_password``, and ``timestamp_url``. If populated, |
|
476 | ``cert_path``, ``cert_password``, and ``timestamp_url``. If populated, | |
438 | we will sign both the hg.exe and the .msi using the signing credentials |
|
477 | we will sign both the hg.exe and the .msi using the signing credentials | |
439 | specified. |
|
478 | specified. | |
440 | """ |
|
479 | """ | |
441 |
|
480 | |||
442 | orig_version = version or find_version(source_dir) |
|
481 | orig_version = version or find_version(source_dir) | |
443 | version = normalize_windows_version(orig_version) |
|
482 | version = normalize_windows_version(orig_version) | |
444 | print('using version string: %s' % version) |
|
483 | print('using version string: %s' % version) | |
445 | if version != orig_version: |
|
484 | if version != orig_version: | |
446 | print('(normalized from: %s)' % orig_version) |
|
485 | print('(normalized from: %s)' % orig_version) | |
447 |
|
486 | |||
448 | if signing_info: |
|
487 | if signing_info: | |
449 | sign_with_signtool( |
|
488 | sign_with_signtool( | |
450 | staging_dir / "hg.exe", |
|
489 | staging_dir / "hg.exe", | |
451 | "%s %s" % (signing_info["name"], version), |
|
490 | "%s %s" % (signing_info["name"], version), | |
452 | subject_name=signing_info["subject_name"], |
|
491 | subject_name=signing_info["subject_name"], | |
453 | cert_path=signing_info["cert_path"], |
|
492 | cert_path=signing_info["cert_path"], | |
454 | cert_password=signing_info["cert_password"], |
|
493 | cert_password=signing_info["cert_password"], | |
455 | timestamp_url=signing_info["timestamp_url"], |
|
494 | timestamp_url=signing_info["timestamp_url"], | |
456 | ) |
|
495 | ) | |
457 |
|
496 | |||
458 | wix_dir = source_dir / 'contrib' / 'packaging' / 'wix' |
|
497 | wix_dir = source_dir / 'contrib' / 'packaging' / 'wix' | |
459 |
|
498 | |||
460 | wix_pkg, wix_entry = download_entry('wix', build_dir) |
|
499 | wix_pkg, wix_entry = download_entry('wix', build_dir) | |
461 | wix_path = build_dir / ('wix-%s' % wix_entry['version']) |
|
500 | wix_path = build_dir / ('wix-%s' % wix_entry['version']) | |
462 |
|
501 | |||
463 | if not wix_path.exists(): |
|
502 | if not wix_path.exists(): | |
464 | extract_zip_to_directory(wix_pkg, wix_path) |
|
503 | extract_zip_to_directory(wix_pkg, wix_path) | |
465 |
|
504 | |||
466 | if python2: |
|
505 | if python2: | |
467 | ensure_vc90_merge_modules(build_dir) |
|
506 | ensure_vc90_merge_modules(build_dir) | |
468 |
|
507 | |||
469 | source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir)) |
|
508 | source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir)) | |
470 |
|
509 | |||
471 | defines = {'Platform': arch} |
|
510 | defines = {'Platform': arch} | |
472 |
|
511 | |||
473 | # Derive a .wxs file with the staged files. |
|
512 | # Derive a .wxs file with the staged files. | |
474 | manifest_wxs = build_dir / 'stage.wxs' |
|
513 | manifest_wxs = build_dir / 'stage.wxs' | |
475 | with manifest_wxs.open('w', encoding='utf-8') as fh: |
|
514 | with manifest_wxs.open('w', encoding='utf-8') as fh: | |
476 | fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64')) |
|
515 | fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64')) | |
477 |
|
516 | |||
478 | run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines) |
|
517 | run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines) | |
479 |
|
518 | |||
480 | for source, rel_path in sorted((extra_wxs or {}).items()): |
|
519 | for source, rel_path in sorted((extra_wxs or {}).items()): | |
481 | run_candle(wix_path, build_dir, source, rel_path, defines=defines) |
|
520 | run_candle(wix_path, build_dir, source, rel_path, defines=defines) | |
482 |
|
521 | |||
483 | source = wix_dir / 'mercurial.wxs' |
|
522 | source = wix_dir / 'mercurial.wxs' | |
484 | defines['Version'] = version |
|
523 | defines['Version'] = version | |
485 | defines['Comments'] = 'Installs Mercurial version %s' % version |
|
524 | defines['Comments'] = 'Installs Mercurial version %s' % version | |
486 |
|
525 | |||
487 | if python2: |
|
526 | if python2: | |
488 | defines["PythonVersion"] = "2" |
|
527 | defines["PythonVersion"] = "2" | |
489 | defines['VCRedistSrcDir'] = str(build_dir) |
|
528 | defines['VCRedistSrcDir'] = str(build_dir) | |
490 | else: |
|
529 | else: | |
491 | defines["PythonVersion"] = "3" |
|
530 | defines["PythonVersion"] = "3" | |
492 |
|
531 | |||
493 | if (staging_dir / "lib").exists(): |
|
532 | if (staging_dir / "lib").exists(): | |
494 | defines["MercurialHasLib"] = "1" |
|
533 | defines["MercurialHasLib"] = "1" | |
495 |
|
534 | |||
496 | if extra_features: |
|
535 | if extra_features: | |
497 | assert all(';' not in f for f in extra_features) |
|
536 | assert all(';' not in f for f in extra_features) | |
498 | defines['MercurialExtraFeatures'] = ';'.join(extra_features) |
|
537 | defines['MercurialExtraFeatures'] = ';'.join(extra_features) | |
499 |
|
538 | |||
500 | run_candle(wix_path, build_dir, source, source_build_rel, defines=defines) |
|
539 | run_candle(wix_path, build_dir, source, source_build_rel, defines=defines) | |
501 |
|
540 | |||
502 | msi_path = ( |
|
541 | msi_path = ( | |
503 | source_dir |
|
542 | source_dir | |
504 | / 'dist' |
|
543 | / 'dist' | |
505 | / ('%s-%s-%s%s.msi' % (msi_name, orig_version, arch, suffix)) |
|
544 | / ('%s-%s-%s%s.msi' % (msi_name, orig_version, arch, suffix)) | |
506 | ) |
|
545 | ) | |
507 |
|
546 | |||
508 | args = [ |
|
547 | args = [ | |
509 | str(wix_path / 'light.exe'), |
|
548 | str(wix_path / 'light.exe'), | |
510 | '-nologo', |
|
549 | '-nologo', | |
511 | '-ext', |
|
550 | '-ext', | |
512 | 'WixUIExtension', |
|
551 | 'WixUIExtension', | |
513 | '-sw1076', |
|
552 | '-sw1076', | |
514 | '-spdb', |
|
553 | '-spdb', | |
515 | '-o', |
|
554 | '-o', | |
516 | str(msi_path), |
|
555 | str(msi_path), | |
517 | ] |
|
556 | ] | |
518 |
|
557 | |||
519 | for source, rel_path in sorted((extra_wxs or {}).items()): |
|
558 | for source, rel_path in sorted((extra_wxs or {}).items()): | |
520 | assert source.endswith('.wxs') |
|
559 | assert source.endswith('.wxs') | |
521 | source = os.path.basename(source) |
|
560 | source = os.path.basename(source) | |
522 | args.append(str(build_dir / ('%s.wixobj' % source[:-4]))) |
|
561 | args.append(str(build_dir / ('%s.wixobj' % source[:-4]))) | |
523 |
|
562 | |||
524 | args.extend( |
|
563 | args.extend( | |
525 | [ |
|
564 | [ | |
526 | str(build_dir / 'stage.wixobj'), |
|
565 | str(build_dir / 'stage.wixobj'), | |
527 | str(build_dir / 'mercurial.wixobj'), |
|
566 | str(build_dir / 'mercurial.wixobj'), | |
528 | ] |
|
567 | ] | |
529 | ) |
|
568 | ) | |
530 |
|
569 | |||
531 | subprocess.run(args, cwd=str(source_dir), check=True) |
|
570 | subprocess.run(args, cwd=str(source_dir), check=True) | |
532 |
|
571 | |||
533 | print('%s created' % msi_path) |
|
572 | print('%s created' % msi_path) | |
534 |
|
573 | |||
535 | if signing_info: |
|
574 | if signing_info: | |
536 | sign_with_signtool( |
|
575 | sign_with_signtool( | |
537 | msi_path, |
|
576 | msi_path, | |
538 | "%s %s" % (signing_info["name"], version), |
|
577 | "%s %s" % (signing_info["name"], version), | |
539 | subject_name=signing_info["subject_name"], |
|
578 | subject_name=signing_info["subject_name"], | |
540 | cert_path=signing_info["cert_path"], |
|
579 | cert_path=signing_info["cert_path"], | |
541 | cert_password=signing_info["cert_password"], |
|
580 | cert_password=signing_info["cert_password"], | |
542 | timestamp_url=signing_info["timestamp_url"], |
|
581 | timestamp_url=signing_info["timestamp_url"], | |
543 | ) |
|
582 | ) | |
544 |
|
583 | |||
545 | return { |
|
584 | return { | |
546 | 'msi_path': msi_path, |
|
585 | 'msi_path': msi_path, | |
547 | } |
|
586 | } |
@@ -1,153 +1,157 b'' | |||||
1 | <?xml version='1.0' encoding='windows-1252'?> |
|
1 | <?xml version='1.0' encoding='windows-1252'?> | |
2 | <Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'> |
|
2 | <Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'> | |
3 |
|
3 | |||
4 | <!-- Copyright 2010 Steve Borho <steve@borho.org> |
|
4 | <!-- Copyright 2010 Steve Borho <steve@borho.org> | |
5 |
|
5 | |||
6 | This software may be used and distributed according to the terms of the |
|
6 | This software may be used and distributed according to the terms of the | |
7 | GNU General Public License version 2 or any later version. --> |
|
7 | GNU General Public License version 2 or any later version. --> | |
8 |
|
8 | |||
9 | <?include guids.wxi ?> |
|
9 | <?include guids.wxi ?> | |
10 | <?include defines.wxi ?> |
|
10 | <?include defines.wxi ?> | |
11 |
|
11 | |||
12 | <?if $(var.Platform) = "x64" ?> |
|
12 | <?if $(var.Platform) = "x64" ?> | |
13 | <?define PFolder = ProgramFiles64Folder ?> |
|
13 | <?define PFolder = ProgramFiles64Folder ?> | |
14 | <?else?> |
|
14 | <?else?> | |
15 | <?define PFolder = ProgramFilesFolder ?> |
|
15 | <?define PFolder = ProgramFilesFolder ?> | |
16 | <?endif?> |
|
16 | <?endif?> | |
17 |
|
17 | |||
18 | <Product Id='*' |
|
18 | <Product Id='*' | |
19 | Name='Mercurial $(var.Version) ($(var.Platform))' |
|
19 | Name='Mercurial $(var.Version) ($(var.Platform))' | |
20 | UpgradeCode='$(var.ProductUpgradeCode)' |
|
20 | UpgradeCode='$(var.ProductUpgradeCode)' | |
21 | Language='1033' Codepage='1252' Version='$(var.Version)' |
|
21 | Language='1033' Codepage='1252' Version='$(var.Version)' | |
22 | Manufacturer='Olivia Mackall and others'> |
|
22 | Manufacturer='Olivia Mackall and others'> | |
23 |
|
23 | |||
24 | <Package Id='*' |
|
24 | <Package Id='*' | |
25 | Keywords='Installer' |
|
25 | Keywords='Installer' | |
26 | Description="Mercurial distributed SCM (version $(var.Version))" |
|
26 | Description="Mercurial distributed SCM (version $(var.Version))" | |
27 | Comments='$(var.Comments)' |
|
27 | Comments='$(var.Comments)' | |
28 | Platform='$(var.Platform)' |
|
28 | Platform='$(var.Platform)' | |
29 | Manufacturer='Olivia Mackall and others' |
|
29 | Manufacturer='Olivia Mackall and others' | |
30 | InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' /> |
|
30 | InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' /> | |
31 |
|
31 | |||
32 | <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1' |
|
32 | <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1' | |
33 | CompressionLevel='high' /> |
|
33 | CompressionLevel='high' /> | |
34 | <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" /> |
|
34 | <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" /> | |
35 |
|
35 | |||
36 | <Condition Message='Mercurial MSI installers require Windows XP or higher'> |
|
36 | <Condition Message='Mercurial MSI installers require Windows XP or higher'> | |
37 | VersionNT >= 501 |
|
37 | VersionNT >= 501 | |
38 | </Condition> |
|
38 | </Condition> | |
39 |
|
39 | |||
40 | <Property Id="INSTALLDIR"> |
|
40 | <Property Id="INSTALLDIR"> | |
41 | <ComponentSearch Id='SearchForMainExecutableComponent' |
|
41 | <ComponentSearch Id='SearchForMainExecutableComponent' | |
42 | Guid='$(var.ComponentMainExecutableGUID)' |
|
42 | Guid='$(var.ComponentMainExecutableGUID)' | |
43 | Type='directory' /> |
|
43 | Type='directory' /> | |
44 | </Property> |
|
44 | </Property> | |
45 |
|
45 | |||
46 | <!--Property Id='ARPCOMMENTS'>any comments</Property--> |
|
46 | <!--Property Id='ARPCOMMENTS'>any comments</Property--> | |
47 | <Property Id='ARPCONTACT'>mercurial@mercurial-scm.org</Property> |
|
47 | <Property Id='ARPCONTACT'>mercurial@mercurial-scm.org</Property> | |
48 | <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property> |
|
48 | <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property> | |
49 | <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property> |
|
49 | <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property> | |
50 | <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property> |
|
50 | <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property> | |
51 | <Property Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property> |
|
51 | <Property Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property> | |
52 | <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property> |
|
52 | <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property> | |
53 |
|
53 | |||
54 | <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property> |
|
54 | <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property> | |
55 | <Property Id='REINSTALLMODE'>amus</Property> |
|
55 | <Property Id='REINSTALLMODE'>amus</Property> | |
56 |
|
56 | |||
57 | <!--Auto-accept the license page--> |
|
57 | <!--Auto-accept the license page--> | |
58 | <Property Id='LicenseAccepted'>1</Property> |
|
58 | <Property Id='LicenseAccepted'>1</Property> | |
59 |
|
59 | |||
60 | <Directory Id='TARGETDIR' Name='SourceDir'> |
|
60 | <Directory Id='TARGETDIR' Name='SourceDir'> | |
61 | <Directory Id='$(var.PFolder)' Name='PFiles'> |
|
61 | <Directory Id='$(var.PFolder)' Name='PFiles'> | |
62 | <Directory Id='INSTALLDIR' Name='Mercurial'> |
|
62 | <Directory Id='INSTALLDIR' Name='Mercurial'> | |
63 | <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'> |
|
63 | <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'> | |
64 | <CreateFolder /> |
|
64 | <CreateFolder /> | |
65 | <Environment Id="Environment" Name="PATH" Part="last" System="yes" |
|
65 | <Environment Id="Environment" Name="PATH" Part="last" System="yes" | |
66 | Permanent="no" Value="[INSTALLDIR]" Action="set" /> |
|
66 | Permanent="no" Value="[INSTALLDIR]" Action="set" /> | |
67 | </Component> |
|
67 | </Component> | |
68 | </Directory> |
|
68 | </Directory> | |
69 | </Directory> |
|
69 | </Directory> | |
70 |
|
70 | |||
71 | <Directory Id="ProgramMenuFolder" Name="Programs"> |
|
71 | <Directory Id="ProgramMenuFolder" Name="Programs"> | |
72 | <Directory Id="ProgramMenuDir" Name="Mercurial $(var.Version)"> |
|
72 | <Directory Id="ProgramMenuDir" Name="Mercurial $(var.Version)"> | |
73 | <Component Id="ProgramMenuDir" Guid="$(var.ProgramMenuDir.guid)" Win64='$(var.IsX64)'> |
|
73 | <Component Id="ProgramMenuDir" Guid="$(var.ProgramMenuDir.guid)" Win64='$(var.IsX64)'> | |
74 | <RemoveFolder Id='ProgramMenuDir' On='uninstall' /> |
|
74 | <RemoveFolder Id='ProgramMenuDir' On='uninstall' /> | |
75 | <RegistryValue Root='HKCU' Key='Software\Mercurial\InstallDir' Type='string' |
|
75 | <RegistryValue Root='HKCU' Key='Software\Mercurial\InstallDir' Type='string' | |
76 | Value='[INSTALLDIR]' KeyPath='yes' /> |
|
76 | Value='[INSTALLDIR]' KeyPath='yes' /> | |
77 | <Shortcut Id='UrlShortcut' Directory='ProgramMenuDir' Name='Mercurial Web Site' |
|
77 | <Shortcut Id='UrlShortcut' Directory='ProgramMenuDir' Name='Mercurial Web Site' | |
78 | Target='[ARPHELPLINK]' Icon="hgIcon.ico" IconIndex='0' /> |
|
78 | Target='[ARPHELPLINK]' Icon="hgIcon.ico" IconIndex='0' /> | |
79 | </Component> |
|
79 | </Component> | |
80 | </Directory> |
|
80 | </Directory> | |
81 | </Directory> |
|
81 | </Directory> | |
82 |
|
82 | |||
83 | <!-- Install VCRedist merge modules on Python 2. On Python 3, |
|
83 | <!-- Install VCRedist merge modules on Python 2. On Python 3, | |
84 | vcruntimeXXX.dll is part of the install layout and gets picked up |
|
84 | vcruntimeXXX.dll is part of the install layout and gets picked up | |
85 | as a regular file. --> |
|
85 | as a regular file. --> | |
86 | <?if $(var.PythonVersion) = "2" ?> |
|
86 | <?if $(var.PythonVersion) = "2" ?> | |
87 | <?if $(var.Platform) = "x86" ?> |
|
87 | <?if $(var.Platform) = "x86" ?> | |
88 | <Merge Id='VCRuntime' DiskId='1' Language='1033' |
|
88 | <Merge Id='VCRuntime' DiskId='1' Language='1033' | |
89 | SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' /> |
|
89 | SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' /> | |
90 | <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' |
|
90 | <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' | |
91 | SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' /> |
|
91 | SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' /> | |
92 | <?else?> |
|
92 | <?else?> | |
93 | <Merge Id='VCRuntime' DiskId='1' Language='1033' |
|
93 | <Merge Id='VCRuntime' DiskId='1' Language='1033' | |
94 | SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' /> |
|
94 | SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' /> | |
95 | <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' |
|
95 | <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' | |
96 | SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' /> |
|
96 | SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' /> | |
97 | <?endif?> |
|
97 | <?endif?> | |
98 | <?endif?> |
|
98 | <?endif?> | |
99 | </Directory> |
|
99 | </Directory> | |
100 |
|
100 | |||
101 | <Feature Id='Complete' Title='Mercurial' Description='The complete package' |
|
101 | <Feature Id='Complete' Title='Mercurial' Description='The complete package' | |
102 | Display='expand' Level='1' ConfigurableDirectory='INSTALLDIR' > |
|
102 | Display='expand' Level='1' ConfigurableDirectory='INSTALLDIR' > | |
103 | <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app' |
|
103 | <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app' | |
104 | Level='1' Absent='disallow' > |
|
104 | Level='1' Absent='disallow' > | |
105 | <ComponentRef Id='MainExecutable' /> |
|
105 | <ComponentRef Id='MainExecutable' /> | |
106 | <ComponentRef Id='ProgramMenuDir' /> |
|
106 | <ComponentRef Id='ProgramMenuDir' /> | |
107 | <ComponentGroupRef Id="hg.group.ROOT" /> |
|
107 | <ComponentGroupRef Id="hg.group.ROOT" /> | |
108 | <ComponentGroupRef Id="hg.group.defaultrc" /> |
|
108 | <ComponentGroupRef Id="hg.group.defaultrc" /> | |
109 | <ComponentGroupRef Id="hg.group.helptext" /> |
|
109 | <ComponentGroupRef Id="hg.group.helptext" /> | |
110 | <?ifdef MercurialHasLib?> |
|
110 | <?ifdef MercurialHasLib?> | |
111 | <ComponentGroupRef Id="hg.group.lib" /> |
|
111 | <ComponentGroupRef Id="hg.group.lib" /> | |
112 | <?endif?> |
|
112 | <?endif?> | |
113 | <ComponentGroupRef Id="hg.group.templates" /> |
|
113 | <ComponentGroupRef Id="hg.group.templates" /> | |
114 | <?if $(var.PythonVersion) = "2" ?> |
|
114 | <?if $(var.PythonVersion) = "2" ?> | |
115 | <MergeRef Id='VCRuntime' /> |
|
115 | <MergeRef Id='VCRuntime' /> | |
116 | <MergeRef Id='VCRuntimePolicy' /> |
|
116 | <MergeRef Id='VCRuntimePolicy' /> | |
117 | <?endif?> |
|
117 | <?endif?> | |
118 | </Feature> |
|
118 | </Feature> | |
119 | <?ifdef MercurialExtraFeatures?> |
|
119 | <?ifdef MercurialExtraFeatures?> | |
120 | <?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?> |
|
120 | <?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?> | |
121 | <FeatureRef Id="$(var.EXTRAFEAT)" /> |
|
121 | <FeatureRef Id="$(var.EXTRAFEAT)" /> | |
122 | <?endforeach?> |
|
122 | <?endforeach?> | |
123 | <?endif?> |
|
123 | <?endif?> | |
124 | <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'> |
|
124 | <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'> | |
125 | <ComponentGroupRef Id="hg.group.locale" /> |
|
125 | <ComponentGroupRef Id="hg.group.locale" /> | |
126 | </Feature> |
|
126 | </Feature> | |
127 | <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'> |
|
127 | <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'> | |
128 | <ComponentGroupRef Id="hg.group.doc" /> |
|
128 | <ComponentGroupRef Id="hg.group.doc" /> | |
129 | </Feature> |
|
129 | </Feature> | |
130 | <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'> |
|
130 | <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'> | |
131 | <ComponentGroupRef Id="hg.group.contrib" /> |
|
131 | <ComponentGroupRef Id="hg.group.contrib" /> | |
132 | </Feature> |
|
132 | </Feature> | |
133 | </Feature> |
|
133 | </Feature> | |
134 |
|
134 | |||
135 | <UIRef Id="WixUI_FeatureTree" /> |
|
135 | <UIRef Id="WixUI_FeatureTree" /> | |
136 | <UIRef Id="WixUI_ErrorProgressText" /> |
|
136 | <UIRef Id="WixUI_ErrorProgressText" /> | |
137 |
|
137 | |||
|
138 | <?ifdef PyOxidizer?> | |||
|
139 | <WixVariable Id="WixUILicenseRtf" Value="COPYING.rtf" /> | |||
|
140 | <Icon Id="hgIcon.ico" SourceFile="mercurial.ico" /> | |||
|
141 | <?else?> | |||
138 | <WixVariable Id="WixUILicenseRtf" Value="contrib\packaging\wix\COPYING.rtf" /> |
|
142 | <WixVariable Id="WixUILicenseRtf" Value="contrib\packaging\wix\COPYING.rtf" /> | |
139 |
|
||||
140 | <Icon Id="hgIcon.ico" SourceFile="contrib/win32/mercurial.ico" /> |
|
143 | <Icon Id="hgIcon.ico" SourceFile="contrib/win32/mercurial.ico" /> | |
|
144 | <?endif?> | |||
141 |
|
145 | |||
142 | <Upgrade Id='$(var.ProductUpgradeCode)'> |
|
146 | <Upgrade Id='$(var.ProductUpgradeCode)'> | |
143 | <UpgradeVersion |
|
147 | <UpgradeVersion | |
144 | IncludeMinimum='yes' Minimum='0.0.0' IncludeMaximum='no' OnlyDetect='no' |
|
148 | IncludeMinimum='yes' Minimum='0.0.0' IncludeMaximum='no' OnlyDetect='no' | |
145 | Property='INSTALLEDMERCURIALPRODUCTS' /> |
|
149 | Property='INSTALLEDMERCURIALPRODUCTS' /> | |
146 | </Upgrade> |
|
150 | </Upgrade> | |
147 |
|
151 | |||
148 | <InstallExecuteSequence> |
|
152 | <InstallExecuteSequence> | |
149 | <RemoveExistingProducts After='InstallInitialize'/> |
|
153 | <RemoveExistingProducts After='InstallInitialize'/> | |
150 | </InstallExecuteSequence> |
|
154 | </InstallExecuteSequence> | |
151 |
|
155 | |||
152 | </Product> |
|
156 | </Product> | |
153 | </Wix> |
|
157 | </Wix> |
@@ -1,3928 +1,3980 b'' | |||||
1 | # perf.py - performance test routines |
|
1 | # perf.py - performance test routines | |
2 | '''helper extension to measure performance |
|
2 | '''helper extension to measure performance | |
3 |
|
3 | |||
4 | Configurations |
|
4 | Configurations | |
5 | ============== |
|
5 | ============== | |
6 |
|
6 | |||
7 | ``perf`` |
|
7 | ``perf`` | |
8 | -------- |
|
8 | -------- | |
9 |
|
9 | |||
10 | ``all-timing`` |
|
10 | ``all-timing`` | |
11 | When set, additional statistics will be reported for each benchmark: best, |
|
11 | When set, additional statistics will be reported for each benchmark: best, | |
12 | worst, median average. If not set only the best timing is reported |
|
12 | worst, median average. If not set only the best timing is reported | |
13 | (default: off). |
|
13 | (default: off). | |
14 |
|
14 | |||
15 | ``presleep`` |
|
15 | ``presleep`` | |
16 | number of second to wait before any group of runs (default: 1) |
|
16 | number of second to wait before any group of runs (default: 1) | |
17 |
|
17 | |||
18 | ``pre-run`` |
|
18 | ``pre-run`` | |
19 | number of run to perform before starting measurement. |
|
19 | number of run to perform before starting measurement. | |
20 |
|
20 | |||
21 | ``profile-benchmark`` |
|
21 | ``profile-benchmark`` | |
22 | Enable profiling for the benchmarked section. |
|
22 | Enable profiling for the benchmarked section. | |
23 | (The first iteration is benchmarked) |
|
23 | (The first iteration is benchmarked) | |
24 |
|
24 | |||
25 | ``run-limits`` |
|
25 | ``run-limits`` | |
26 | Control the number of runs each benchmark will perform. The option value |
|
26 | Control the number of runs each benchmark will perform. The option value | |
27 | should be a list of `<time>-<numberofrun>` pairs. After each run the |
|
27 | should be a list of `<time>-<numberofrun>` pairs. After each run the | |
28 | conditions are considered in order with the following logic: |
|
28 | conditions are considered in order with the following logic: | |
29 |
|
29 | |||
30 | If benchmark has been running for <time> seconds, and we have performed |
|
30 | If benchmark has been running for <time> seconds, and we have performed | |
31 | <numberofrun> iterations, stop the benchmark, |
|
31 | <numberofrun> iterations, stop the benchmark, | |
32 |
|
32 | |||
33 | The default value is: `3.0-100, 10.0-3` |
|
33 | The default value is: `3.0-100, 10.0-3` | |
34 |
|
34 | |||
35 | ``stub`` |
|
35 | ``stub`` | |
36 | When set, benchmarks will only be run once, useful for testing |
|
36 | When set, benchmarks will only be run once, useful for testing | |
37 | (default: off) |
|
37 | (default: off) | |
38 | ''' |
|
38 | ''' | |
39 |
|
39 | |||
40 | # "historical portability" policy of perf.py: |
|
40 | # "historical portability" policy of perf.py: | |
41 | # |
|
41 | # | |
42 | # We have to do: |
|
42 | # We have to do: | |
43 | # - make perf.py "loadable" with as wide Mercurial version as possible |
|
43 | # - make perf.py "loadable" with as wide Mercurial version as possible | |
44 | # This doesn't mean that perf commands work correctly with that Mercurial. |
|
44 | # This doesn't mean that perf commands work correctly with that Mercurial. | |
45 | # BTW, perf.py itself has been available since 1.1 (or eb240755386d). |
|
45 | # BTW, perf.py itself has been available since 1.1 (or eb240755386d). | |
46 | # - make historical perf command work correctly with as wide Mercurial |
|
46 | # - make historical perf command work correctly with as wide Mercurial | |
47 | # version as possible |
|
47 | # version as possible | |
48 | # |
|
48 | # | |
49 | # We have to do, if possible with reasonable cost: |
|
49 | # We have to do, if possible with reasonable cost: | |
50 | # - make recent perf command for historical feature work correctly |
|
50 | # - make recent perf command for historical feature work correctly | |
51 | # with early Mercurial |
|
51 | # with early Mercurial | |
52 | # |
|
52 | # | |
53 | # We don't have to do: |
|
53 | # We don't have to do: | |
54 | # - make perf command for recent feature work correctly with early |
|
54 | # - make perf command for recent feature work correctly with early | |
55 | # Mercurial |
|
55 | # Mercurial | |
56 |
|
56 | |||
57 | from __future__ import absolute_import |
|
57 | from __future__ import absolute_import | |
58 | import contextlib |
|
58 | import contextlib | |
59 | import functools |
|
59 | import functools | |
60 | import gc |
|
60 | import gc | |
61 | import os |
|
61 | import os | |
62 | import random |
|
62 | import random | |
63 | import shutil |
|
63 | import shutil | |
64 | import struct |
|
64 | import struct | |
65 | import sys |
|
65 | import sys | |
66 | import tempfile |
|
66 | import tempfile | |
67 | import threading |
|
67 | import threading | |
68 | import time |
|
68 | import time | |
|
69 | ||||
|
70 | import mercurial.revlog | |||
69 | from mercurial import ( |
|
71 | from mercurial import ( | |
70 | changegroup, |
|
72 | changegroup, | |
71 | cmdutil, |
|
73 | cmdutil, | |
72 | commands, |
|
74 | commands, | |
73 | copies, |
|
75 | copies, | |
74 | error, |
|
76 | error, | |
75 | extensions, |
|
77 | extensions, | |
76 | hg, |
|
78 | hg, | |
77 | mdiff, |
|
79 | mdiff, | |
78 | merge, |
|
80 | merge, | |
79 | revlog, |
|
|||
80 | util, |
|
81 | util, | |
81 | ) |
|
82 | ) | |
82 |
|
83 | |||
83 | # for "historical portability": |
|
84 | # for "historical portability": | |
84 | # try to import modules separately (in dict order), and ignore |
|
85 | # try to import modules separately (in dict order), and ignore | |
85 | # failure, because these aren't available with early Mercurial |
|
86 | # failure, because these aren't available with early Mercurial | |
86 | try: |
|
87 | try: | |
87 | from mercurial import branchmap # since 2.5 (or bcee63733aad) |
|
88 | from mercurial import branchmap # since 2.5 (or bcee63733aad) | |
88 | except ImportError: |
|
89 | except ImportError: | |
89 | pass |
|
90 | pass | |
90 | try: |
|
91 | try: | |
91 | from mercurial import obsolete # since 2.3 (or ad0d6c2b3279) |
|
92 | from mercurial import obsolete # since 2.3 (or ad0d6c2b3279) | |
92 | except ImportError: |
|
93 | except ImportError: | |
93 | pass |
|
94 | pass | |
94 | try: |
|
95 | try: | |
95 | from mercurial import registrar # since 3.7 (or 37d50250b696) |
|
96 | from mercurial import registrar # since 3.7 (or 37d50250b696) | |
96 |
|
97 | |||
97 | dir(registrar) # forcibly load it |
|
98 | dir(registrar) # forcibly load it | |
98 | except ImportError: |
|
99 | except ImportError: | |
99 | registrar = None |
|
100 | registrar = None | |
100 | try: |
|
101 | try: | |
101 | from mercurial import repoview # since 2.5 (or 3a6ddacb7198) |
|
102 | from mercurial import repoview # since 2.5 (or 3a6ddacb7198) | |
102 | except ImportError: |
|
103 | except ImportError: | |
103 | pass |
|
104 | pass | |
104 | try: |
|
105 | try: | |
105 | from mercurial.utils import repoviewutil # since 5.0 |
|
106 | from mercurial.utils import repoviewutil # since 5.0 | |
106 | except ImportError: |
|
107 | except ImportError: | |
107 | repoviewutil = None |
|
108 | repoviewutil = None | |
108 | try: |
|
109 | try: | |
109 | from mercurial import scmutil # since 1.9 (or 8b252e826c68) |
|
110 | from mercurial import scmutil # since 1.9 (or 8b252e826c68) | |
110 | except ImportError: |
|
111 | except ImportError: | |
111 | pass |
|
112 | pass | |
112 | try: |
|
113 | try: | |
113 | from mercurial import setdiscovery # since 1.9 (or cb98fed52495) |
|
114 | from mercurial import setdiscovery # since 1.9 (or cb98fed52495) | |
114 | except ImportError: |
|
115 | except ImportError: | |
115 | pass |
|
116 | pass | |
116 |
|
117 | |||
117 | try: |
|
118 | try: | |
118 | from mercurial import profiling |
|
119 | from mercurial import profiling | |
119 | except ImportError: |
|
120 | except ImportError: | |
120 | profiling = None |
|
121 | profiling = None | |
121 |
|
122 | |||
|
123 | try: | |||
|
124 | from mercurial.revlogutils import constants as revlog_constants | |||
|
125 | ||||
|
126 | perf_rl_kind = (revlog_constants.KIND_OTHER, b'created-by-perf') | |||
|
127 | ||||
|
128 | def revlog(opener, *args, **kwargs): | |||
|
129 | return mercurial.revlog.revlog(opener, perf_rl_kind, *args, **kwargs) | |||
|
130 | ||||
|
131 | ||||
|
132 | except (ImportError, AttributeError): | |||
|
133 | perf_rl_kind = None | |||
|
134 | ||||
|
135 | def revlog(opener, *args, **kwargs): | |||
|
136 | return mercurial.revlog.revlog(opener, *args, **kwargs) | |||
|
137 | ||||
122 |
|
138 | |||
123 | def identity(a): |
|
139 | def identity(a): | |
124 | return a |
|
140 | return a | |
125 |
|
141 | |||
126 |
|
142 | |||
127 | try: |
|
143 | try: | |
128 | from mercurial import pycompat |
|
144 | from mercurial import pycompat | |
129 |
|
145 | |||
130 | getargspec = pycompat.getargspec # added to module after 4.5 |
|
146 | getargspec = pycompat.getargspec # added to module after 4.5 | |
131 | _byteskwargs = pycompat.byteskwargs # since 4.1 (or fbc3f73dc802) |
|
147 | _byteskwargs = pycompat.byteskwargs # since 4.1 (or fbc3f73dc802) | |
132 | _sysstr = pycompat.sysstr # since 4.0 (or 2219f4f82ede) |
|
148 | _sysstr = pycompat.sysstr # since 4.0 (or 2219f4f82ede) | |
133 | _bytestr = pycompat.bytestr # since 4.2 (or b70407bd84d5) |
|
149 | _bytestr = pycompat.bytestr # since 4.2 (or b70407bd84d5) | |
134 | _xrange = pycompat.xrange # since 4.8 (or 7eba8f83129b) |
|
150 | _xrange = pycompat.xrange # since 4.8 (or 7eba8f83129b) | |
135 | fsencode = pycompat.fsencode # since 3.9 (or f4a5e0e86a7e) |
|
151 | fsencode = pycompat.fsencode # since 3.9 (or f4a5e0e86a7e) | |
136 | if pycompat.ispy3: |
|
152 | if pycompat.ispy3: | |
137 | _maxint = sys.maxsize # per py3 docs for replacing maxint |
|
153 | _maxint = sys.maxsize # per py3 docs for replacing maxint | |
138 | else: |
|
154 | else: | |
139 | _maxint = sys.maxint |
|
155 | _maxint = sys.maxint | |
140 | except (NameError, ImportError, AttributeError): |
|
156 | except (NameError, ImportError, AttributeError): | |
141 | import inspect |
|
157 | import inspect | |
142 |
|
158 | |||
143 | getargspec = inspect.getargspec |
|
159 | getargspec = inspect.getargspec | |
144 | _byteskwargs = identity |
|
160 | _byteskwargs = identity | |
145 | _bytestr = str |
|
161 | _bytestr = str | |
146 | fsencode = identity # no py3 support |
|
162 | fsencode = identity # no py3 support | |
147 | _maxint = sys.maxint # no py3 support |
|
163 | _maxint = sys.maxint # no py3 support | |
148 | _sysstr = lambda x: x # no py3 support |
|
164 | _sysstr = lambda x: x # no py3 support | |
149 | _xrange = xrange |
|
165 | _xrange = xrange | |
150 |
|
166 | |||
151 | try: |
|
167 | try: | |
152 | # 4.7+ |
|
168 | # 4.7+ | |
153 | queue = pycompat.queue.Queue |
|
169 | queue = pycompat.queue.Queue | |
154 | except (NameError, AttributeError, ImportError): |
|
170 | except (NameError, AttributeError, ImportError): | |
155 | # <4.7. |
|
171 | # <4.7. | |
156 | try: |
|
172 | try: | |
157 | queue = pycompat.queue |
|
173 | queue = pycompat.queue | |
158 | except (NameError, AttributeError, ImportError): |
|
174 | except (NameError, AttributeError, ImportError): | |
159 | import Queue as queue |
|
175 | import Queue as queue | |
160 |
|
176 | |||
161 | try: |
|
177 | try: | |
162 | from mercurial import logcmdutil |
|
178 | from mercurial import logcmdutil | |
163 |
|
179 | |||
164 | makelogtemplater = logcmdutil.maketemplater |
|
180 | makelogtemplater = logcmdutil.maketemplater | |
165 | except (AttributeError, ImportError): |
|
181 | except (AttributeError, ImportError): | |
166 | try: |
|
182 | try: | |
167 | makelogtemplater = cmdutil.makelogtemplater |
|
183 | makelogtemplater = cmdutil.makelogtemplater | |
168 | except (AttributeError, ImportError): |
|
184 | except (AttributeError, ImportError): | |
169 | makelogtemplater = None |
|
185 | makelogtemplater = None | |
170 |
|
186 | |||
171 | # for "historical portability": |
|
187 | # for "historical portability": | |
172 | # define util.safehasattr forcibly, because util.safehasattr has been |
|
188 | # define util.safehasattr forcibly, because util.safehasattr has been | |
173 | # available since 1.9.3 (or 94b200a11cf7) |
|
189 | # available since 1.9.3 (or 94b200a11cf7) | |
174 | _undefined = object() |
|
190 | _undefined = object() | |
175 |
|
191 | |||
176 |
|
192 | |||
177 | def safehasattr(thing, attr): |
|
193 | def safehasattr(thing, attr): | |
178 | return getattr(thing, _sysstr(attr), _undefined) is not _undefined |
|
194 | return getattr(thing, _sysstr(attr), _undefined) is not _undefined | |
179 |
|
195 | |||
180 |
|
196 | |||
181 | setattr(util, 'safehasattr', safehasattr) |
|
197 | setattr(util, 'safehasattr', safehasattr) | |
182 |
|
198 | |||
183 | # for "historical portability": |
|
199 | # for "historical portability": | |
184 | # define util.timer forcibly, because util.timer has been available |
|
200 | # define util.timer forcibly, because util.timer has been available | |
185 | # since ae5d60bb70c9 |
|
201 | # since ae5d60bb70c9 | |
186 | if safehasattr(time, 'perf_counter'): |
|
202 | if safehasattr(time, 'perf_counter'): | |
187 | util.timer = time.perf_counter |
|
203 | util.timer = time.perf_counter | |
188 | elif os.name == b'nt': |
|
204 | elif os.name == b'nt': | |
189 | util.timer = time.clock |
|
205 | util.timer = time.clock | |
190 | else: |
|
206 | else: | |
191 | util.timer = time.time |
|
207 | util.timer = time.time | |
192 |
|
208 | |||
193 | # for "historical portability": |
|
209 | # for "historical portability": | |
194 | # use locally defined empty option list, if formatteropts isn't |
|
210 | # use locally defined empty option list, if formatteropts isn't | |
195 | # available, because commands.formatteropts has been available since |
|
211 | # available, because commands.formatteropts has been available since | |
196 | # 3.2 (or 7a7eed5176a4), even though formatting itself has been |
|
212 | # 3.2 (or 7a7eed5176a4), even though formatting itself has been | |
197 | # available since 2.2 (or ae5f92e154d3) |
|
213 | # available since 2.2 (or ae5f92e154d3) | |
198 | formatteropts = getattr( |
|
214 | formatteropts = getattr( | |
199 | cmdutil, "formatteropts", getattr(commands, "formatteropts", []) |
|
215 | cmdutil, "formatteropts", getattr(commands, "formatteropts", []) | |
200 | ) |
|
216 | ) | |
201 |
|
217 | |||
202 | # for "historical portability": |
|
218 | # for "historical portability": | |
203 | # use locally defined option list, if debugrevlogopts isn't available, |
|
219 | # use locally defined option list, if debugrevlogopts isn't available, | |
204 | # because commands.debugrevlogopts has been available since 3.7 (or |
|
220 | # because commands.debugrevlogopts has been available since 3.7 (or | |
205 | # 5606f7d0d063), even though cmdutil.openrevlog() has been available |
|
221 | # 5606f7d0d063), even though cmdutil.openrevlog() has been available | |
206 | # since 1.9 (or a79fea6b3e77). |
|
222 | # since 1.9 (or a79fea6b3e77). | |
207 | revlogopts = getattr( |
|
223 | revlogopts = getattr( | |
208 | cmdutil, |
|
224 | cmdutil, | |
209 | "debugrevlogopts", |
|
225 | "debugrevlogopts", | |
210 | getattr( |
|
226 | getattr( | |
211 | commands, |
|
227 | commands, | |
212 | "debugrevlogopts", |
|
228 | "debugrevlogopts", | |
213 | [ |
|
229 | [ | |
214 | (b'c', b'changelog', False, b'open changelog'), |
|
230 | (b'c', b'changelog', False, b'open changelog'), | |
215 | (b'm', b'manifest', False, b'open manifest'), |
|
231 | (b'm', b'manifest', False, b'open manifest'), | |
216 | (b'', b'dir', False, b'open directory manifest'), |
|
232 | (b'', b'dir', False, b'open directory manifest'), | |
217 | ], |
|
233 | ], | |
218 | ), |
|
234 | ), | |
219 | ) |
|
235 | ) | |
220 |
|
236 | |||
221 | cmdtable = {} |
|
237 | cmdtable = {} | |
222 |
|
238 | |||
223 | # for "historical portability": |
|
239 | # for "historical portability": | |
224 | # define parsealiases locally, because cmdutil.parsealiases has been |
|
240 | # define parsealiases locally, because cmdutil.parsealiases has been | |
225 | # available since 1.5 (or 6252852b4332) |
|
241 | # available since 1.5 (or 6252852b4332) | |
226 | def parsealiases(cmd): |
|
242 | def parsealiases(cmd): | |
227 | return cmd.split(b"|") |
|
243 | return cmd.split(b"|") | |
228 |
|
244 | |||
229 |
|
245 | |||
230 | if safehasattr(registrar, 'command'): |
|
246 | if safehasattr(registrar, 'command'): | |
231 | command = registrar.command(cmdtable) |
|
247 | command = registrar.command(cmdtable) | |
232 | elif safehasattr(cmdutil, 'command'): |
|
248 | elif safehasattr(cmdutil, 'command'): | |
233 | command = cmdutil.command(cmdtable) |
|
249 | command = cmdutil.command(cmdtable) | |
234 | if 'norepo' not in getargspec(command).args: |
|
250 | if 'norepo' not in getargspec(command).args: | |
235 | # for "historical portability": |
|
251 | # for "historical portability": | |
236 | # wrap original cmdutil.command, because "norepo" option has |
|
252 | # wrap original cmdutil.command, because "norepo" option has | |
237 | # been available since 3.1 (or 75a96326cecb) |
|
253 | # been available since 3.1 (or 75a96326cecb) | |
238 | _command = command |
|
254 | _command = command | |
239 |
|
255 | |||
240 | def command(name, options=(), synopsis=None, norepo=False): |
|
256 | def command(name, options=(), synopsis=None, norepo=False): | |
241 | if norepo: |
|
257 | if norepo: | |
242 | commands.norepo += b' %s' % b' '.join(parsealiases(name)) |
|
258 | commands.norepo += b' %s' % b' '.join(parsealiases(name)) | |
243 | return _command(name, list(options), synopsis) |
|
259 | return _command(name, list(options), synopsis) | |
244 |
|
260 | |||
245 |
|
261 | |||
246 | else: |
|
262 | else: | |
247 | # for "historical portability": |
|
263 | # for "historical portability": | |
248 | # define "@command" annotation locally, because cmdutil.command |
|
264 | # define "@command" annotation locally, because cmdutil.command | |
249 | # has been available since 1.9 (or 2daa5179e73f) |
|
265 | # has been available since 1.9 (or 2daa5179e73f) | |
250 | def command(name, options=(), synopsis=None, norepo=False): |
|
266 | def command(name, options=(), synopsis=None, norepo=False): | |
251 | def decorator(func): |
|
267 | def decorator(func): | |
252 | if synopsis: |
|
268 | if synopsis: | |
253 | cmdtable[name] = func, list(options), synopsis |
|
269 | cmdtable[name] = func, list(options), synopsis | |
254 | else: |
|
270 | else: | |
255 | cmdtable[name] = func, list(options) |
|
271 | cmdtable[name] = func, list(options) | |
256 | if norepo: |
|
272 | if norepo: | |
257 | commands.norepo += b' %s' % b' '.join(parsealiases(name)) |
|
273 | commands.norepo += b' %s' % b' '.join(parsealiases(name)) | |
258 | return func |
|
274 | return func | |
259 |
|
275 | |||
260 | return decorator |
|
276 | return decorator | |
261 |
|
277 | |||
262 |
|
278 | |||
263 | try: |
|
279 | try: | |
264 | import mercurial.registrar |
|
280 | import mercurial.registrar | |
265 | import mercurial.configitems |
|
281 | import mercurial.configitems | |
266 |
|
282 | |||
267 | configtable = {} |
|
283 | configtable = {} | |
268 | configitem = mercurial.registrar.configitem(configtable) |
|
284 | configitem = mercurial.registrar.configitem(configtable) | |
269 | configitem( |
|
285 | configitem( | |
270 | b'perf', |
|
286 | b'perf', | |
271 | b'presleep', |
|
287 | b'presleep', | |
272 | default=mercurial.configitems.dynamicdefault, |
|
288 | default=mercurial.configitems.dynamicdefault, | |
273 | experimental=True, |
|
289 | experimental=True, | |
274 | ) |
|
290 | ) | |
275 | configitem( |
|
291 | configitem( | |
276 | b'perf', |
|
292 | b'perf', | |
277 | b'stub', |
|
293 | b'stub', | |
278 | default=mercurial.configitems.dynamicdefault, |
|
294 | default=mercurial.configitems.dynamicdefault, | |
279 | experimental=True, |
|
295 | experimental=True, | |
280 | ) |
|
296 | ) | |
281 | configitem( |
|
297 | configitem( | |
282 | b'perf', |
|
298 | b'perf', | |
283 | b'parentscount', |
|
299 | b'parentscount', | |
284 | default=mercurial.configitems.dynamicdefault, |
|
300 | default=mercurial.configitems.dynamicdefault, | |
285 | experimental=True, |
|
301 | experimental=True, | |
286 | ) |
|
302 | ) | |
287 | configitem( |
|
303 | configitem( | |
288 | b'perf', |
|
304 | b'perf', | |
289 | b'all-timing', |
|
305 | b'all-timing', | |
290 | default=mercurial.configitems.dynamicdefault, |
|
306 | default=mercurial.configitems.dynamicdefault, | |
291 | experimental=True, |
|
307 | experimental=True, | |
292 | ) |
|
308 | ) | |
293 | configitem( |
|
309 | configitem( | |
294 | b'perf', |
|
310 | b'perf', | |
295 | b'pre-run', |
|
311 | b'pre-run', | |
296 | default=mercurial.configitems.dynamicdefault, |
|
312 | default=mercurial.configitems.dynamicdefault, | |
297 | ) |
|
313 | ) | |
298 | configitem( |
|
314 | configitem( | |
299 | b'perf', |
|
315 | b'perf', | |
300 | b'profile-benchmark', |
|
316 | b'profile-benchmark', | |
301 | default=mercurial.configitems.dynamicdefault, |
|
317 | default=mercurial.configitems.dynamicdefault, | |
302 | ) |
|
318 | ) | |
303 | configitem( |
|
319 | configitem( | |
304 | b'perf', |
|
320 | b'perf', | |
305 | b'run-limits', |
|
321 | b'run-limits', | |
306 | default=mercurial.configitems.dynamicdefault, |
|
322 | default=mercurial.configitems.dynamicdefault, | |
307 | experimental=True, |
|
323 | experimental=True, | |
308 | ) |
|
324 | ) | |
309 | except (ImportError, AttributeError): |
|
325 | except (ImportError, AttributeError): | |
310 | pass |
|
326 | pass | |
311 | except TypeError: |
|
327 | except TypeError: | |
312 | # compatibility fix for a11fd395e83f |
|
328 | # compatibility fix for a11fd395e83f | |
313 | # hg version: 5.2 |
|
329 | # hg version: 5.2 | |
314 | configitem( |
|
330 | configitem( | |
315 | b'perf', |
|
331 | b'perf', | |
316 | b'presleep', |
|
332 | b'presleep', | |
317 | default=mercurial.configitems.dynamicdefault, |
|
333 | default=mercurial.configitems.dynamicdefault, | |
318 | ) |
|
334 | ) | |
319 | configitem( |
|
335 | configitem( | |
320 | b'perf', |
|
336 | b'perf', | |
321 | b'stub', |
|
337 | b'stub', | |
322 | default=mercurial.configitems.dynamicdefault, |
|
338 | default=mercurial.configitems.dynamicdefault, | |
323 | ) |
|
339 | ) | |
324 | configitem( |
|
340 | configitem( | |
325 | b'perf', |
|
341 | b'perf', | |
326 | b'parentscount', |
|
342 | b'parentscount', | |
327 | default=mercurial.configitems.dynamicdefault, |
|
343 | default=mercurial.configitems.dynamicdefault, | |
328 | ) |
|
344 | ) | |
329 | configitem( |
|
345 | configitem( | |
330 | b'perf', |
|
346 | b'perf', | |
331 | b'all-timing', |
|
347 | b'all-timing', | |
332 | default=mercurial.configitems.dynamicdefault, |
|
348 | default=mercurial.configitems.dynamicdefault, | |
333 | ) |
|
349 | ) | |
334 | configitem( |
|
350 | configitem( | |
335 | b'perf', |
|
351 | b'perf', | |
336 | b'pre-run', |
|
352 | b'pre-run', | |
337 | default=mercurial.configitems.dynamicdefault, |
|
353 | default=mercurial.configitems.dynamicdefault, | |
338 | ) |
|
354 | ) | |
339 | configitem( |
|
355 | configitem( | |
340 | b'perf', |
|
356 | b'perf', | |
341 | b'profile-benchmark', |
|
357 | b'profile-benchmark', | |
342 | default=mercurial.configitems.dynamicdefault, |
|
358 | default=mercurial.configitems.dynamicdefault, | |
343 | ) |
|
359 | ) | |
344 | configitem( |
|
360 | configitem( | |
345 | b'perf', |
|
361 | b'perf', | |
346 | b'run-limits', |
|
362 | b'run-limits', | |
347 | default=mercurial.configitems.dynamicdefault, |
|
363 | default=mercurial.configitems.dynamicdefault, | |
348 | ) |
|
364 | ) | |
349 |
|
365 | |||
350 |
|
366 | |||
351 | def getlen(ui): |
|
367 | def getlen(ui): | |
352 | if ui.configbool(b"perf", b"stub", False): |
|
368 | if ui.configbool(b"perf", b"stub", False): | |
353 | return lambda x: 1 |
|
369 | return lambda x: 1 | |
354 | return len |
|
370 | return len | |
355 |
|
371 | |||
356 |
|
372 | |||
357 | class noop(object): |
|
373 | class noop(object): | |
358 | """dummy context manager""" |
|
374 | """dummy context manager""" | |
359 |
|
375 | |||
360 | def __enter__(self): |
|
376 | def __enter__(self): | |
361 | pass |
|
377 | pass | |
362 |
|
378 | |||
363 | def __exit__(self, *args): |
|
379 | def __exit__(self, *args): | |
364 | pass |
|
380 | pass | |
365 |
|
381 | |||
366 |
|
382 | |||
367 | NOOPCTX = noop() |
|
383 | NOOPCTX = noop() | |
368 |
|
384 | |||
369 |
|
385 | |||
370 | def gettimer(ui, opts=None): |
|
386 | def gettimer(ui, opts=None): | |
371 | """return a timer function and formatter: (timer, formatter) |
|
387 | """return a timer function and formatter: (timer, formatter) | |
372 |
|
388 | |||
373 | This function exists to gather the creation of formatter in a single |
|
389 | This function exists to gather the creation of formatter in a single | |
374 | place instead of duplicating it in all performance commands.""" |
|
390 | place instead of duplicating it in all performance commands.""" | |
375 |
|
391 | |||
376 | # enforce an idle period before execution to counteract power management |
|
392 | # enforce an idle period before execution to counteract power management | |
377 | # experimental config: perf.presleep |
|
393 | # experimental config: perf.presleep | |
378 | time.sleep(getint(ui, b"perf", b"presleep", 1)) |
|
394 | time.sleep(getint(ui, b"perf", b"presleep", 1)) | |
379 |
|
395 | |||
380 | if opts is None: |
|
396 | if opts is None: | |
381 | opts = {} |
|
397 | opts = {} | |
382 | # redirect all to stderr unless buffer api is in use |
|
398 | # redirect all to stderr unless buffer api is in use | |
383 | if not ui._buffers: |
|
399 | if not ui._buffers: | |
384 | ui = ui.copy() |
|
400 | ui = ui.copy() | |
385 | uifout = safeattrsetter(ui, b'fout', ignoremissing=True) |
|
401 | uifout = safeattrsetter(ui, b'fout', ignoremissing=True) | |
386 | if uifout: |
|
402 | if uifout: | |
387 | # for "historical portability": |
|
403 | # for "historical portability": | |
388 | # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d) |
|
404 | # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d) | |
389 | uifout.set(ui.ferr) |
|
405 | uifout.set(ui.ferr) | |
390 |
|
406 | |||
391 | # get a formatter |
|
407 | # get a formatter | |
392 | uiformatter = getattr(ui, 'formatter', None) |
|
408 | uiformatter = getattr(ui, 'formatter', None) | |
393 | if uiformatter: |
|
409 | if uiformatter: | |
394 | fm = uiformatter(b'perf', opts) |
|
410 | fm = uiformatter(b'perf', opts) | |
395 | else: |
|
411 | else: | |
396 | # for "historical portability": |
|
412 | # for "historical portability": | |
397 | # define formatter locally, because ui.formatter has been |
|
413 | # define formatter locally, because ui.formatter has been | |
398 | # available since 2.2 (or ae5f92e154d3) |
|
414 | # available since 2.2 (or ae5f92e154d3) | |
399 | from mercurial import node |
|
415 | from mercurial import node | |
400 |
|
416 | |||
401 | class defaultformatter(object): |
|
417 | class defaultformatter(object): | |
402 | """Minimized composition of baseformatter and plainformatter""" |
|
418 | """Minimized composition of baseformatter and plainformatter""" | |
403 |
|
419 | |||
404 | def __init__(self, ui, topic, opts): |
|
420 | def __init__(self, ui, topic, opts): | |
405 | self._ui = ui |
|
421 | self._ui = ui | |
406 | if ui.debugflag: |
|
422 | if ui.debugflag: | |
407 | self.hexfunc = node.hex |
|
423 | self.hexfunc = node.hex | |
408 | else: |
|
424 | else: | |
409 | self.hexfunc = node.short |
|
425 | self.hexfunc = node.short | |
410 |
|
426 | |||
411 | def __nonzero__(self): |
|
427 | def __nonzero__(self): | |
412 | return False |
|
428 | return False | |
413 |
|
429 | |||
414 | __bool__ = __nonzero__ |
|
430 | __bool__ = __nonzero__ | |
415 |
|
431 | |||
416 | def startitem(self): |
|
432 | def startitem(self): | |
417 | pass |
|
433 | pass | |
418 |
|
434 | |||
419 | def data(self, **data): |
|
435 | def data(self, **data): | |
420 | pass |
|
436 | pass | |
421 |
|
437 | |||
422 | def write(self, fields, deftext, *fielddata, **opts): |
|
438 | def write(self, fields, deftext, *fielddata, **opts): | |
423 | self._ui.write(deftext % fielddata, **opts) |
|
439 | self._ui.write(deftext % fielddata, **opts) | |
424 |
|
440 | |||
425 | def condwrite(self, cond, fields, deftext, *fielddata, **opts): |
|
441 | def condwrite(self, cond, fields, deftext, *fielddata, **opts): | |
426 | if cond: |
|
442 | if cond: | |
427 | self._ui.write(deftext % fielddata, **opts) |
|
443 | self._ui.write(deftext % fielddata, **opts) | |
428 |
|
444 | |||
429 | def plain(self, text, **opts): |
|
445 | def plain(self, text, **opts): | |
430 | self._ui.write(text, **opts) |
|
446 | self._ui.write(text, **opts) | |
431 |
|
447 | |||
432 | def end(self): |
|
448 | def end(self): | |
433 | pass |
|
449 | pass | |
434 |
|
450 | |||
435 | fm = defaultformatter(ui, b'perf', opts) |
|
451 | fm = defaultformatter(ui, b'perf', opts) | |
436 |
|
452 | |||
437 | # stub function, runs code only once instead of in a loop |
|
453 | # stub function, runs code only once instead of in a loop | |
438 | # experimental config: perf.stub |
|
454 | # experimental config: perf.stub | |
439 | if ui.configbool(b"perf", b"stub", False): |
|
455 | if ui.configbool(b"perf", b"stub", False): | |
440 | return functools.partial(stub_timer, fm), fm |
|
456 | return functools.partial(stub_timer, fm), fm | |
441 |
|
457 | |||
442 | # experimental config: perf.all-timing |
|
458 | # experimental config: perf.all-timing | |
443 | displayall = ui.configbool(b"perf", b"all-timing", False) |
|
459 | displayall = ui.configbool(b"perf", b"all-timing", False) | |
444 |
|
460 | |||
445 | # experimental config: perf.run-limits |
|
461 | # experimental config: perf.run-limits | |
446 | limitspec = ui.configlist(b"perf", b"run-limits", []) |
|
462 | limitspec = ui.configlist(b"perf", b"run-limits", []) | |
447 | limits = [] |
|
463 | limits = [] | |
448 | for item in limitspec: |
|
464 | for item in limitspec: | |
449 | parts = item.split(b'-', 1) |
|
465 | parts = item.split(b'-', 1) | |
450 | if len(parts) < 2: |
|
466 | if len(parts) < 2: | |
451 | ui.warn((b'malformatted run limit entry, missing "-": %s\n' % item)) |
|
467 | ui.warn((b'malformatted run limit entry, missing "-": %s\n' % item)) | |
452 | continue |
|
468 | continue | |
453 | try: |
|
469 | try: | |
454 | time_limit = float(_sysstr(parts[0])) |
|
470 | time_limit = float(_sysstr(parts[0])) | |
455 | except ValueError as e: |
|
471 | except ValueError as e: | |
456 | ui.warn( |
|
472 | ui.warn( | |
457 | ( |
|
473 | ( | |
458 | b'malformatted run limit entry, %s: %s\n' |
|
474 | b'malformatted run limit entry, %s: %s\n' | |
459 | % (_bytestr(e), item) |
|
475 | % (_bytestr(e), item) | |
460 | ) |
|
476 | ) | |
461 | ) |
|
477 | ) | |
462 | continue |
|
478 | continue | |
463 | try: |
|
479 | try: | |
464 | run_limit = int(_sysstr(parts[1])) |
|
480 | run_limit = int(_sysstr(parts[1])) | |
465 | except ValueError as e: |
|
481 | except ValueError as e: | |
466 | ui.warn( |
|
482 | ui.warn( | |
467 | ( |
|
483 | ( | |
468 | b'malformatted run limit entry, %s: %s\n' |
|
484 | b'malformatted run limit entry, %s: %s\n' | |
469 | % (_bytestr(e), item) |
|
485 | % (_bytestr(e), item) | |
470 | ) |
|
486 | ) | |
471 | ) |
|
487 | ) | |
472 | continue |
|
488 | continue | |
473 | limits.append((time_limit, run_limit)) |
|
489 | limits.append((time_limit, run_limit)) | |
474 | if not limits: |
|
490 | if not limits: | |
475 | limits = DEFAULTLIMITS |
|
491 | limits = DEFAULTLIMITS | |
476 |
|
492 | |||
477 | profiler = None |
|
493 | profiler = None | |
478 | if profiling is not None: |
|
494 | if profiling is not None: | |
479 | if ui.configbool(b"perf", b"profile-benchmark", False): |
|
495 | if ui.configbool(b"perf", b"profile-benchmark", False): | |
480 | profiler = profiling.profile(ui) |
|
496 | profiler = profiling.profile(ui) | |
481 |
|
497 | |||
482 | prerun = getint(ui, b"perf", b"pre-run", 0) |
|
498 | prerun = getint(ui, b"perf", b"pre-run", 0) | |
483 | t = functools.partial( |
|
499 | t = functools.partial( | |
484 | _timer, |
|
500 | _timer, | |
485 | fm, |
|
501 | fm, | |
486 | displayall=displayall, |
|
502 | displayall=displayall, | |
487 | limits=limits, |
|
503 | limits=limits, | |
488 | prerun=prerun, |
|
504 | prerun=prerun, | |
489 | profiler=profiler, |
|
505 | profiler=profiler, | |
490 | ) |
|
506 | ) | |
491 | return t, fm |
|
507 | return t, fm | |
492 |
|
508 | |||
493 |
|
509 | |||
494 | def stub_timer(fm, func, setup=None, title=None): |
|
510 | def stub_timer(fm, func, setup=None, title=None): | |
495 | if setup is not None: |
|
511 | if setup is not None: | |
496 | setup() |
|
512 | setup() | |
497 | func() |
|
513 | func() | |
498 |
|
514 | |||
499 |
|
515 | |||
500 | @contextlib.contextmanager |
|
516 | @contextlib.contextmanager | |
501 | def timeone(): |
|
517 | def timeone(): | |
502 | r = [] |
|
518 | r = [] | |
503 | ostart = os.times() |
|
519 | ostart = os.times() | |
504 | cstart = util.timer() |
|
520 | cstart = util.timer() | |
505 | yield r |
|
521 | yield r | |
506 | cstop = util.timer() |
|
522 | cstop = util.timer() | |
507 | ostop = os.times() |
|
523 | ostop = os.times() | |
508 | a, b = ostart, ostop |
|
524 | a, b = ostart, ostop | |
509 | r.append((cstop - cstart, b[0] - a[0], b[1] - a[1])) |
|
525 | r.append((cstop - cstart, b[0] - a[0], b[1] - a[1])) | |
510 |
|
526 | |||
511 |
|
527 | |||
512 | # list of stop condition (elapsed time, minimal run count) |
|
528 | # list of stop condition (elapsed time, minimal run count) | |
513 | DEFAULTLIMITS = ( |
|
529 | DEFAULTLIMITS = ( | |
514 | (3.0, 100), |
|
530 | (3.0, 100), | |
515 | (10.0, 3), |
|
531 | (10.0, 3), | |
516 | ) |
|
532 | ) | |
517 |
|
533 | |||
518 |
|
534 | |||
519 | def _timer( |
|
535 | def _timer( | |
520 | fm, |
|
536 | fm, | |
521 | func, |
|
537 | func, | |
522 | setup=None, |
|
538 | setup=None, | |
523 | title=None, |
|
539 | title=None, | |
524 | displayall=False, |
|
540 | displayall=False, | |
525 | limits=DEFAULTLIMITS, |
|
541 | limits=DEFAULTLIMITS, | |
526 | prerun=0, |
|
542 | prerun=0, | |
527 | profiler=None, |
|
543 | profiler=None, | |
528 | ): |
|
544 | ): | |
529 | gc.collect() |
|
545 | gc.collect() | |
530 | results = [] |
|
546 | results = [] | |
531 | begin = util.timer() |
|
547 | begin = util.timer() | |
532 | count = 0 |
|
548 | count = 0 | |
533 | if profiler is None: |
|
549 | if profiler is None: | |
534 | profiler = NOOPCTX |
|
550 | profiler = NOOPCTX | |
535 | for i in range(prerun): |
|
551 | for i in range(prerun): | |
536 | if setup is not None: |
|
552 | if setup is not None: | |
537 | setup() |
|
553 | setup() | |
538 | func() |
|
554 | func() | |
539 | keepgoing = True |
|
555 | keepgoing = True | |
540 | while keepgoing: |
|
556 | while keepgoing: | |
541 | if setup is not None: |
|
557 | if setup is not None: | |
542 | setup() |
|
558 | setup() | |
543 | with profiler: |
|
559 | with profiler: | |
544 | with timeone() as item: |
|
560 | with timeone() as item: | |
545 | r = func() |
|
561 | r = func() | |
546 | profiler = NOOPCTX |
|
562 | profiler = NOOPCTX | |
547 | count += 1 |
|
563 | count += 1 | |
548 | results.append(item[0]) |
|
564 | results.append(item[0]) | |
549 | cstop = util.timer() |
|
565 | cstop = util.timer() | |
550 | # Look for a stop condition. |
|
566 | # Look for a stop condition. | |
551 | elapsed = cstop - begin |
|
567 | elapsed = cstop - begin | |
552 | for t, mincount in limits: |
|
568 | for t, mincount in limits: | |
553 | if elapsed >= t and count >= mincount: |
|
569 | if elapsed >= t and count >= mincount: | |
554 | keepgoing = False |
|
570 | keepgoing = False | |
555 | break |
|
571 | break | |
556 |
|
572 | |||
557 | formatone(fm, results, title=title, result=r, displayall=displayall) |
|
573 | formatone(fm, results, title=title, result=r, displayall=displayall) | |
558 |
|
574 | |||
559 |
|
575 | |||
560 | def formatone(fm, timings, title=None, result=None, displayall=False): |
|
576 | def formatone(fm, timings, title=None, result=None, displayall=False): | |
561 |
|
577 | |||
562 | count = len(timings) |
|
578 | count = len(timings) | |
563 |
|
579 | |||
564 | fm.startitem() |
|
580 | fm.startitem() | |
565 |
|
581 | |||
566 | if title: |
|
582 | if title: | |
567 | fm.write(b'title', b'! %s\n', title) |
|
583 | fm.write(b'title', b'! %s\n', title) | |
568 | if result: |
|
584 | if result: | |
569 | fm.write(b'result', b'! result: %s\n', result) |
|
585 | fm.write(b'result', b'! result: %s\n', result) | |
570 |
|
586 | |||
571 | def display(role, entry): |
|
587 | def display(role, entry): | |
572 | prefix = b'' |
|
588 | prefix = b'' | |
573 | if role != b'best': |
|
589 | if role != b'best': | |
574 | prefix = b'%s.' % role |
|
590 | prefix = b'%s.' % role | |
575 | fm.plain(b'!') |
|
591 | fm.plain(b'!') | |
576 | fm.write(prefix + b'wall', b' wall %f', entry[0]) |
|
592 | fm.write(prefix + b'wall', b' wall %f', entry[0]) | |
577 | fm.write(prefix + b'comb', b' comb %f', entry[1] + entry[2]) |
|
593 | fm.write(prefix + b'comb', b' comb %f', entry[1] + entry[2]) | |
578 | fm.write(prefix + b'user', b' user %f', entry[1]) |
|
594 | fm.write(prefix + b'user', b' user %f', entry[1]) | |
579 | fm.write(prefix + b'sys', b' sys %f', entry[2]) |
|
595 | fm.write(prefix + b'sys', b' sys %f', entry[2]) | |
580 | fm.write(prefix + b'count', b' (%s of %%d)' % role, count) |
|
596 | fm.write(prefix + b'count', b' (%s of %%d)' % role, count) | |
581 | fm.plain(b'\n') |
|
597 | fm.plain(b'\n') | |
582 |
|
598 | |||
583 | timings.sort() |
|
599 | timings.sort() | |
584 | min_val = timings[0] |
|
600 | min_val = timings[0] | |
585 | display(b'best', min_val) |
|
601 | display(b'best', min_val) | |
586 | if displayall: |
|
602 | if displayall: | |
587 | max_val = timings[-1] |
|
603 | max_val = timings[-1] | |
588 | display(b'max', max_val) |
|
604 | display(b'max', max_val) | |
589 | avg = tuple([sum(x) / count for x in zip(*timings)]) |
|
605 | avg = tuple([sum(x) / count for x in zip(*timings)]) | |
590 | display(b'avg', avg) |
|
606 | display(b'avg', avg) | |
591 | median = timings[len(timings) // 2] |
|
607 | median = timings[len(timings) // 2] | |
592 | display(b'median', median) |
|
608 | display(b'median', median) | |
593 |
|
609 | |||
594 |
|
610 | |||
595 | # utilities for historical portability |
|
611 | # utilities for historical portability | |
596 |
|
612 | |||
597 |
|
613 | |||
598 | def getint(ui, section, name, default): |
|
614 | def getint(ui, section, name, default): | |
599 | # for "historical portability": |
|
615 | # for "historical portability": | |
600 | # ui.configint has been available since 1.9 (or fa2b596db182) |
|
616 | # ui.configint has been available since 1.9 (or fa2b596db182) | |
601 | v = ui.config(section, name, None) |
|
617 | v = ui.config(section, name, None) | |
602 | if v is None: |
|
618 | if v is None: | |
603 | return default |
|
619 | return default | |
604 | try: |
|
620 | try: | |
605 | return int(v) |
|
621 | return int(v) | |
606 | except ValueError: |
|
622 | except ValueError: | |
607 | raise error.ConfigError( |
|
623 | raise error.ConfigError( | |
608 | b"%s.%s is not an integer ('%s')" % (section, name, v) |
|
624 | b"%s.%s is not an integer ('%s')" % (section, name, v) | |
609 | ) |
|
625 | ) | |
610 |
|
626 | |||
611 |
|
627 | |||
612 | def safeattrsetter(obj, name, ignoremissing=False): |
|
628 | def safeattrsetter(obj, name, ignoremissing=False): | |
613 | """Ensure that 'obj' has 'name' attribute before subsequent setattr |
|
629 | """Ensure that 'obj' has 'name' attribute before subsequent setattr | |
614 |
|
630 | |||
615 | This function is aborted, if 'obj' doesn't have 'name' attribute |
|
631 | This function is aborted, if 'obj' doesn't have 'name' attribute | |
616 | at runtime. This avoids overlooking removal of an attribute, which |
|
632 | at runtime. This avoids overlooking removal of an attribute, which | |
617 | breaks assumption of performance measurement, in the future. |
|
633 | breaks assumption of performance measurement, in the future. | |
618 |
|
634 | |||
619 | This function returns the object to (1) assign a new value, and |
|
635 | This function returns the object to (1) assign a new value, and | |
620 | (2) restore an original value to the attribute. |
|
636 | (2) restore an original value to the attribute. | |
621 |
|
637 | |||
622 | If 'ignoremissing' is true, missing 'name' attribute doesn't cause |
|
638 | If 'ignoremissing' is true, missing 'name' attribute doesn't cause | |
623 | abortion, and this function returns None. This is useful to |
|
639 | abortion, and this function returns None. This is useful to | |
624 | examine an attribute, which isn't ensured in all Mercurial |
|
640 | examine an attribute, which isn't ensured in all Mercurial | |
625 | versions. |
|
641 | versions. | |
626 | """ |
|
642 | """ | |
627 | if not util.safehasattr(obj, name): |
|
643 | if not util.safehasattr(obj, name): | |
628 | if ignoremissing: |
|
644 | if ignoremissing: | |
629 | return None |
|
645 | return None | |
630 | raise error.Abort( |
|
646 | raise error.Abort( | |
631 | ( |
|
647 | ( | |
632 | b"missing attribute %s of %s might break assumption" |
|
648 | b"missing attribute %s of %s might break assumption" | |
633 | b" of performance measurement" |
|
649 | b" of performance measurement" | |
634 | ) |
|
650 | ) | |
635 | % (name, obj) |
|
651 | % (name, obj) | |
636 | ) |
|
652 | ) | |
637 |
|
653 | |||
638 | origvalue = getattr(obj, _sysstr(name)) |
|
654 | origvalue = getattr(obj, _sysstr(name)) | |
639 |
|
655 | |||
640 | class attrutil(object): |
|
656 | class attrutil(object): | |
641 | def set(self, newvalue): |
|
657 | def set(self, newvalue): | |
642 | setattr(obj, _sysstr(name), newvalue) |
|
658 | setattr(obj, _sysstr(name), newvalue) | |
643 |
|
659 | |||
644 | def restore(self): |
|
660 | def restore(self): | |
645 | setattr(obj, _sysstr(name), origvalue) |
|
661 | setattr(obj, _sysstr(name), origvalue) | |
646 |
|
662 | |||
647 | return attrutil() |
|
663 | return attrutil() | |
648 |
|
664 | |||
649 |
|
665 | |||
650 | # utilities to examine each internal API changes |
|
666 | # utilities to examine each internal API changes | |
651 |
|
667 | |||
652 |
|
668 | |||
653 | def getbranchmapsubsettable(): |
|
669 | def getbranchmapsubsettable(): | |
654 | # for "historical portability": |
|
670 | # for "historical portability": | |
655 | # subsettable is defined in: |
|
671 | # subsettable is defined in: | |
656 | # - branchmap since 2.9 (or 175c6fd8cacc) |
|
672 | # - branchmap since 2.9 (or 175c6fd8cacc) | |
657 | # - repoview since 2.5 (or 59a9f18d4587) |
|
673 | # - repoview since 2.5 (or 59a9f18d4587) | |
658 | # - repoviewutil since 5.0 |
|
674 | # - repoviewutil since 5.0 | |
659 | for mod in (branchmap, repoview, repoviewutil): |
|
675 | for mod in (branchmap, repoview, repoviewutil): | |
660 | subsettable = getattr(mod, 'subsettable', None) |
|
676 | subsettable = getattr(mod, 'subsettable', None) | |
661 | if subsettable: |
|
677 | if subsettable: | |
662 | return subsettable |
|
678 | return subsettable | |
663 |
|
679 | |||
664 | # bisecting in bcee63733aad::59a9f18d4587 can reach here (both |
|
680 | # bisecting in bcee63733aad::59a9f18d4587 can reach here (both | |
665 | # branchmap and repoview modules exist, but subsettable attribute |
|
681 | # branchmap and repoview modules exist, but subsettable attribute | |
666 | # doesn't) |
|
682 | # doesn't) | |
667 | raise error.Abort( |
|
683 | raise error.Abort( | |
668 | b"perfbranchmap not available with this Mercurial", |
|
684 | b"perfbranchmap not available with this Mercurial", | |
669 | hint=b"use 2.5 or later", |
|
685 | hint=b"use 2.5 or later", | |
670 | ) |
|
686 | ) | |
671 |
|
687 | |||
672 |
|
688 | |||
673 | def getsvfs(repo): |
|
689 | def getsvfs(repo): | |
674 | """Return appropriate object to access files under .hg/store""" |
|
690 | """Return appropriate object to access files under .hg/store""" | |
675 | # for "historical portability": |
|
691 | # for "historical portability": | |
676 | # repo.svfs has been available since 2.3 (or 7034365089bf) |
|
692 | # repo.svfs has been available since 2.3 (or 7034365089bf) | |
677 | svfs = getattr(repo, 'svfs', None) |
|
693 | svfs = getattr(repo, 'svfs', None) | |
678 | if svfs: |
|
694 | if svfs: | |
679 | return svfs |
|
695 | return svfs | |
680 | else: |
|
696 | else: | |
681 | return getattr(repo, 'sopener') |
|
697 | return getattr(repo, 'sopener') | |
682 |
|
698 | |||
683 |
|
699 | |||
684 | def getvfs(repo): |
|
700 | def getvfs(repo): | |
685 | """Return appropriate object to access files under .hg""" |
|
701 | """Return appropriate object to access files under .hg""" | |
686 | # for "historical portability": |
|
702 | # for "historical portability": | |
687 | # repo.vfs has been available since 2.3 (or 7034365089bf) |
|
703 | # repo.vfs has been available since 2.3 (or 7034365089bf) | |
688 | vfs = getattr(repo, 'vfs', None) |
|
704 | vfs = getattr(repo, 'vfs', None) | |
689 | if vfs: |
|
705 | if vfs: | |
690 | return vfs |
|
706 | return vfs | |
691 | else: |
|
707 | else: | |
692 | return getattr(repo, 'opener') |
|
708 | return getattr(repo, 'opener') | |
693 |
|
709 | |||
694 |
|
710 | |||
695 | def repocleartagscachefunc(repo): |
|
711 | def repocleartagscachefunc(repo): | |
696 | """Return the function to clear tags cache according to repo internal API""" |
|
712 | """Return the function to clear tags cache according to repo internal API""" | |
697 | if util.safehasattr(repo, b'_tagscache'): # since 2.0 (or 9dca7653b525) |
|
713 | if util.safehasattr(repo, b'_tagscache'): # since 2.0 (or 9dca7653b525) | |
698 | # in this case, setattr(repo, '_tagscache', None) or so isn't |
|
714 | # in this case, setattr(repo, '_tagscache', None) or so isn't | |
699 | # correct way to clear tags cache, because existing code paths |
|
715 | # correct way to clear tags cache, because existing code paths | |
700 | # expect _tagscache to be a structured object. |
|
716 | # expect _tagscache to be a structured object. | |
701 | def clearcache(): |
|
717 | def clearcache(): | |
702 | # _tagscache has been filteredpropertycache since 2.5 (or |
|
718 | # _tagscache has been filteredpropertycache since 2.5 (or | |
703 | # 98c867ac1330), and delattr() can't work in such case |
|
719 | # 98c867ac1330), and delattr() can't work in such case | |
704 | if '_tagscache' in vars(repo): |
|
720 | if '_tagscache' in vars(repo): | |
705 | del repo.__dict__['_tagscache'] |
|
721 | del repo.__dict__['_tagscache'] | |
706 |
|
722 | |||
707 | return clearcache |
|
723 | return clearcache | |
708 |
|
724 | |||
709 | repotags = safeattrsetter(repo, b'_tags', ignoremissing=True) |
|
725 | repotags = safeattrsetter(repo, b'_tags', ignoremissing=True) | |
710 | if repotags: # since 1.4 (or 5614a628d173) |
|
726 | if repotags: # since 1.4 (or 5614a628d173) | |
711 | return lambda: repotags.set(None) |
|
727 | return lambda: repotags.set(None) | |
712 |
|
728 | |||
713 | repotagscache = safeattrsetter(repo, b'tagscache', ignoremissing=True) |
|
729 | repotagscache = safeattrsetter(repo, b'tagscache', ignoremissing=True) | |
714 | if repotagscache: # since 0.6 (or d7df759d0e97) |
|
730 | if repotagscache: # since 0.6 (or d7df759d0e97) | |
715 | return lambda: repotagscache.set(None) |
|
731 | return lambda: repotagscache.set(None) | |
716 |
|
732 | |||
717 | # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches |
|
733 | # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches | |
718 | # this point, but it isn't so problematic, because: |
|
734 | # this point, but it isn't so problematic, because: | |
719 | # - repo.tags of such Mercurial isn't "callable", and repo.tags() |
|
735 | # - repo.tags of such Mercurial isn't "callable", and repo.tags() | |
720 | # in perftags() causes failure soon |
|
736 | # in perftags() causes failure soon | |
721 | # - perf.py itself has been available since 1.1 (or eb240755386d) |
|
737 | # - perf.py itself has been available since 1.1 (or eb240755386d) | |
722 | raise error.Abort(b"tags API of this hg command is unknown") |
|
738 | raise error.Abort(b"tags API of this hg command is unknown") | |
723 |
|
739 | |||
724 |
|
740 | |||
725 | # utilities to clear cache |
|
741 | # utilities to clear cache | |
726 |
|
742 | |||
727 |
|
743 | |||
728 | def clearfilecache(obj, attrname): |
|
744 | def clearfilecache(obj, attrname): | |
729 | unfiltered = getattr(obj, 'unfiltered', None) |
|
745 | unfiltered = getattr(obj, 'unfiltered', None) | |
730 | if unfiltered is not None: |
|
746 | if unfiltered is not None: | |
731 | obj = obj.unfiltered() |
|
747 | obj = obj.unfiltered() | |
732 | if attrname in vars(obj): |
|
748 | if attrname in vars(obj): | |
733 | delattr(obj, attrname) |
|
749 | delattr(obj, attrname) | |
734 | obj._filecache.pop(attrname, None) |
|
750 | obj._filecache.pop(attrname, None) | |
735 |
|
751 | |||
736 |
|
752 | |||
737 | def clearchangelog(repo): |
|
753 | def clearchangelog(repo): | |
738 | if repo is not repo.unfiltered(): |
|
754 | if repo is not repo.unfiltered(): | |
739 | object.__setattr__(repo, '_clcachekey', None) |
|
755 | object.__setattr__(repo, '_clcachekey', None) | |
740 | object.__setattr__(repo, '_clcache', None) |
|
756 | object.__setattr__(repo, '_clcache', None) | |
741 | clearfilecache(repo.unfiltered(), 'changelog') |
|
757 | clearfilecache(repo.unfiltered(), 'changelog') | |
742 |
|
758 | |||
743 |
|
759 | |||
744 | # perf commands |
|
760 | # perf commands | |
745 |
|
761 | |||
746 |
|
762 | |||
747 | @command(b'perf::walk|perfwalk', formatteropts) |
|
763 | @command(b'perf::walk|perfwalk', formatteropts) | |
748 | def perfwalk(ui, repo, *pats, **opts): |
|
764 | def perfwalk(ui, repo, *pats, **opts): | |
749 | opts = _byteskwargs(opts) |
|
765 | opts = _byteskwargs(opts) | |
750 | timer, fm = gettimer(ui, opts) |
|
766 | timer, fm = gettimer(ui, opts) | |
751 | m = scmutil.match(repo[None], pats, {}) |
|
767 | m = scmutil.match(repo[None], pats, {}) | |
752 | timer( |
|
768 | timer( | |
753 | lambda: len( |
|
769 | lambda: len( | |
754 | list( |
|
770 | list( | |
755 | repo.dirstate.walk(m, subrepos=[], unknown=True, ignored=False) |
|
771 | repo.dirstate.walk(m, subrepos=[], unknown=True, ignored=False) | |
756 | ) |
|
772 | ) | |
757 | ) |
|
773 | ) | |
758 | ) |
|
774 | ) | |
759 | fm.end() |
|
775 | fm.end() | |
760 |
|
776 | |||
761 |
|
777 | |||
762 | @command(b'perf::annotate|perfannotate', formatteropts) |
|
778 | @command(b'perf::annotate|perfannotate', formatteropts) | |
763 | def perfannotate(ui, repo, f, **opts): |
|
779 | def perfannotate(ui, repo, f, **opts): | |
764 | opts = _byteskwargs(opts) |
|
780 | opts = _byteskwargs(opts) | |
765 | timer, fm = gettimer(ui, opts) |
|
781 | timer, fm = gettimer(ui, opts) | |
766 | fc = repo[b'.'][f] |
|
782 | fc = repo[b'.'][f] | |
767 | timer(lambda: len(fc.annotate(True))) |
|
783 | timer(lambda: len(fc.annotate(True))) | |
768 | fm.end() |
|
784 | fm.end() | |
769 |
|
785 | |||
770 |
|
786 | |||
771 | @command( |
|
787 | @command( | |
772 | b'perf::status|perfstatus', |
|
788 | b'perf::status|perfstatus', | |
773 | [ |
|
789 | [ | |
774 | (b'u', b'unknown', False, b'ask status to look for unknown files'), |
|
790 | (b'u', b'unknown', False, b'ask status to look for unknown files'), | |
775 | (b'', b'dirstate', False, b'benchmark the internal dirstate call'), |
|
791 | (b'', b'dirstate', False, b'benchmark the internal dirstate call'), | |
776 | ] |
|
792 | ] | |
777 | + formatteropts, |
|
793 | + formatteropts, | |
778 | ) |
|
794 | ) | |
779 | def perfstatus(ui, repo, **opts): |
|
795 | def perfstatus(ui, repo, **opts): | |
780 | """benchmark the performance of a single status call |
|
796 | """benchmark the performance of a single status call | |
781 |
|
797 | |||
782 | The repository data are preserved between each call. |
|
798 | The repository data are preserved between each call. | |
783 |
|
799 | |||
784 | By default, only the status of the tracked file are requested. If |
|
800 | By default, only the status of the tracked file are requested. If | |
785 | `--unknown` is passed, the "unknown" files are also tracked. |
|
801 | `--unknown` is passed, the "unknown" files are also tracked. | |
786 | """ |
|
802 | """ | |
787 | opts = _byteskwargs(opts) |
|
803 | opts = _byteskwargs(opts) | |
788 | # m = match.always(repo.root, repo.getcwd()) |
|
804 | # m = match.always(repo.root, repo.getcwd()) | |
789 | # timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, |
|
805 | # timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, | |
790 | # False)))) |
|
806 | # False)))) | |
791 | timer, fm = gettimer(ui, opts) |
|
807 | timer, fm = gettimer(ui, opts) | |
792 | if opts[b'dirstate']: |
|
808 | if opts[b'dirstate']: | |
793 | dirstate = repo.dirstate |
|
809 | dirstate = repo.dirstate | |
794 | m = scmutil.matchall(repo) |
|
810 | m = scmutil.matchall(repo) | |
795 | unknown = opts[b'unknown'] |
|
811 | unknown = opts[b'unknown'] | |
796 |
|
812 | |||
797 | def status_dirstate(): |
|
813 | def status_dirstate(): | |
798 | s = dirstate.status( |
|
814 | s = dirstate.status( | |
799 | m, subrepos=[], ignored=False, clean=False, unknown=unknown |
|
815 | m, subrepos=[], ignored=False, clean=False, unknown=unknown | |
800 | ) |
|
816 | ) | |
801 | sum(map(bool, s)) |
|
817 | sum(map(bool, s)) | |
802 |
|
818 | |||
803 | timer(status_dirstate) |
|
819 | timer(status_dirstate) | |
804 | else: |
|
820 | else: | |
805 | timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown'])))) |
|
821 | timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown'])))) | |
806 | fm.end() |
|
822 | fm.end() | |
807 |
|
823 | |||
808 |
|
824 | |||
809 | @command(b'perf::addremove|perfaddremove', formatteropts) |
|
825 | @command(b'perf::addremove|perfaddremove', formatteropts) | |
810 | def perfaddremove(ui, repo, **opts): |
|
826 | def perfaddremove(ui, repo, **opts): | |
811 | opts = _byteskwargs(opts) |
|
827 | opts = _byteskwargs(opts) | |
812 | timer, fm = gettimer(ui, opts) |
|
828 | timer, fm = gettimer(ui, opts) | |
813 | try: |
|
829 | try: | |
814 | oldquiet = repo.ui.quiet |
|
830 | oldquiet = repo.ui.quiet | |
815 | repo.ui.quiet = True |
|
831 | repo.ui.quiet = True | |
816 | matcher = scmutil.match(repo[None]) |
|
832 | matcher = scmutil.match(repo[None]) | |
817 | opts[b'dry_run'] = True |
|
833 | opts[b'dry_run'] = True | |
818 | if 'uipathfn' in getargspec(scmutil.addremove).args: |
|
834 | if 'uipathfn' in getargspec(scmutil.addremove).args: | |
819 | uipathfn = scmutil.getuipathfn(repo) |
|
835 | uipathfn = scmutil.getuipathfn(repo) | |
820 | timer(lambda: scmutil.addremove(repo, matcher, b"", uipathfn, opts)) |
|
836 | timer(lambda: scmutil.addremove(repo, matcher, b"", uipathfn, opts)) | |
821 | else: |
|
837 | else: | |
822 | timer(lambda: scmutil.addremove(repo, matcher, b"", opts)) |
|
838 | timer(lambda: scmutil.addremove(repo, matcher, b"", opts)) | |
823 | finally: |
|
839 | finally: | |
824 | repo.ui.quiet = oldquiet |
|
840 | repo.ui.quiet = oldquiet | |
825 | fm.end() |
|
841 | fm.end() | |
826 |
|
842 | |||
827 |
|
843 | |||
828 | def clearcaches(cl): |
|
844 | def clearcaches(cl): | |
829 | # behave somewhat consistently across internal API changes |
|
845 | # behave somewhat consistently across internal API changes | |
830 | if util.safehasattr(cl, b'clearcaches'): |
|
846 | if util.safehasattr(cl, b'clearcaches'): | |
831 | cl.clearcaches() |
|
847 | cl.clearcaches() | |
832 | elif util.safehasattr(cl, b'_nodecache'): |
|
848 | elif util.safehasattr(cl, b'_nodecache'): | |
833 | # <= hg-5.2 |
|
849 | # <= hg-5.2 | |
834 | from mercurial.node import nullid, nullrev |
|
850 | from mercurial.node import nullid, nullrev | |
835 |
|
851 | |||
836 | cl._nodecache = {nullid: nullrev} |
|
852 | cl._nodecache = {nullid: nullrev} | |
837 | cl._nodepos = None |
|
853 | cl._nodepos = None | |
838 |
|
854 | |||
839 |
|
855 | |||
840 | @command(b'perf::heads|perfheads', formatteropts) |
|
856 | @command(b'perf::heads|perfheads', formatteropts) | |
841 | def perfheads(ui, repo, **opts): |
|
857 | def perfheads(ui, repo, **opts): | |
842 | """benchmark the computation of a changelog heads""" |
|
858 | """benchmark the computation of a changelog heads""" | |
843 | opts = _byteskwargs(opts) |
|
859 | opts = _byteskwargs(opts) | |
844 | timer, fm = gettimer(ui, opts) |
|
860 | timer, fm = gettimer(ui, opts) | |
845 | cl = repo.changelog |
|
861 | cl = repo.changelog | |
846 |
|
862 | |||
847 | def s(): |
|
863 | def s(): | |
848 | clearcaches(cl) |
|
864 | clearcaches(cl) | |
849 |
|
865 | |||
850 | def d(): |
|
866 | def d(): | |
851 | len(cl.headrevs()) |
|
867 | len(cl.headrevs()) | |
852 |
|
868 | |||
853 | timer(d, setup=s) |
|
869 | timer(d, setup=s) | |
854 | fm.end() |
|
870 | fm.end() | |
855 |
|
871 | |||
856 |
|
872 | |||
857 | @command( |
|
873 | @command( | |
858 | b'perf::tags|perftags', |
|
874 | b'perf::tags|perftags', | |
859 | formatteropts |
|
875 | formatteropts | |
860 | + [ |
|
876 | + [ | |
861 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), |
|
877 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), | |
862 | ], |
|
878 | ], | |
863 | ) |
|
879 | ) | |
864 | def perftags(ui, repo, **opts): |
|
880 | def perftags(ui, repo, **opts): | |
865 | opts = _byteskwargs(opts) |
|
881 | opts = _byteskwargs(opts) | |
866 | timer, fm = gettimer(ui, opts) |
|
882 | timer, fm = gettimer(ui, opts) | |
867 | repocleartagscache = repocleartagscachefunc(repo) |
|
883 | repocleartagscache = repocleartagscachefunc(repo) | |
868 | clearrevlogs = opts[b'clear_revlogs'] |
|
884 | clearrevlogs = opts[b'clear_revlogs'] | |
869 |
|
885 | |||
870 | def s(): |
|
886 | def s(): | |
871 | if clearrevlogs: |
|
887 | if clearrevlogs: | |
872 | clearchangelog(repo) |
|
888 | clearchangelog(repo) | |
873 | clearfilecache(repo.unfiltered(), 'manifest') |
|
889 | clearfilecache(repo.unfiltered(), 'manifest') | |
874 | repocleartagscache() |
|
890 | repocleartagscache() | |
875 |
|
891 | |||
876 | def t(): |
|
892 | def t(): | |
877 | return len(repo.tags()) |
|
893 | return len(repo.tags()) | |
878 |
|
894 | |||
879 | timer(t, setup=s) |
|
895 | timer(t, setup=s) | |
880 | fm.end() |
|
896 | fm.end() | |
881 |
|
897 | |||
882 |
|
898 | |||
883 | @command(b'perf::ancestors|perfancestors', formatteropts) |
|
899 | @command(b'perf::ancestors|perfancestors', formatteropts) | |
884 | def perfancestors(ui, repo, **opts): |
|
900 | def perfancestors(ui, repo, **opts): | |
885 | opts = _byteskwargs(opts) |
|
901 | opts = _byteskwargs(opts) | |
886 | timer, fm = gettimer(ui, opts) |
|
902 | timer, fm = gettimer(ui, opts) | |
887 | heads = repo.changelog.headrevs() |
|
903 | heads = repo.changelog.headrevs() | |
888 |
|
904 | |||
889 | def d(): |
|
905 | def d(): | |
890 | for a in repo.changelog.ancestors(heads): |
|
906 | for a in repo.changelog.ancestors(heads): | |
891 | pass |
|
907 | pass | |
892 |
|
908 | |||
893 | timer(d) |
|
909 | timer(d) | |
894 | fm.end() |
|
910 | fm.end() | |
895 |
|
911 | |||
896 |
|
912 | |||
897 | @command(b'perf::ancestorset|perfancestorset', formatteropts) |
|
913 | @command(b'perf::ancestorset|perfancestorset', formatteropts) | |
898 | def perfancestorset(ui, repo, revset, **opts): |
|
914 | def perfancestorset(ui, repo, revset, **opts): | |
899 | opts = _byteskwargs(opts) |
|
915 | opts = _byteskwargs(opts) | |
900 | timer, fm = gettimer(ui, opts) |
|
916 | timer, fm = gettimer(ui, opts) | |
901 | revs = repo.revs(revset) |
|
917 | revs = repo.revs(revset) | |
902 | heads = repo.changelog.headrevs() |
|
918 | heads = repo.changelog.headrevs() | |
903 |
|
919 | |||
904 | def d(): |
|
920 | def d(): | |
905 | s = repo.changelog.ancestors(heads) |
|
921 | s = repo.changelog.ancestors(heads) | |
906 | for rev in revs: |
|
922 | for rev in revs: | |
907 | rev in s |
|
923 | rev in s | |
908 |
|
924 | |||
909 | timer(d) |
|
925 | timer(d) | |
910 | fm.end() |
|
926 | fm.end() | |
911 |
|
927 | |||
912 |
|
928 | |||
913 | @command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH') |
|
929 | @command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH') | |
914 | def perfdiscovery(ui, repo, path, **opts): |
|
930 | def perfdiscovery(ui, repo, path, **opts): | |
915 | """benchmark discovery between local repo and the peer at given path""" |
|
931 | """benchmark discovery between local repo and the peer at given path""" | |
916 | repos = [repo, None] |
|
932 | repos = [repo, None] | |
917 | timer, fm = gettimer(ui, opts) |
|
933 | timer, fm = gettimer(ui, opts) | |
918 |
|
934 | |||
919 | try: |
|
935 | try: | |
920 | from mercurial.utils.urlutil import get_unique_pull_path |
|
936 | from mercurial.utils.urlutil import get_unique_pull_path | |
921 |
|
937 | |||
922 | path = get_unique_pull_path(b'perfdiscovery', repo, ui, path)[0] |
|
938 | path = get_unique_pull_path(b'perfdiscovery', repo, ui, path)[0] | |
923 | except ImportError: |
|
939 | except ImportError: | |
924 | path = ui.expandpath(path) |
|
940 | path = ui.expandpath(path) | |
925 |
|
941 | |||
926 | def s(): |
|
942 | def s(): | |
927 | repos[1] = hg.peer(ui, opts, path) |
|
943 | repos[1] = hg.peer(ui, opts, path) | |
928 |
|
944 | |||
929 | def d(): |
|
945 | def d(): | |
930 | setdiscovery.findcommonheads(ui, *repos) |
|
946 | setdiscovery.findcommonheads(ui, *repos) | |
931 |
|
947 | |||
932 | timer(d, setup=s) |
|
948 | timer(d, setup=s) | |
933 | fm.end() |
|
949 | fm.end() | |
934 |
|
950 | |||
935 |
|
951 | |||
936 | @command( |
|
952 | @command( | |
937 | b'perf::bookmarks|perfbookmarks', |
|
953 | b'perf::bookmarks|perfbookmarks', | |
938 | formatteropts |
|
954 | formatteropts | |
939 | + [ |
|
955 | + [ | |
940 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), |
|
956 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), | |
941 | ], |
|
957 | ], | |
942 | ) |
|
958 | ) | |
943 | def perfbookmarks(ui, repo, **opts): |
|
959 | def perfbookmarks(ui, repo, **opts): | |
944 | """benchmark parsing bookmarks from disk to memory""" |
|
960 | """benchmark parsing bookmarks from disk to memory""" | |
945 | opts = _byteskwargs(opts) |
|
961 | opts = _byteskwargs(opts) | |
946 | timer, fm = gettimer(ui, opts) |
|
962 | timer, fm = gettimer(ui, opts) | |
947 |
|
963 | |||
948 | clearrevlogs = opts[b'clear_revlogs'] |
|
964 | clearrevlogs = opts[b'clear_revlogs'] | |
949 |
|
965 | |||
950 | def s(): |
|
966 | def s(): | |
951 | if clearrevlogs: |
|
967 | if clearrevlogs: | |
952 | clearchangelog(repo) |
|
968 | clearchangelog(repo) | |
953 | clearfilecache(repo, b'_bookmarks') |
|
969 | clearfilecache(repo, b'_bookmarks') | |
954 |
|
970 | |||
955 | def d(): |
|
971 | def d(): | |
956 | repo._bookmarks |
|
972 | repo._bookmarks | |
957 |
|
973 | |||
958 | timer(d, setup=s) |
|
974 | timer(d, setup=s) | |
959 | fm.end() |
|
975 | fm.end() | |
960 |
|
976 | |||
961 |
|
977 | |||
962 | @command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE') |
|
978 | @command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE') | |
963 | def perfbundleread(ui, repo, bundlepath, **opts): |
|
979 | def perfbundleread(ui, repo, bundlepath, **opts): | |
964 | """Benchmark reading of bundle files. |
|
980 | """Benchmark reading of bundle files. | |
965 |
|
981 | |||
966 | This command is meant to isolate the I/O part of bundle reading as |
|
982 | This command is meant to isolate the I/O part of bundle reading as | |
967 | much as possible. |
|
983 | much as possible. | |
968 | """ |
|
984 | """ | |
969 | from mercurial import ( |
|
985 | from mercurial import ( | |
970 | bundle2, |
|
986 | bundle2, | |
971 | exchange, |
|
987 | exchange, | |
972 | streamclone, |
|
988 | streamclone, | |
973 | ) |
|
989 | ) | |
974 |
|
990 | |||
975 | opts = _byteskwargs(opts) |
|
991 | opts = _byteskwargs(opts) | |
976 |
|
992 | |||
977 | def makebench(fn): |
|
993 | def makebench(fn): | |
978 | def run(): |
|
994 | def run(): | |
979 | with open(bundlepath, b'rb') as fh: |
|
995 | with open(bundlepath, b'rb') as fh: | |
980 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
996 | bundle = exchange.readbundle(ui, fh, bundlepath) | |
981 | fn(bundle) |
|
997 | fn(bundle) | |
982 |
|
998 | |||
983 | return run |
|
999 | return run | |
984 |
|
1000 | |||
985 | def makereadnbytes(size): |
|
1001 | def makereadnbytes(size): | |
986 | def run(): |
|
1002 | def run(): | |
987 | with open(bundlepath, b'rb') as fh: |
|
1003 | with open(bundlepath, b'rb') as fh: | |
988 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
1004 | bundle = exchange.readbundle(ui, fh, bundlepath) | |
989 | while bundle.read(size): |
|
1005 | while bundle.read(size): | |
990 | pass |
|
1006 | pass | |
991 |
|
1007 | |||
992 | return run |
|
1008 | return run | |
993 |
|
1009 | |||
994 | def makestdioread(size): |
|
1010 | def makestdioread(size): | |
995 | def run(): |
|
1011 | def run(): | |
996 | with open(bundlepath, b'rb') as fh: |
|
1012 | with open(bundlepath, b'rb') as fh: | |
997 | while fh.read(size): |
|
1013 | while fh.read(size): | |
998 | pass |
|
1014 | pass | |
999 |
|
1015 | |||
1000 | return run |
|
1016 | return run | |
1001 |
|
1017 | |||
1002 | # bundle1 |
|
1018 | # bundle1 | |
1003 |
|
1019 | |||
1004 | def deltaiter(bundle): |
|
1020 | def deltaiter(bundle): | |
1005 | for delta in bundle.deltaiter(): |
|
1021 | for delta in bundle.deltaiter(): | |
1006 | pass |
|
1022 | pass | |
1007 |
|
1023 | |||
1008 | def iterchunks(bundle): |
|
1024 | def iterchunks(bundle): | |
1009 | for chunk in bundle.getchunks(): |
|
1025 | for chunk in bundle.getchunks(): | |
1010 | pass |
|
1026 | pass | |
1011 |
|
1027 | |||
1012 | # bundle2 |
|
1028 | # bundle2 | |
1013 |
|
1029 | |||
1014 | def forwardchunks(bundle): |
|
1030 | def forwardchunks(bundle): | |
1015 | for chunk in bundle._forwardchunks(): |
|
1031 | for chunk in bundle._forwardchunks(): | |
1016 | pass |
|
1032 | pass | |
1017 |
|
1033 | |||
1018 | def iterparts(bundle): |
|
1034 | def iterparts(bundle): | |
1019 | for part in bundle.iterparts(): |
|
1035 | for part in bundle.iterparts(): | |
1020 | pass |
|
1036 | pass | |
1021 |
|
1037 | |||
1022 | def iterpartsseekable(bundle): |
|
1038 | def iterpartsseekable(bundle): | |
1023 | for part in bundle.iterparts(seekable=True): |
|
1039 | for part in bundle.iterparts(seekable=True): | |
1024 | pass |
|
1040 | pass | |
1025 |
|
1041 | |||
1026 | def seek(bundle): |
|
1042 | def seek(bundle): | |
1027 | for part in bundle.iterparts(seekable=True): |
|
1043 | for part in bundle.iterparts(seekable=True): | |
1028 | part.seek(0, os.SEEK_END) |
|
1044 | part.seek(0, os.SEEK_END) | |
1029 |
|
1045 | |||
1030 | def makepartreadnbytes(size): |
|
1046 | def makepartreadnbytes(size): | |
1031 | def run(): |
|
1047 | def run(): | |
1032 | with open(bundlepath, b'rb') as fh: |
|
1048 | with open(bundlepath, b'rb') as fh: | |
1033 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
1049 | bundle = exchange.readbundle(ui, fh, bundlepath) | |
1034 | for part in bundle.iterparts(): |
|
1050 | for part in bundle.iterparts(): | |
1035 | while part.read(size): |
|
1051 | while part.read(size): | |
1036 | pass |
|
1052 | pass | |
1037 |
|
1053 | |||
1038 | return run |
|
1054 | return run | |
1039 |
|
1055 | |||
1040 | benches = [ |
|
1056 | benches = [ | |
1041 | (makestdioread(8192), b'read(8k)'), |
|
1057 | (makestdioread(8192), b'read(8k)'), | |
1042 | (makestdioread(16384), b'read(16k)'), |
|
1058 | (makestdioread(16384), b'read(16k)'), | |
1043 | (makestdioread(32768), b'read(32k)'), |
|
1059 | (makestdioread(32768), b'read(32k)'), | |
1044 | (makestdioread(131072), b'read(128k)'), |
|
1060 | (makestdioread(131072), b'read(128k)'), | |
1045 | ] |
|
1061 | ] | |
1046 |
|
1062 | |||
1047 | with open(bundlepath, b'rb') as fh: |
|
1063 | with open(bundlepath, b'rb') as fh: | |
1048 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
1064 | bundle = exchange.readbundle(ui, fh, bundlepath) | |
1049 |
|
1065 | |||
1050 | if isinstance(bundle, changegroup.cg1unpacker): |
|
1066 | if isinstance(bundle, changegroup.cg1unpacker): | |
1051 | benches.extend( |
|
1067 | benches.extend( | |
1052 | [ |
|
1068 | [ | |
1053 | (makebench(deltaiter), b'cg1 deltaiter()'), |
|
1069 | (makebench(deltaiter), b'cg1 deltaiter()'), | |
1054 | (makebench(iterchunks), b'cg1 getchunks()'), |
|
1070 | (makebench(iterchunks), b'cg1 getchunks()'), | |
1055 | (makereadnbytes(8192), b'cg1 read(8k)'), |
|
1071 | (makereadnbytes(8192), b'cg1 read(8k)'), | |
1056 | (makereadnbytes(16384), b'cg1 read(16k)'), |
|
1072 | (makereadnbytes(16384), b'cg1 read(16k)'), | |
1057 | (makereadnbytes(32768), b'cg1 read(32k)'), |
|
1073 | (makereadnbytes(32768), b'cg1 read(32k)'), | |
1058 | (makereadnbytes(131072), b'cg1 read(128k)'), |
|
1074 | (makereadnbytes(131072), b'cg1 read(128k)'), | |
1059 | ] |
|
1075 | ] | |
1060 | ) |
|
1076 | ) | |
1061 | elif isinstance(bundle, bundle2.unbundle20): |
|
1077 | elif isinstance(bundle, bundle2.unbundle20): | |
1062 | benches.extend( |
|
1078 | benches.extend( | |
1063 | [ |
|
1079 | [ | |
1064 | (makebench(forwardchunks), b'bundle2 forwardchunks()'), |
|
1080 | (makebench(forwardchunks), b'bundle2 forwardchunks()'), | |
1065 | (makebench(iterparts), b'bundle2 iterparts()'), |
|
1081 | (makebench(iterparts), b'bundle2 iterparts()'), | |
1066 | ( |
|
1082 | ( | |
1067 | makebench(iterpartsseekable), |
|
1083 | makebench(iterpartsseekable), | |
1068 | b'bundle2 iterparts() seekable', |
|
1084 | b'bundle2 iterparts() seekable', | |
1069 | ), |
|
1085 | ), | |
1070 | (makebench(seek), b'bundle2 part seek()'), |
|
1086 | (makebench(seek), b'bundle2 part seek()'), | |
1071 | (makepartreadnbytes(8192), b'bundle2 part read(8k)'), |
|
1087 | (makepartreadnbytes(8192), b'bundle2 part read(8k)'), | |
1072 | (makepartreadnbytes(16384), b'bundle2 part read(16k)'), |
|
1088 | (makepartreadnbytes(16384), b'bundle2 part read(16k)'), | |
1073 | (makepartreadnbytes(32768), b'bundle2 part read(32k)'), |
|
1089 | (makepartreadnbytes(32768), b'bundle2 part read(32k)'), | |
1074 | (makepartreadnbytes(131072), b'bundle2 part read(128k)'), |
|
1090 | (makepartreadnbytes(131072), b'bundle2 part read(128k)'), | |
1075 | ] |
|
1091 | ] | |
1076 | ) |
|
1092 | ) | |
1077 | elif isinstance(bundle, streamclone.streamcloneapplier): |
|
1093 | elif isinstance(bundle, streamclone.streamcloneapplier): | |
1078 | raise error.Abort(b'stream clone bundles not supported') |
|
1094 | raise error.Abort(b'stream clone bundles not supported') | |
1079 | else: |
|
1095 | else: | |
1080 | raise error.Abort(b'unhandled bundle type: %s' % type(bundle)) |
|
1096 | raise error.Abort(b'unhandled bundle type: %s' % type(bundle)) | |
1081 |
|
1097 | |||
1082 | for fn, title in benches: |
|
1098 | for fn, title in benches: | |
1083 | timer, fm = gettimer(ui, opts) |
|
1099 | timer, fm = gettimer(ui, opts) | |
1084 | timer(fn, title=title) |
|
1100 | timer(fn, title=title) | |
1085 | fm.end() |
|
1101 | fm.end() | |
1086 |
|
1102 | |||
1087 |
|
1103 | |||
1088 | @command( |
|
1104 | @command( | |
1089 | b'perf::changegroupchangelog|perfchangegroupchangelog', |
|
1105 | b'perf::changegroupchangelog|perfchangegroupchangelog', | |
1090 | formatteropts |
|
1106 | formatteropts | |
1091 | + [ |
|
1107 | + [ | |
1092 | (b'', b'cgversion', b'02', b'changegroup version'), |
|
1108 | (b'', b'cgversion', b'02', b'changegroup version'), | |
1093 | (b'r', b'rev', b'', b'revisions to add to changegroup'), |
|
1109 | (b'r', b'rev', b'', b'revisions to add to changegroup'), | |
1094 | ], |
|
1110 | ], | |
1095 | ) |
|
1111 | ) | |
1096 | def perfchangegroupchangelog(ui, repo, cgversion=b'02', rev=None, **opts): |
|
1112 | def perfchangegroupchangelog(ui, repo, cgversion=b'02', rev=None, **opts): | |
1097 | """Benchmark producing a changelog group for a changegroup. |
|
1113 | """Benchmark producing a changelog group for a changegroup. | |
1098 |
|
1114 | |||
1099 | This measures the time spent processing the changelog during a |
|
1115 | This measures the time spent processing the changelog during a | |
1100 | bundle operation. This occurs during `hg bundle` and on a server |
|
1116 | bundle operation. This occurs during `hg bundle` and on a server | |
1101 | processing a `getbundle` wire protocol request (handles clones |
|
1117 | processing a `getbundle` wire protocol request (handles clones | |
1102 | and pull requests). |
|
1118 | and pull requests). | |
1103 |
|
1119 | |||
1104 | By default, all revisions are added to the changegroup. |
|
1120 | By default, all revisions are added to the changegroup. | |
1105 | """ |
|
1121 | """ | |
1106 | opts = _byteskwargs(opts) |
|
1122 | opts = _byteskwargs(opts) | |
1107 | cl = repo.changelog |
|
1123 | cl = repo.changelog | |
1108 | nodes = [cl.lookup(r) for r in repo.revs(rev or b'all()')] |
|
1124 | nodes = [cl.lookup(r) for r in repo.revs(rev or b'all()')] | |
1109 | bundler = changegroup.getbundler(cgversion, repo) |
|
1125 | bundler = changegroup.getbundler(cgversion, repo) | |
1110 |
|
1126 | |||
1111 | def d(): |
|
1127 | def d(): | |
1112 | state, chunks = bundler._generatechangelog(cl, nodes) |
|
1128 | state, chunks = bundler._generatechangelog(cl, nodes) | |
1113 | for chunk in chunks: |
|
1129 | for chunk in chunks: | |
1114 | pass |
|
1130 | pass | |
1115 |
|
1131 | |||
1116 | timer, fm = gettimer(ui, opts) |
|
1132 | timer, fm = gettimer(ui, opts) | |
1117 |
|
1133 | |||
1118 | # Terminal printing can interfere with timing. So disable it. |
|
1134 | # Terminal printing can interfere with timing. So disable it. | |
1119 | with ui.configoverride({(b'progress', b'disable'): True}): |
|
1135 | with ui.configoverride({(b'progress', b'disable'): True}): | |
1120 | timer(d) |
|
1136 | timer(d) | |
1121 |
|
1137 | |||
1122 | fm.end() |
|
1138 | fm.end() | |
1123 |
|
1139 | |||
1124 |
|
1140 | |||
1125 | @command(b'perf::dirs|perfdirs', formatteropts) |
|
1141 | @command(b'perf::dirs|perfdirs', formatteropts) | |
1126 | def perfdirs(ui, repo, **opts): |
|
1142 | def perfdirs(ui, repo, **opts): | |
1127 | opts = _byteskwargs(opts) |
|
1143 | opts = _byteskwargs(opts) | |
1128 | timer, fm = gettimer(ui, opts) |
|
1144 | timer, fm = gettimer(ui, opts) | |
1129 | dirstate = repo.dirstate |
|
1145 | dirstate = repo.dirstate | |
1130 | b'a' in dirstate |
|
1146 | b'a' in dirstate | |
1131 |
|
1147 | |||
1132 | def d(): |
|
1148 | def d(): | |
1133 | dirstate.hasdir(b'a') |
|
1149 | dirstate.hasdir(b'a') | |
1134 | del dirstate._map._dirs |
|
1150 | try: | |
|
1151 | del dirstate._map._dirs | |||
|
1152 | except AttributeError: | |||
|
1153 | pass | |||
1135 |
|
1154 | |||
1136 | timer(d) |
|
1155 | timer(d) | |
1137 | fm.end() |
|
1156 | fm.end() | |
1138 |
|
1157 | |||
1139 |
|
1158 | |||
1140 | @command( |
|
1159 | @command( | |
1141 | b'perf::dirstate|perfdirstate', |
|
1160 | b'perf::dirstate|perfdirstate', | |
1142 | [ |
|
1161 | [ | |
1143 | ( |
|
1162 | ( | |
1144 | b'', |
|
1163 | b'', | |
1145 | b'iteration', |
|
1164 | b'iteration', | |
1146 | None, |
|
1165 | None, | |
1147 | b'benchmark a full iteration for the dirstate', |
|
1166 | b'benchmark a full iteration for the dirstate', | |
1148 | ), |
|
1167 | ), | |
1149 | ( |
|
1168 | ( | |
1150 | b'', |
|
1169 | b'', | |
1151 | b'contains', |
|
1170 | b'contains', | |
1152 | None, |
|
1171 | None, | |
1153 | b'benchmark a large amount of `nf in dirstate` calls', |
|
1172 | b'benchmark a large amount of `nf in dirstate` calls', | |
1154 | ), |
|
1173 | ), | |
1155 | ] |
|
1174 | ] | |
1156 | + formatteropts, |
|
1175 | + formatteropts, | |
1157 | ) |
|
1176 | ) | |
1158 | def perfdirstate(ui, repo, **opts): |
|
1177 | def perfdirstate(ui, repo, **opts): | |
1159 | """benchmap the time of various distate operations |
|
1178 | """benchmap the time of various distate operations | |
1160 |
|
1179 | |||
1161 | By default benchmark the time necessary to load a dirstate from scratch. |
|
1180 | By default benchmark the time necessary to load a dirstate from scratch. | |
1162 | The dirstate is loaded to the point were a "contains" request can be |
|
1181 | The dirstate is loaded to the point were a "contains" request can be | |
1163 | answered. |
|
1182 | answered. | |
1164 | """ |
|
1183 | """ | |
1165 | opts = _byteskwargs(opts) |
|
1184 | opts = _byteskwargs(opts) | |
1166 | timer, fm = gettimer(ui, opts) |
|
1185 | timer, fm = gettimer(ui, opts) | |
1167 | b"a" in repo.dirstate |
|
1186 | b"a" in repo.dirstate | |
1168 |
|
1187 | |||
1169 | if opts[b'iteration'] and opts[b'contains']: |
|
1188 | if opts[b'iteration'] and opts[b'contains']: | |
1170 | msg = b'only specify one of --iteration or --contains' |
|
1189 | msg = b'only specify one of --iteration or --contains' | |
1171 | raise error.Abort(msg) |
|
1190 | raise error.Abort(msg) | |
1172 |
|
1191 | |||
1173 | if opts[b'iteration']: |
|
1192 | if opts[b'iteration']: | |
1174 | setup = None |
|
1193 | setup = None | |
1175 | dirstate = repo.dirstate |
|
1194 | dirstate = repo.dirstate | |
1176 |
|
1195 | |||
1177 | def d(): |
|
1196 | def d(): | |
1178 | for f in dirstate: |
|
1197 | for f in dirstate: | |
1179 | pass |
|
1198 | pass | |
1180 |
|
1199 | |||
1181 | elif opts[b'contains']: |
|
1200 | elif opts[b'contains']: | |
1182 | setup = None |
|
1201 | setup = None | |
1183 | dirstate = repo.dirstate |
|
1202 | dirstate = repo.dirstate | |
1184 | allfiles = list(dirstate) |
|
1203 | allfiles = list(dirstate) | |
1185 | # also add file path that will be "missing" from the dirstate |
|
1204 | # also add file path that will be "missing" from the dirstate | |
1186 | allfiles.extend([f[::-1] for f in allfiles]) |
|
1205 | allfiles.extend([f[::-1] for f in allfiles]) | |
1187 |
|
1206 | |||
1188 | def d(): |
|
1207 | def d(): | |
1189 | for f in allfiles: |
|
1208 | for f in allfiles: | |
1190 | f in dirstate |
|
1209 | f in dirstate | |
1191 |
|
1210 | |||
1192 | else: |
|
1211 | else: | |
1193 |
|
1212 | |||
1194 | def setup(): |
|
1213 | def setup(): | |
1195 | repo.dirstate.invalidate() |
|
1214 | repo.dirstate.invalidate() | |
1196 |
|
1215 | |||
1197 | def d(): |
|
1216 | def d(): | |
1198 | b"a" in repo.dirstate |
|
1217 | b"a" in repo.dirstate | |
1199 |
|
1218 | |||
1200 | timer(d, setup=setup) |
|
1219 | timer(d, setup=setup) | |
1201 | fm.end() |
|
1220 | fm.end() | |
1202 |
|
1221 | |||
1203 |
|
1222 | |||
1204 | @command(b'perf::dirstatedirs|perfdirstatedirs', formatteropts) |
|
1223 | @command(b'perf::dirstatedirs|perfdirstatedirs', formatteropts) | |
1205 | def perfdirstatedirs(ui, repo, **opts): |
|
1224 | def perfdirstatedirs(ui, repo, **opts): | |
1206 | """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache""" |
|
1225 | """benchmap a 'dirstate.hasdir' call from an empty `dirs` cache""" | |
1207 | opts = _byteskwargs(opts) |
|
1226 | opts = _byteskwargs(opts) | |
1208 | timer, fm = gettimer(ui, opts) |
|
1227 | timer, fm = gettimer(ui, opts) | |
1209 | repo.dirstate.hasdir(b"a") |
|
1228 | repo.dirstate.hasdir(b"a") | |
1210 |
|
1229 | |||
1211 | def setup(): |
|
1230 | def setup(): | |
1212 | del repo.dirstate._map._dirs |
|
1231 | try: | |
|
1232 | del repo.dirstate._map._dirs | |||
|
1233 | except AttributeError: | |||
|
1234 | pass | |||
1213 |
|
1235 | |||
1214 | def d(): |
|
1236 | def d(): | |
1215 | repo.dirstate.hasdir(b"a") |
|
1237 | repo.dirstate.hasdir(b"a") | |
1216 |
|
1238 | |||
1217 | timer(d, setup=setup) |
|
1239 | timer(d, setup=setup) | |
1218 | fm.end() |
|
1240 | fm.end() | |
1219 |
|
1241 | |||
1220 |
|
1242 | |||
1221 | @command(b'perf::dirstatefoldmap|perfdirstatefoldmap', formatteropts) |
|
1243 | @command(b'perf::dirstatefoldmap|perfdirstatefoldmap', formatteropts) | |
1222 | def perfdirstatefoldmap(ui, repo, **opts): |
|
1244 | def perfdirstatefoldmap(ui, repo, **opts): | |
1223 | """benchmap a `dirstate._map.filefoldmap.get()` request |
|
1245 | """benchmap a `dirstate._map.filefoldmap.get()` request | |
1224 |
|
1246 | |||
1225 | The dirstate filefoldmap cache is dropped between every request. |
|
1247 | The dirstate filefoldmap cache is dropped between every request. | |
1226 | """ |
|
1248 | """ | |
1227 | opts = _byteskwargs(opts) |
|
1249 | opts = _byteskwargs(opts) | |
1228 | timer, fm = gettimer(ui, opts) |
|
1250 | timer, fm = gettimer(ui, opts) | |
1229 | dirstate = repo.dirstate |
|
1251 | dirstate = repo.dirstate | |
1230 | dirstate._map.filefoldmap.get(b'a') |
|
1252 | dirstate._map.filefoldmap.get(b'a') | |
1231 |
|
1253 | |||
1232 | def setup(): |
|
1254 | def setup(): | |
1233 | del dirstate._map.filefoldmap |
|
1255 | del dirstate._map.filefoldmap | |
1234 |
|
1256 | |||
1235 | def d(): |
|
1257 | def d(): | |
1236 | dirstate._map.filefoldmap.get(b'a') |
|
1258 | dirstate._map.filefoldmap.get(b'a') | |
1237 |
|
1259 | |||
1238 | timer(d, setup=setup) |
|
1260 | timer(d, setup=setup) | |
1239 | fm.end() |
|
1261 | fm.end() | |
1240 |
|
1262 | |||
1241 |
|
1263 | |||
1242 | @command(b'perf::dirfoldmap|perfdirfoldmap', formatteropts) |
|
1264 | @command(b'perf::dirfoldmap|perfdirfoldmap', formatteropts) | |
1243 | def perfdirfoldmap(ui, repo, **opts): |
|
1265 | def perfdirfoldmap(ui, repo, **opts): | |
1244 | """benchmap a `dirstate._map.dirfoldmap.get()` request |
|
1266 | """benchmap a `dirstate._map.dirfoldmap.get()` request | |
1245 |
|
1267 | |||
1246 | The dirstate dirfoldmap cache is dropped between every request. |
|
1268 | The dirstate dirfoldmap cache is dropped between every request. | |
1247 | """ |
|
1269 | """ | |
1248 | opts = _byteskwargs(opts) |
|
1270 | opts = _byteskwargs(opts) | |
1249 | timer, fm = gettimer(ui, opts) |
|
1271 | timer, fm = gettimer(ui, opts) | |
1250 | dirstate = repo.dirstate |
|
1272 | dirstate = repo.dirstate | |
1251 | dirstate._map.dirfoldmap.get(b'a') |
|
1273 | dirstate._map.dirfoldmap.get(b'a') | |
1252 |
|
1274 | |||
1253 | def setup(): |
|
1275 | def setup(): | |
1254 | del dirstate._map.dirfoldmap |
|
1276 | del dirstate._map.dirfoldmap | |
1255 | del dirstate._map._dirs |
|
1277 | try: | |
|
1278 | del dirstate._map._dirs | |||
|
1279 | except AttributeError: | |||
|
1280 | pass | |||
1256 |
|
1281 | |||
1257 | def d(): |
|
1282 | def d(): | |
1258 | dirstate._map.dirfoldmap.get(b'a') |
|
1283 | dirstate._map.dirfoldmap.get(b'a') | |
1259 |
|
1284 | |||
1260 | timer(d, setup=setup) |
|
1285 | timer(d, setup=setup) | |
1261 | fm.end() |
|
1286 | fm.end() | |
1262 |
|
1287 | |||
1263 |
|
1288 | |||
1264 | @command(b'perf::dirstatewrite|perfdirstatewrite', formatteropts) |
|
1289 | @command(b'perf::dirstatewrite|perfdirstatewrite', formatteropts) | |
1265 | def perfdirstatewrite(ui, repo, **opts): |
|
1290 | def perfdirstatewrite(ui, repo, **opts): | |
1266 | """benchmap the time it take to write a dirstate on disk""" |
|
1291 | """benchmap the time it take to write a dirstate on disk""" | |
1267 | opts = _byteskwargs(opts) |
|
1292 | opts = _byteskwargs(opts) | |
1268 | timer, fm = gettimer(ui, opts) |
|
1293 | timer, fm = gettimer(ui, opts) | |
1269 | ds = repo.dirstate |
|
1294 | ds = repo.dirstate | |
1270 | b"a" in ds |
|
1295 | b"a" in ds | |
1271 |
|
1296 | |||
1272 | def setup(): |
|
1297 | def setup(): | |
1273 | ds._dirty = True |
|
1298 | ds._dirty = True | |
1274 |
|
1299 | |||
1275 | def d(): |
|
1300 | def d(): | |
1276 | ds.write(repo.currenttransaction()) |
|
1301 | ds.write(repo.currenttransaction()) | |
1277 |
|
1302 | |||
1278 | timer(d, setup=setup) |
|
1303 | timer(d, setup=setup) | |
1279 | fm.end() |
|
1304 | fm.end() | |
1280 |
|
1305 | |||
1281 |
|
1306 | |||
1282 | def _getmergerevs(repo, opts): |
|
1307 | def _getmergerevs(repo, opts): | |
1283 | """parse command argument to return rev involved in merge |
|
1308 | """parse command argument to return rev involved in merge | |
1284 |
|
1309 | |||
1285 | input: options dictionnary with `rev`, `from` and `bse` |
|
1310 | input: options dictionnary with `rev`, `from` and `bse` | |
1286 | output: (localctx, otherctx, basectx) |
|
1311 | output: (localctx, otherctx, basectx) | |
1287 | """ |
|
1312 | """ | |
1288 | if opts[b'from']: |
|
1313 | if opts[b'from']: | |
1289 | fromrev = scmutil.revsingle(repo, opts[b'from']) |
|
1314 | fromrev = scmutil.revsingle(repo, opts[b'from']) | |
1290 | wctx = repo[fromrev] |
|
1315 | wctx = repo[fromrev] | |
1291 | else: |
|
1316 | else: | |
1292 | wctx = repo[None] |
|
1317 | wctx = repo[None] | |
1293 | # we don't want working dir files to be stat'd in the benchmark, so |
|
1318 | # we don't want working dir files to be stat'd in the benchmark, so | |
1294 | # prime that cache |
|
1319 | # prime that cache | |
1295 | wctx.dirty() |
|
1320 | wctx.dirty() | |
1296 | rctx = scmutil.revsingle(repo, opts[b'rev'], opts[b'rev']) |
|
1321 | rctx = scmutil.revsingle(repo, opts[b'rev'], opts[b'rev']) | |
1297 | if opts[b'base']: |
|
1322 | if opts[b'base']: | |
1298 | fromrev = scmutil.revsingle(repo, opts[b'base']) |
|
1323 | fromrev = scmutil.revsingle(repo, opts[b'base']) | |
1299 | ancestor = repo[fromrev] |
|
1324 | ancestor = repo[fromrev] | |
1300 | else: |
|
1325 | else: | |
1301 | ancestor = wctx.ancestor(rctx) |
|
1326 | ancestor = wctx.ancestor(rctx) | |
1302 | return (wctx, rctx, ancestor) |
|
1327 | return (wctx, rctx, ancestor) | |
1303 |
|
1328 | |||
1304 |
|
1329 | |||
1305 | @command( |
|
1330 | @command( | |
1306 | b'perf::mergecalculate|perfmergecalculate', |
|
1331 | b'perf::mergecalculate|perfmergecalculate', | |
1307 | [ |
|
1332 | [ | |
1308 | (b'r', b'rev', b'.', b'rev to merge against'), |
|
1333 | (b'r', b'rev', b'.', b'rev to merge against'), | |
1309 | (b'', b'from', b'', b'rev to merge from'), |
|
1334 | (b'', b'from', b'', b'rev to merge from'), | |
1310 | (b'', b'base', b'', b'the revision to use as base'), |
|
1335 | (b'', b'base', b'', b'the revision to use as base'), | |
1311 | ] |
|
1336 | ] | |
1312 | + formatteropts, |
|
1337 | + formatteropts, | |
1313 | ) |
|
1338 | ) | |
1314 | def perfmergecalculate(ui, repo, **opts): |
|
1339 | def perfmergecalculate(ui, repo, **opts): | |
1315 | opts = _byteskwargs(opts) |
|
1340 | opts = _byteskwargs(opts) | |
1316 | timer, fm = gettimer(ui, opts) |
|
1341 | timer, fm = gettimer(ui, opts) | |
1317 |
|
1342 | |||
1318 | wctx, rctx, ancestor = _getmergerevs(repo, opts) |
|
1343 | wctx, rctx, ancestor = _getmergerevs(repo, opts) | |
1319 |
|
1344 | |||
1320 | def d(): |
|
1345 | def d(): | |
1321 | # acceptremote is True because we don't want prompts in the middle of |
|
1346 | # acceptremote is True because we don't want prompts in the middle of | |
1322 | # our benchmark |
|
1347 | # our benchmark | |
1323 | merge.calculateupdates( |
|
1348 | merge.calculateupdates( | |
1324 | repo, |
|
1349 | repo, | |
1325 | wctx, |
|
1350 | wctx, | |
1326 | rctx, |
|
1351 | rctx, | |
1327 | [ancestor], |
|
1352 | [ancestor], | |
1328 | branchmerge=False, |
|
1353 | branchmerge=False, | |
1329 | force=False, |
|
1354 | force=False, | |
1330 | acceptremote=True, |
|
1355 | acceptremote=True, | |
1331 | followcopies=True, |
|
1356 | followcopies=True, | |
1332 | ) |
|
1357 | ) | |
1333 |
|
1358 | |||
1334 | timer(d) |
|
1359 | timer(d) | |
1335 | fm.end() |
|
1360 | fm.end() | |
1336 |
|
1361 | |||
1337 |
|
1362 | |||
1338 | @command( |
|
1363 | @command( | |
1339 | b'perf::mergecopies|perfmergecopies', |
|
1364 | b'perf::mergecopies|perfmergecopies', | |
1340 | [ |
|
1365 | [ | |
1341 | (b'r', b'rev', b'.', b'rev to merge against'), |
|
1366 | (b'r', b'rev', b'.', b'rev to merge against'), | |
1342 | (b'', b'from', b'', b'rev to merge from'), |
|
1367 | (b'', b'from', b'', b'rev to merge from'), | |
1343 | (b'', b'base', b'', b'the revision to use as base'), |
|
1368 | (b'', b'base', b'', b'the revision to use as base'), | |
1344 | ] |
|
1369 | ] | |
1345 | + formatteropts, |
|
1370 | + formatteropts, | |
1346 | ) |
|
1371 | ) | |
1347 | def perfmergecopies(ui, repo, **opts): |
|
1372 | def perfmergecopies(ui, repo, **opts): | |
1348 | """measure runtime of `copies.mergecopies`""" |
|
1373 | """measure runtime of `copies.mergecopies`""" | |
1349 | opts = _byteskwargs(opts) |
|
1374 | opts = _byteskwargs(opts) | |
1350 | timer, fm = gettimer(ui, opts) |
|
1375 | timer, fm = gettimer(ui, opts) | |
1351 | wctx, rctx, ancestor = _getmergerevs(repo, opts) |
|
1376 | wctx, rctx, ancestor = _getmergerevs(repo, opts) | |
1352 |
|
1377 | |||
1353 | def d(): |
|
1378 | def d(): | |
1354 | # acceptremote is True because we don't want prompts in the middle of |
|
1379 | # acceptremote is True because we don't want prompts in the middle of | |
1355 | # our benchmark |
|
1380 | # our benchmark | |
1356 | copies.mergecopies(repo, wctx, rctx, ancestor) |
|
1381 | copies.mergecopies(repo, wctx, rctx, ancestor) | |
1357 |
|
1382 | |||
1358 | timer(d) |
|
1383 | timer(d) | |
1359 | fm.end() |
|
1384 | fm.end() | |
1360 |
|
1385 | |||
1361 |
|
1386 | |||
1362 | @command(b'perf::pathcopies|perfpathcopies', [], b"REV REV") |
|
1387 | @command(b'perf::pathcopies|perfpathcopies', [], b"REV REV") | |
1363 | def perfpathcopies(ui, repo, rev1, rev2, **opts): |
|
1388 | def perfpathcopies(ui, repo, rev1, rev2, **opts): | |
1364 | """benchmark the copy tracing logic""" |
|
1389 | """benchmark the copy tracing logic""" | |
1365 | opts = _byteskwargs(opts) |
|
1390 | opts = _byteskwargs(opts) | |
1366 | timer, fm = gettimer(ui, opts) |
|
1391 | timer, fm = gettimer(ui, opts) | |
1367 | ctx1 = scmutil.revsingle(repo, rev1, rev1) |
|
1392 | ctx1 = scmutil.revsingle(repo, rev1, rev1) | |
1368 | ctx2 = scmutil.revsingle(repo, rev2, rev2) |
|
1393 | ctx2 = scmutil.revsingle(repo, rev2, rev2) | |
1369 |
|
1394 | |||
1370 | def d(): |
|
1395 | def d(): | |
1371 | copies.pathcopies(ctx1, ctx2) |
|
1396 | copies.pathcopies(ctx1, ctx2) | |
1372 |
|
1397 | |||
1373 | timer(d) |
|
1398 | timer(d) | |
1374 | fm.end() |
|
1399 | fm.end() | |
1375 |
|
1400 | |||
1376 |
|
1401 | |||
1377 | @command( |
|
1402 | @command( | |
1378 | b'perf::phases|perfphases', |
|
1403 | b'perf::phases|perfphases', | |
1379 | [ |
|
1404 | [ | |
1380 | (b'', b'full', False, b'include file reading time too'), |
|
1405 | (b'', b'full', False, b'include file reading time too'), | |
1381 | ], |
|
1406 | ], | |
1382 | b"", |
|
1407 | b"", | |
1383 | ) |
|
1408 | ) | |
1384 | def perfphases(ui, repo, **opts): |
|
1409 | def perfphases(ui, repo, **opts): | |
1385 | """benchmark phasesets computation""" |
|
1410 | """benchmark phasesets computation""" | |
1386 | opts = _byteskwargs(opts) |
|
1411 | opts = _byteskwargs(opts) | |
1387 | timer, fm = gettimer(ui, opts) |
|
1412 | timer, fm = gettimer(ui, opts) | |
1388 | _phases = repo._phasecache |
|
1413 | _phases = repo._phasecache | |
1389 | full = opts.get(b'full') |
|
1414 | full = opts.get(b'full') | |
1390 |
|
1415 | |||
1391 | def d(): |
|
1416 | def d(): | |
1392 | phases = _phases |
|
1417 | phases = _phases | |
1393 | if full: |
|
1418 | if full: | |
1394 | clearfilecache(repo, b'_phasecache') |
|
1419 | clearfilecache(repo, b'_phasecache') | |
1395 | phases = repo._phasecache |
|
1420 | phases = repo._phasecache | |
1396 | phases.invalidate() |
|
1421 | phases.invalidate() | |
1397 | phases.loadphaserevs(repo) |
|
1422 | phases.loadphaserevs(repo) | |
1398 |
|
1423 | |||
1399 | timer(d) |
|
1424 | timer(d) | |
1400 | fm.end() |
|
1425 | fm.end() | |
1401 |
|
1426 | |||
1402 |
|
1427 | |||
1403 | @command(b'perf::phasesremote|perfphasesremote', [], b"[DEST]") |
|
1428 | @command(b'perf::phasesremote|perfphasesremote', [], b"[DEST]") | |
1404 | def perfphasesremote(ui, repo, dest=None, **opts): |
|
1429 | def perfphasesremote(ui, repo, dest=None, **opts): | |
1405 | """benchmark time needed to analyse phases of the remote server""" |
|
1430 | """benchmark time needed to analyse phases of the remote server""" | |
1406 | from mercurial.node import bin |
|
1431 | from mercurial.node import bin | |
1407 | from mercurial import ( |
|
1432 | from mercurial import ( | |
1408 | exchange, |
|
1433 | exchange, | |
1409 | hg, |
|
1434 | hg, | |
1410 | phases, |
|
1435 | phases, | |
1411 | ) |
|
1436 | ) | |
1412 |
|
1437 | |||
1413 | opts = _byteskwargs(opts) |
|
1438 | opts = _byteskwargs(opts) | |
1414 | timer, fm = gettimer(ui, opts) |
|
1439 | timer, fm = gettimer(ui, opts) | |
1415 |
|
1440 | |||
1416 | path = ui.getpath(dest, default=(b'default-push', b'default')) |
|
1441 | path = ui.getpath(dest, default=(b'default-push', b'default')) | |
1417 | if not path: |
|
1442 | if not path: | |
1418 | raise error.Abort( |
|
1443 | raise error.Abort( | |
1419 | b'default repository not configured!', |
|
1444 | b'default repository not configured!', | |
1420 | hint=b"see 'hg help config.paths'", |
|
1445 | hint=b"see 'hg help config.paths'", | |
1421 | ) |
|
1446 | ) | |
1422 | dest = path.pushloc or path.loc |
|
1447 | dest = path.pushloc or path.loc | |
1423 | ui.statusnoi18n(b'analysing phase of %s\n' % util.hidepassword(dest)) |
|
1448 | ui.statusnoi18n(b'analysing phase of %s\n' % util.hidepassword(dest)) | |
1424 | other = hg.peer(repo, opts, dest) |
|
1449 | other = hg.peer(repo, opts, dest) | |
1425 |
|
1450 | |||
1426 | # easier to perform discovery through the operation |
|
1451 | # easier to perform discovery through the operation | |
1427 | op = exchange.pushoperation(repo, other) |
|
1452 | op = exchange.pushoperation(repo, other) | |
1428 | exchange._pushdiscoverychangeset(op) |
|
1453 | exchange._pushdiscoverychangeset(op) | |
1429 |
|
1454 | |||
1430 | remotesubset = op.fallbackheads |
|
1455 | remotesubset = op.fallbackheads | |
1431 |
|
1456 | |||
1432 | with other.commandexecutor() as e: |
|
1457 | with other.commandexecutor() as e: | |
1433 | remotephases = e.callcommand( |
|
1458 | remotephases = e.callcommand( | |
1434 | b'listkeys', {b'namespace': b'phases'} |
|
1459 | b'listkeys', {b'namespace': b'phases'} | |
1435 | ).result() |
|
1460 | ).result() | |
1436 | del other |
|
1461 | del other | |
1437 | publishing = remotephases.get(b'publishing', False) |
|
1462 | publishing = remotephases.get(b'publishing', False) | |
1438 | if publishing: |
|
1463 | if publishing: | |
1439 | ui.statusnoi18n(b'publishing: yes\n') |
|
1464 | ui.statusnoi18n(b'publishing: yes\n') | |
1440 | else: |
|
1465 | else: | |
1441 | ui.statusnoi18n(b'publishing: no\n') |
|
1466 | ui.statusnoi18n(b'publishing: no\n') | |
1442 |
|
1467 | |||
1443 | has_node = getattr(repo.changelog.index, 'has_node', None) |
|
1468 | has_node = getattr(repo.changelog.index, 'has_node', None) | |
1444 | if has_node is None: |
|
1469 | if has_node is None: | |
1445 | has_node = repo.changelog.nodemap.__contains__ |
|
1470 | has_node = repo.changelog.nodemap.__contains__ | |
1446 | nonpublishroots = 0 |
|
1471 | nonpublishroots = 0 | |
1447 | for nhex, phase in remotephases.iteritems(): |
|
1472 | for nhex, phase in remotephases.iteritems(): | |
1448 | if nhex == b'publishing': # ignore data related to publish option |
|
1473 | if nhex == b'publishing': # ignore data related to publish option | |
1449 | continue |
|
1474 | continue | |
1450 | node = bin(nhex) |
|
1475 | node = bin(nhex) | |
1451 | if has_node(node) and int(phase): |
|
1476 | if has_node(node) and int(phase): | |
1452 | nonpublishroots += 1 |
|
1477 | nonpublishroots += 1 | |
1453 | ui.statusnoi18n(b'number of roots: %d\n' % len(remotephases)) |
|
1478 | ui.statusnoi18n(b'number of roots: %d\n' % len(remotephases)) | |
1454 | ui.statusnoi18n(b'number of known non public roots: %d\n' % nonpublishroots) |
|
1479 | ui.statusnoi18n(b'number of known non public roots: %d\n' % nonpublishroots) | |
1455 |
|
1480 | |||
1456 | def d(): |
|
1481 | def d(): | |
1457 | phases.remotephasessummary(repo, remotesubset, remotephases) |
|
1482 | phases.remotephasessummary(repo, remotesubset, remotephases) | |
1458 |
|
1483 | |||
1459 | timer(d) |
|
1484 | timer(d) | |
1460 | fm.end() |
|
1485 | fm.end() | |
1461 |
|
1486 | |||
1462 |
|
1487 | |||
1463 | @command( |
|
1488 | @command( | |
1464 | b'perf::manifest|perfmanifest', |
|
1489 | b'perf::manifest|perfmanifest', | |
1465 | [ |
|
1490 | [ | |
1466 | (b'm', b'manifest-rev', False, b'Look up a manifest node revision'), |
|
1491 | (b'm', b'manifest-rev', False, b'Look up a manifest node revision'), | |
1467 | (b'', b'clear-disk', False, b'clear on-disk caches too'), |
|
1492 | (b'', b'clear-disk', False, b'clear on-disk caches too'), | |
1468 | ] |
|
1493 | ] | |
1469 | + formatteropts, |
|
1494 | + formatteropts, | |
1470 | b'REV|NODE', |
|
1495 | b'REV|NODE', | |
1471 | ) |
|
1496 | ) | |
1472 | def perfmanifest(ui, repo, rev, manifest_rev=False, clear_disk=False, **opts): |
|
1497 | def perfmanifest(ui, repo, rev, manifest_rev=False, clear_disk=False, **opts): | |
1473 | """benchmark the time to read a manifest from disk and return a usable |
|
1498 | """benchmark the time to read a manifest from disk and return a usable | |
1474 | dict-like object |
|
1499 | dict-like object | |
1475 |
|
1500 | |||
1476 | Manifest caches are cleared before retrieval.""" |
|
1501 | Manifest caches are cleared before retrieval.""" | |
1477 | opts = _byteskwargs(opts) |
|
1502 | opts = _byteskwargs(opts) | |
1478 | timer, fm = gettimer(ui, opts) |
|
1503 | timer, fm = gettimer(ui, opts) | |
1479 | if not manifest_rev: |
|
1504 | if not manifest_rev: | |
1480 | ctx = scmutil.revsingle(repo, rev, rev) |
|
1505 | ctx = scmutil.revsingle(repo, rev, rev) | |
1481 | t = ctx.manifestnode() |
|
1506 | t = ctx.manifestnode() | |
1482 | else: |
|
1507 | else: | |
1483 | from mercurial.node import bin |
|
1508 | from mercurial.node import bin | |
1484 |
|
1509 | |||
1485 | if len(rev) == 40: |
|
1510 | if len(rev) == 40: | |
1486 | t = bin(rev) |
|
1511 | t = bin(rev) | |
1487 | else: |
|
1512 | else: | |
1488 | try: |
|
1513 | try: | |
1489 | rev = int(rev) |
|
1514 | rev = int(rev) | |
1490 |
|
1515 | |||
1491 | if util.safehasattr(repo.manifestlog, b'getstorage'): |
|
1516 | if util.safehasattr(repo.manifestlog, b'getstorage'): | |
1492 | t = repo.manifestlog.getstorage(b'').node(rev) |
|
1517 | t = repo.manifestlog.getstorage(b'').node(rev) | |
1493 | else: |
|
1518 | else: | |
1494 | t = repo.manifestlog._revlog.lookup(rev) |
|
1519 | t = repo.manifestlog._revlog.lookup(rev) | |
1495 | except ValueError: |
|
1520 | except ValueError: | |
1496 | raise error.Abort( |
|
1521 | raise error.Abort( | |
1497 | b'manifest revision must be integer or full node' |
|
1522 | b'manifest revision must be integer or full node' | |
1498 | ) |
|
1523 | ) | |
1499 |
|
1524 | |||
1500 | def d(): |
|
1525 | def d(): | |
1501 | repo.manifestlog.clearcaches(clear_persisted_data=clear_disk) |
|
1526 | repo.manifestlog.clearcaches(clear_persisted_data=clear_disk) | |
1502 | repo.manifestlog[t].read() |
|
1527 | repo.manifestlog[t].read() | |
1503 |
|
1528 | |||
1504 | timer(d) |
|
1529 | timer(d) | |
1505 | fm.end() |
|
1530 | fm.end() | |
1506 |
|
1531 | |||
1507 |
|
1532 | |||
1508 | @command(b'perf::changeset|perfchangeset', formatteropts) |
|
1533 | @command(b'perf::changeset|perfchangeset', formatteropts) | |
1509 | def perfchangeset(ui, repo, rev, **opts): |
|
1534 | def perfchangeset(ui, repo, rev, **opts): | |
1510 | opts = _byteskwargs(opts) |
|
1535 | opts = _byteskwargs(opts) | |
1511 | timer, fm = gettimer(ui, opts) |
|
1536 | timer, fm = gettimer(ui, opts) | |
1512 | n = scmutil.revsingle(repo, rev).node() |
|
1537 | n = scmutil.revsingle(repo, rev).node() | |
1513 |
|
1538 | |||
1514 | def d(): |
|
1539 | def d(): | |
1515 | repo.changelog.read(n) |
|
1540 | repo.changelog.read(n) | |
1516 | # repo.changelog._cache = None |
|
1541 | # repo.changelog._cache = None | |
1517 |
|
1542 | |||
1518 | timer(d) |
|
1543 | timer(d) | |
1519 | fm.end() |
|
1544 | fm.end() | |
1520 |
|
1545 | |||
1521 |
|
1546 | |||
1522 | @command(b'perf::ignore|perfignore', formatteropts) |
|
1547 | @command(b'perf::ignore|perfignore', formatteropts) | |
1523 | def perfignore(ui, repo, **opts): |
|
1548 | def perfignore(ui, repo, **opts): | |
1524 | """benchmark operation related to computing ignore""" |
|
1549 | """benchmark operation related to computing ignore""" | |
1525 | opts = _byteskwargs(opts) |
|
1550 | opts = _byteskwargs(opts) | |
1526 | timer, fm = gettimer(ui, opts) |
|
1551 | timer, fm = gettimer(ui, opts) | |
1527 | dirstate = repo.dirstate |
|
1552 | dirstate = repo.dirstate | |
1528 |
|
1553 | |||
1529 | def setupone(): |
|
1554 | def setupone(): | |
1530 | dirstate.invalidate() |
|
1555 | dirstate.invalidate() | |
1531 | clearfilecache(dirstate, b'_ignore') |
|
1556 | clearfilecache(dirstate, b'_ignore') | |
1532 |
|
1557 | |||
1533 | def runone(): |
|
1558 | def runone(): | |
1534 | dirstate._ignore |
|
1559 | dirstate._ignore | |
1535 |
|
1560 | |||
1536 | timer(runone, setup=setupone, title=b"load") |
|
1561 | timer(runone, setup=setupone, title=b"load") | |
1537 | fm.end() |
|
1562 | fm.end() | |
1538 |
|
1563 | |||
1539 |
|
1564 | |||
1540 | @command( |
|
1565 | @command( | |
1541 | b'perf::index|perfindex', |
|
1566 | b'perf::index|perfindex', | |
1542 | [ |
|
1567 | [ | |
1543 | (b'', b'rev', [], b'revision to be looked up (default tip)'), |
|
1568 | (b'', b'rev', [], b'revision to be looked up (default tip)'), | |
1544 | (b'', b'no-lookup', None, b'do not revision lookup post creation'), |
|
1569 | (b'', b'no-lookup', None, b'do not revision lookup post creation'), | |
1545 | ] |
|
1570 | ] | |
1546 | + formatteropts, |
|
1571 | + formatteropts, | |
1547 | ) |
|
1572 | ) | |
1548 | def perfindex(ui, repo, **opts): |
|
1573 | def perfindex(ui, repo, **opts): | |
1549 | """benchmark index creation time followed by a lookup |
|
1574 | """benchmark index creation time followed by a lookup | |
1550 |
|
1575 | |||
1551 | The default is to look `tip` up. Depending on the index implementation, |
|
1576 | The default is to look `tip` up. Depending on the index implementation, | |
1552 | the revision looked up can matters. For example, an implementation |
|
1577 | the revision looked up can matters. For example, an implementation | |
1553 | scanning the index will have a faster lookup time for `--rev tip` than for |
|
1578 | scanning the index will have a faster lookup time for `--rev tip` than for | |
1554 | `--rev 0`. The number of looked up revisions and their order can also |
|
1579 | `--rev 0`. The number of looked up revisions and their order can also | |
1555 | matters. |
|
1580 | matters. | |
1556 |
|
1581 | |||
1557 | Example of useful set to test: |
|
1582 | Example of useful set to test: | |
1558 |
|
1583 | |||
1559 | * tip |
|
1584 | * tip | |
1560 | * 0 |
|
1585 | * 0 | |
1561 | * -10: |
|
1586 | * -10: | |
1562 | * :10 |
|
1587 | * :10 | |
1563 | * -10: + :10 |
|
1588 | * -10: + :10 | |
1564 | * :10: + -10: |
|
1589 | * :10: + -10: | |
1565 | * -10000: |
|
1590 | * -10000: | |
1566 | * -10000: + 0 |
|
1591 | * -10000: + 0 | |
1567 |
|
1592 | |||
1568 | It is not currently possible to check for lookup of a missing node. For |
|
1593 | It is not currently possible to check for lookup of a missing node. For | |
1569 | deeper lookup benchmarking, checkout the `perfnodemap` command.""" |
|
1594 | deeper lookup benchmarking, checkout the `perfnodemap` command.""" | |
1570 | import mercurial.revlog |
|
1595 | import mercurial.revlog | |
1571 |
|
1596 | |||
1572 | opts = _byteskwargs(opts) |
|
1597 | opts = _byteskwargs(opts) | |
1573 | timer, fm = gettimer(ui, opts) |
|
1598 | timer, fm = gettimer(ui, opts) | |
1574 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg |
|
1599 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg | |
1575 | if opts[b'no_lookup']: |
|
1600 | if opts[b'no_lookup']: | |
1576 | if opts['rev']: |
|
1601 | if opts['rev']: | |
1577 | raise error.Abort('--no-lookup and --rev are mutually exclusive') |
|
1602 | raise error.Abort('--no-lookup and --rev are mutually exclusive') | |
1578 | nodes = [] |
|
1603 | nodes = [] | |
1579 | elif not opts[b'rev']: |
|
1604 | elif not opts[b'rev']: | |
1580 | nodes = [repo[b"tip"].node()] |
|
1605 | nodes = [repo[b"tip"].node()] | |
1581 | else: |
|
1606 | else: | |
1582 | revs = scmutil.revrange(repo, opts[b'rev']) |
|
1607 | revs = scmutil.revrange(repo, opts[b'rev']) | |
1583 | cl = repo.changelog |
|
1608 | cl = repo.changelog | |
1584 | nodes = [cl.node(r) for r in revs] |
|
1609 | nodes = [cl.node(r) for r in revs] | |
1585 |
|
1610 | |||
1586 | unfi = repo.unfiltered() |
|
1611 | unfi = repo.unfiltered() | |
1587 | # find the filecache func directly |
|
1612 | # find the filecache func directly | |
1588 | # This avoid polluting the benchmark with the filecache logic |
|
1613 | # This avoid polluting the benchmark with the filecache logic | |
1589 | makecl = unfi.__class__.changelog.func |
|
1614 | makecl = unfi.__class__.changelog.func | |
1590 |
|
1615 | |||
1591 | def setup(): |
|
1616 | def setup(): | |
1592 | # probably not necessary, but for good measure |
|
1617 | # probably not necessary, but for good measure | |
1593 | clearchangelog(unfi) |
|
1618 | clearchangelog(unfi) | |
1594 |
|
1619 | |||
1595 | def d(): |
|
1620 | def d(): | |
1596 | cl = makecl(unfi) |
|
1621 | cl = makecl(unfi) | |
1597 | for n in nodes: |
|
1622 | for n in nodes: | |
1598 | cl.rev(n) |
|
1623 | cl.rev(n) | |
1599 |
|
1624 | |||
1600 | timer(d, setup=setup) |
|
1625 | timer(d, setup=setup) | |
1601 | fm.end() |
|
1626 | fm.end() | |
1602 |
|
1627 | |||
1603 |
|
1628 | |||
1604 | @command( |
|
1629 | @command( | |
1605 | b'perf::nodemap|perfnodemap', |
|
1630 | b'perf::nodemap|perfnodemap', | |
1606 | [ |
|
1631 | [ | |
1607 | (b'', b'rev', [], b'revision to be looked up (default tip)'), |
|
1632 | (b'', b'rev', [], b'revision to be looked up (default tip)'), | |
1608 | (b'', b'clear-caches', True, b'clear revlog cache between calls'), |
|
1633 | (b'', b'clear-caches', True, b'clear revlog cache between calls'), | |
1609 | ] |
|
1634 | ] | |
1610 | + formatteropts, |
|
1635 | + formatteropts, | |
1611 | ) |
|
1636 | ) | |
1612 | def perfnodemap(ui, repo, **opts): |
|
1637 | def perfnodemap(ui, repo, **opts): | |
1613 | """benchmark the time necessary to look up revision from a cold nodemap |
|
1638 | """benchmark the time necessary to look up revision from a cold nodemap | |
1614 |
|
1639 | |||
1615 | Depending on the implementation, the amount and order of revision we look |
|
1640 | Depending on the implementation, the amount and order of revision we look | |
1616 | up can varies. Example of useful set to test: |
|
1641 | up can varies. Example of useful set to test: | |
1617 | * tip |
|
1642 | * tip | |
1618 | * 0 |
|
1643 | * 0 | |
1619 | * -10: |
|
1644 | * -10: | |
1620 | * :10 |
|
1645 | * :10 | |
1621 | * -10: + :10 |
|
1646 | * -10: + :10 | |
1622 | * :10: + -10: |
|
1647 | * :10: + -10: | |
1623 | * -10000: |
|
1648 | * -10000: | |
1624 | * -10000: + 0 |
|
1649 | * -10000: + 0 | |
1625 |
|
1650 | |||
1626 | The command currently focus on valid binary lookup. Benchmarking for |
|
1651 | The command currently focus on valid binary lookup. Benchmarking for | |
1627 | hexlookup, prefix lookup and missing lookup would also be valuable. |
|
1652 | hexlookup, prefix lookup and missing lookup would also be valuable. | |
1628 | """ |
|
1653 | """ | |
1629 | import mercurial.revlog |
|
1654 | import mercurial.revlog | |
1630 |
|
1655 | |||
1631 | opts = _byteskwargs(opts) |
|
1656 | opts = _byteskwargs(opts) | |
1632 | timer, fm = gettimer(ui, opts) |
|
1657 | timer, fm = gettimer(ui, opts) | |
1633 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg |
|
1658 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg | |
1634 |
|
1659 | |||
1635 | unfi = repo.unfiltered() |
|
1660 | unfi = repo.unfiltered() | |
1636 | clearcaches = opts[b'clear_caches'] |
|
1661 | clearcaches = opts[b'clear_caches'] | |
1637 | # find the filecache func directly |
|
1662 | # find the filecache func directly | |
1638 | # This avoid polluting the benchmark with the filecache logic |
|
1663 | # This avoid polluting the benchmark with the filecache logic | |
1639 | makecl = unfi.__class__.changelog.func |
|
1664 | makecl = unfi.__class__.changelog.func | |
1640 | if not opts[b'rev']: |
|
1665 | if not opts[b'rev']: | |
1641 | raise error.Abort(b'use --rev to specify revisions to look up') |
|
1666 | raise error.Abort(b'use --rev to specify revisions to look up') | |
1642 | revs = scmutil.revrange(repo, opts[b'rev']) |
|
1667 | revs = scmutil.revrange(repo, opts[b'rev']) | |
1643 | cl = repo.changelog |
|
1668 | cl = repo.changelog | |
1644 | nodes = [cl.node(r) for r in revs] |
|
1669 | nodes = [cl.node(r) for r in revs] | |
1645 |
|
1670 | |||
1646 | # use a list to pass reference to a nodemap from one closure to the next |
|
1671 | # use a list to pass reference to a nodemap from one closure to the next | |
1647 | nodeget = [None] |
|
1672 | nodeget = [None] | |
1648 |
|
1673 | |||
1649 | def setnodeget(): |
|
1674 | def setnodeget(): | |
1650 | # probably not necessary, but for good measure |
|
1675 | # probably not necessary, but for good measure | |
1651 | clearchangelog(unfi) |
|
1676 | clearchangelog(unfi) | |
1652 | cl = makecl(unfi) |
|
1677 | cl = makecl(unfi) | |
1653 | if util.safehasattr(cl.index, 'get_rev'): |
|
1678 | if util.safehasattr(cl.index, 'get_rev'): | |
1654 | nodeget[0] = cl.index.get_rev |
|
1679 | nodeget[0] = cl.index.get_rev | |
1655 | else: |
|
1680 | else: | |
1656 | nodeget[0] = cl.nodemap.get |
|
1681 | nodeget[0] = cl.nodemap.get | |
1657 |
|
1682 | |||
1658 | def d(): |
|
1683 | def d(): | |
1659 | get = nodeget[0] |
|
1684 | get = nodeget[0] | |
1660 | for n in nodes: |
|
1685 | for n in nodes: | |
1661 | get(n) |
|
1686 | get(n) | |
1662 |
|
1687 | |||
1663 | setup = None |
|
1688 | setup = None | |
1664 | if clearcaches: |
|
1689 | if clearcaches: | |
1665 |
|
1690 | |||
1666 | def setup(): |
|
1691 | def setup(): | |
1667 | setnodeget() |
|
1692 | setnodeget() | |
1668 |
|
1693 | |||
1669 | else: |
|
1694 | else: | |
1670 | setnodeget() |
|
1695 | setnodeget() | |
1671 | d() # prewarm the data structure |
|
1696 | d() # prewarm the data structure | |
1672 | timer(d, setup=setup) |
|
1697 | timer(d, setup=setup) | |
1673 | fm.end() |
|
1698 | fm.end() | |
1674 |
|
1699 | |||
1675 |
|
1700 | |||
1676 | @command(b'perf::startup|perfstartup', formatteropts) |
|
1701 | @command(b'perf::startup|perfstartup', formatteropts) | |
1677 | def perfstartup(ui, repo, **opts): |
|
1702 | def perfstartup(ui, repo, **opts): | |
1678 | opts = _byteskwargs(opts) |
|
1703 | opts = _byteskwargs(opts) | |
1679 | timer, fm = gettimer(ui, opts) |
|
1704 | timer, fm = gettimer(ui, opts) | |
1680 |
|
1705 | |||
1681 | def d(): |
|
1706 | def d(): | |
1682 | if os.name != 'nt': |
|
1707 | if os.name != 'nt': | |
1683 | os.system( |
|
1708 | os.system( | |
1684 | b"HGRCPATH= %s version -q > /dev/null" % fsencode(sys.argv[0]) |
|
1709 | b"HGRCPATH= %s version -q > /dev/null" % fsencode(sys.argv[0]) | |
1685 | ) |
|
1710 | ) | |
1686 | else: |
|
1711 | else: | |
1687 | os.environ['HGRCPATH'] = r' ' |
|
1712 | os.environ['HGRCPATH'] = r' ' | |
1688 | os.system("%s version -q > NUL" % sys.argv[0]) |
|
1713 | os.system("%s version -q > NUL" % sys.argv[0]) | |
1689 |
|
1714 | |||
1690 | timer(d) |
|
1715 | timer(d) | |
1691 | fm.end() |
|
1716 | fm.end() | |
1692 |
|
1717 | |||
1693 |
|
1718 | |||
1694 | @command(b'perf::parents|perfparents', formatteropts) |
|
1719 | @command(b'perf::parents|perfparents', formatteropts) | |
1695 | def perfparents(ui, repo, **opts): |
|
1720 | def perfparents(ui, repo, **opts): | |
1696 | """benchmark the time necessary to fetch one changeset's parents. |
|
1721 | """benchmark the time necessary to fetch one changeset's parents. | |
1697 |
|
1722 | |||
1698 | The fetch is done using the `node identifier`, traversing all object layers |
|
1723 | The fetch is done using the `node identifier`, traversing all object layers | |
1699 | from the repository object. The first N revisions will be used for this |
|
1724 | from the repository object. The first N revisions will be used for this | |
1700 | benchmark. N is controlled by the ``perf.parentscount`` config option |
|
1725 | benchmark. N is controlled by the ``perf.parentscount`` config option | |
1701 | (default: 1000). |
|
1726 | (default: 1000). | |
1702 | """ |
|
1727 | """ | |
1703 | opts = _byteskwargs(opts) |
|
1728 | opts = _byteskwargs(opts) | |
1704 | timer, fm = gettimer(ui, opts) |
|
1729 | timer, fm = gettimer(ui, opts) | |
1705 | # control the number of commits perfparents iterates over |
|
1730 | # control the number of commits perfparents iterates over | |
1706 | # experimental config: perf.parentscount |
|
1731 | # experimental config: perf.parentscount | |
1707 | count = getint(ui, b"perf", b"parentscount", 1000) |
|
1732 | count = getint(ui, b"perf", b"parentscount", 1000) | |
1708 | if len(repo.changelog) < count: |
|
1733 | if len(repo.changelog) < count: | |
1709 | raise error.Abort(b"repo needs %d commits for this test" % count) |
|
1734 | raise error.Abort(b"repo needs %d commits for this test" % count) | |
1710 | repo = repo.unfiltered() |
|
1735 | repo = repo.unfiltered() | |
1711 | nl = [repo.changelog.node(i) for i in _xrange(count)] |
|
1736 | nl = [repo.changelog.node(i) for i in _xrange(count)] | |
1712 |
|
1737 | |||
1713 | def d(): |
|
1738 | def d(): | |
1714 | for n in nl: |
|
1739 | for n in nl: | |
1715 | repo.changelog.parents(n) |
|
1740 | repo.changelog.parents(n) | |
1716 |
|
1741 | |||
1717 | timer(d) |
|
1742 | timer(d) | |
1718 | fm.end() |
|
1743 | fm.end() | |
1719 |
|
1744 | |||
1720 |
|
1745 | |||
1721 | @command(b'perf::ctxfiles|perfctxfiles', formatteropts) |
|
1746 | @command(b'perf::ctxfiles|perfctxfiles', formatteropts) | |
1722 | def perfctxfiles(ui, repo, x, **opts): |
|
1747 | def perfctxfiles(ui, repo, x, **opts): | |
1723 | opts = _byteskwargs(opts) |
|
1748 | opts = _byteskwargs(opts) | |
1724 | x = int(x) |
|
1749 | x = int(x) | |
1725 | timer, fm = gettimer(ui, opts) |
|
1750 | timer, fm = gettimer(ui, opts) | |
1726 |
|
1751 | |||
1727 | def d(): |
|
1752 | def d(): | |
1728 | len(repo[x].files()) |
|
1753 | len(repo[x].files()) | |
1729 |
|
1754 | |||
1730 | timer(d) |
|
1755 | timer(d) | |
1731 | fm.end() |
|
1756 | fm.end() | |
1732 |
|
1757 | |||
1733 |
|
1758 | |||
1734 | @command(b'perf::rawfiles|perfrawfiles', formatteropts) |
|
1759 | @command(b'perf::rawfiles|perfrawfiles', formatteropts) | |
1735 | def perfrawfiles(ui, repo, x, **opts): |
|
1760 | def perfrawfiles(ui, repo, x, **opts): | |
1736 | opts = _byteskwargs(opts) |
|
1761 | opts = _byteskwargs(opts) | |
1737 | x = int(x) |
|
1762 | x = int(x) | |
1738 | timer, fm = gettimer(ui, opts) |
|
1763 | timer, fm = gettimer(ui, opts) | |
1739 | cl = repo.changelog |
|
1764 | cl = repo.changelog | |
1740 |
|
1765 | |||
1741 | def d(): |
|
1766 | def d(): | |
1742 | len(cl.read(x)[3]) |
|
1767 | len(cl.read(x)[3]) | |
1743 |
|
1768 | |||
1744 | timer(d) |
|
1769 | timer(d) | |
1745 | fm.end() |
|
1770 | fm.end() | |
1746 |
|
1771 | |||
1747 |
|
1772 | |||
1748 | @command(b'perf::lookup|perflookup', formatteropts) |
|
1773 | @command(b'perf::lookup|perflookup', formatteropts) | |
1749 | def perflookup(ui, repo, rev, **opts): |
|
1774 | def perflookup(ui, repo, rev, **opts): | |
1750 | opts = _byteskwargs(opts) |
|
1775 | opts = _byteskwargs(opts) | |
1751 | timer, fm = gettimer(ui, opts) |
|
1776 | timer, fm = gettimer(ui, opts) | |
1752 | timer(lambda: len(repo.lookup(rev))) |
|
1777 | timer(lambda: len(repo.lookup(rev))) | |
1753 | fm.end() |
|
1778 | fm.end() | |
1754 |
|
1779 | |||
1755 |
|
1780 | |||
1756 | @command( |
|
1781 | @command( | |
1757 | b'perf::linelogedits|perflinelogedits', |
|
1782 | b'perf::linelogedits|perflinelogedits', | |
1758 | [ |
|
1783 | [ | |
1759 | (b'n', b'edits', 10000, b'number of edits'), |
|
1784 | (b'n', b'edits', 10000, b'number of edits'), | |
1760 | (b'', b'max-hunk-lines', 10, b'max lines in a hunk'), |
|
1785 | (b'', b'max-hunk-lines', 10, b'max lines in a hunk'), | |
1761 | ], |
|
1786 | ], | |
1762 | norepo=True, |
|
1787 | norepo=True, | |
1763 | ) |
|
1788 | ) | |
1764 | def perflinelogedits(ui, **opts): |
|
1789 | def perflinelogedits(ui, **opts): | |
1765 | from mercurial import linelog |
|
1790 | from mercurial import linelog | |
1766 |
|
1791 | |||
1767 | opts = _byteskwargs(opts) |
|
1792 | opts = _byteskwargs(opts) | |
1768 |
|
1793 | |||
1769 | edits = opts[b'edits'] |
|
1794 | edits = opts[b'edits'] | |
1770 | maxhunklines = opts[b'max_hunk_lines'] |
|
1795 | maxhunklines = opts[b'max_hunk_lines'] | |
1771 |
|
1796 | |||
1772 | maxb1 = 100000 |
|
1797 | maxb1 = 100000 | |
1773 | random.seed(0) |
|
1798 | random.seed(0) | |
1774 | randint = random.randint |
|
1799 | randint = random.randint | |
1775 | currentlines = 0 |
|
1800 | currentlines = 0 | |
1776 | arglist = [] |
|
1801 | arglist = [] | |
1777 | for rev in _xrange(edits): |
|
1802 | for rev in _xrange(edits): | |
1778 | a1 = randint(0, currentlines) |
|
1803 | a1 = randint(0, currentlines) | |
1779 | a2 = randint(a1, min(currentlines, a1 + maxhunklines)) |
|
1804 | a2 = randint(a1, min(currentlines, a1 + maxhunklines)) | |
1780 | b1 = randint(0, maxb1) |
|
1805 | b1 = randint(0, maxb1) | |
1781 | b2 = randint(b1, b1 + maxhunklines) |
|
1806 | b2 = randint(b1, b1 + maxhunklines) | |
1782 | currentlines += (b2 - b1) - (a2 - a1) |
|
1807 | currentlines += (b2 - b1) - (a2 - a1) | |
1783 | arglist.append((rev, a1, a2, b1, b2)) |
|
1808 | arglist.append((rev, a1, a2, b1, b2)) | |
1784 |
|
1809 | |||
1785 | def d(): |
|
1810 | def d(): | |
1786 | ll = linelog.linelog() |
|
1811 | ll = linelog.linelog() | |
1787 | for args in arglist: |
|
1812 | for args in arglist: | |
1788 | ll.replacelines(*args) |
|
1813 | ll.replacelines(*args) | |
1789 |
|
1814 | |||
1790 | timer, fm = gettimer(ui, opts) |
|
1815 | timer, fm = gettimer(ui, opts) | |
1791 | timer(d) |
|
1816 | timer(d) | |
1792 | fm.end() |
|
1817 | fm.end() | |
1793 |
|
1818 | |||
1794 |
|
1819 | |||
1795 | @command(b'perf::revrange|perfrevrange', formatteropts) |
|
1820 | @command(b'perf::revrange|perfrevrange', formatteropts) | |
1796 | def perfrevrange(ui, repo, *specs, **opts): |
|
1821 | def perfrevrange(ui, repo, *specs, **opts): | |
1797 | opts = _byteskwargs(opts) |
|
1822 | opts = _byteskwargs(opts) | |
1798 | timer, fm = gettimer(ui, opts) |
|
1823 | timer, fm = gettimer(ui, opts) | |
1799 | revrange = scmutil.revrange |
|
1824 | revrange = scmutil.revrange | |
1800 | timer(lambda: len(revrange(repo, specs))) |
|
1825 | timer(lambda: len(revrange(repo, specs))) | |
1801 | fm.end() |
|
1826 | fm.end() | |
1802 |
|
1827 | |||
1803 |
|
1828 | |||
1804 | @command(b'perf::nodelookup|perfnodelookup', formatteropts) |
|
1829 | @command(b'perf::nodelookup|perfnodelookup', formatteropts) | |
1805 | def perfnodelookup(ui, repo, rev, **opts): |
|
1830 | def perfnodelookup(ui, repo, rev, **opts): | |
1806 | opts = _byteskwargs(opts) |
|
1831 | opts = _byteskwargs(opts) | |
1807 | timer, fm = gettimer(ui, opts) |
|
1832 | timer, fm = gettimer(ui, opts) | |
1808 | import mercurial.revlog |
|
1833 | import mercurial.revlog | |
1809 |
|
1834 | |||
1810 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg |
|
1835 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg | |
1811 | n = scmutil.revsingle(repo, rev).node() |
|
1836 | n = scmutil.revsingle(repo, rev).node() | |
1812 | cl = mercurial.revlog.revlog(getsvfs(repo), b"00changelog.i") |
|
1837 | ||
|
1838 | try: | |||
|
1839 | cl = revlog(getsvfs(repo), radix=b"00changelog") | |||
|
1840 | except TypeError: | |||
|
1841 | cl = revlog(getsvfs(repo), indexfile=b"00changelog.i") | |||
1813 |
|
1842 | |||
1814 | def d(): |
|
1843 | def d(): | |
1815 | cl.rev(n) |
|
1844 | cl.rev(n) | |
1816 | clearcaches(cl) |
|
1845 | clearcaches(cl) | |
1817 |
|
1846 | |||
1818 | timer(d) |
|
1847 | timer(d) | |
1819 | fm.end() |
|
1848 | fm.end() | |
1820 |
|
1849 | |||
1821 |
|
1850 | |||
1822 | @command( |
|
1851 | @command( | |
1823 | b'perf::log|perflog', |
|
1852 | b'perf::log|perflog', | |
1824 | [(b'', b'rename', False, b'ask log to follow renames')] + formatteropts, |
|
1853 | [(b'', b'rename', False, b'ask log to follow renames')] + formatteropts, | |
1825 | ) |
|
1854 | ) | |
1826 | def perflog(ui, repo, rev=None, **opts): |
|
1855 | def perflog(ui, repo, rev=None, **opts): | |
1827 | opts = _byteskwargs(opts) |
|
1856 | opts = _byteskwargs(opts) | |
1828 | if rev is None: |
|
1857 | if rev is None: | |
1829 | rev = [] |
|
1858 | rev = [] | |
1830 | timer, fm = gettimer(ui, opts) |
|
1859 | timer, fm = gettimer(ui, opts) | |
1831 | ui.pushbuffer() |
|
1860 | ui.pushbuffer() | |
1832 | timer( |
|
1861 | timer( | |
1833 | lambda: commands.log( |
|
1862 | lambda: commands.log( | |
1834 | ui, repo, rev=rev, date=b'', user=b'', copies=opts.get(b'rename') |
|
1863 | ui, repo, rev=rev, date=b'', user=b'', copies=opts.get(b'rename') | |
1835 | ) |
|
1864 | ) | |
1836 | ) |
|
1865 | ) | |
1837 | ui.popbuffer() |
|
1866 | ui.popbuffer() | |
1838 | fm.end() |
|
1867 | fm.end() | |
1839 |
|
1868 | |||
1840 |
|
1869 | |||
1841 | @command(b'perf::moonwalk|perfmoonwalk', formatteropts) |
|
1870 | @command(b'perf::moonwalk|perfmoonwalk', formatteropts) | |
1842 | def perfmoonwalk(ui, repo, **opts): |
|
1871 | def perfmoonwalk(ui, repo, **opts): | |
1843 | """benchmark walking the changelog backwards |
|
1872 | """benchmark walking the changelog backwards | |
1844 |
|
1873 | |||
1845 | This also loads the changelog data for each revision in the changelog. |
|
1874 | This also loads the changelog data for each revision in the changelog. | |
1846 | """ |
|
1875 | """ | |
1847 | opts = _byteskwargs(opts) |
|
1876 | opts = _byteskwargs(opts) | |
1848 | timer, fm = gettimer(ui, opts) |
|
1877 | timer, fm = gettimer(ui, opts) | |
1849 |
|
1878 | |||
1850 | def moonwalk(): |
|
1879 | def moonwalk(): | |
1851 | for i in repo.changelog.revs(start=(len(repo) - 1), stop=-1): |
|
1880 | for i in repo.changelog.revs(start=(len(repo) - 1), stop=-1): | |
1852 | ctx = repo[i] |
|
1881 | ctx = repo[i] | |
1853 | ctx.branch() # read changelog data (in addition to the index) |
|
1882 | ctx.branch() # read changelog data (in addition to the index) | |
1854 |
|
1883 | |||
1855 | timer(moonwalk) |
|
1884 | timer(moonwalk) | |
1856 | fm.end() |
|
1885 | fm.end() | |
1857 |
|
1886 | |||
1858 |
|
1887 | |||
1859 | @command( |
|
1888 | @command( | |
1860 | b'perf::templating|perftemplating', |
|
1889 | b'perf::templating|perftemplating', | |
1861 | [ |
|
1890 | [ | |
1862 | (b'r', b'rev', [], b'revisions to run the template on'), |
|
1891 | (b'r', b'rev', [], b'revisions to run the template on'), | |
1863 | ] |
|
1892 | ] | |
1864 | + formatteropts, |
|
1893 | + formatteropts, | |
1865 | ) |
|
1894 | ) | |
1866 | def perftemplating(ui, repo, testedtemplate=None, **opts): |
|
1895 | def perftemplating(ui, repo, testedtemplate=None, **opts): | |
1867 | """test the rendering time of a given template""" |
|
1896 | """test the rendering time of a given template""" | |
1868 | if makelogtemplater is None: |
|
1897 | if makelogtemplater is None: | |
1869 | raise error.Abort( |
|
1898 | raise error.Abort( | |
1870 | b"perftemplating not available with this Mercurial", |
|
1899 | b"perftemplating not available with this Mercurial", | |
1871 | hint=b"use 4.3 or later", |
|
1900 | hint=b"use 4.3 or later", | |
1872 | ) |
|
1901 | ) | |
1873 |
|
1902 | |||
1874 | opts = _byteskwargs(opts) |
|
1903 | opts = _byteskwargs(opts) | |
1875 |
|
1904 | |||
1876 | nullui = ui.copy() |
|
1905 | nullui = ui.copy() | |
1877 | nullui.fout = open(os.devnull, 'wb') |
|
1906 | nullui.fout = open(os.devnull, 'wb') | |
1878 | nullui.disablepager() |
|
1907 | nullui.disablepager() | |
1879 | revs = opts.get(b'rev') |
|
1908 | revs = opts.get(b'rev') | |
1880 | if not revs: |
|
1909 | if not revs: | |
1881 | revs = [b'all()'] |
|
1910 | revs = [b'all()'] | |
1882 | revs = list(scmutil.revrange(repo, revs)) |
|
1911 | revs = list(scmutil.revrange(repo, revs)) | |
1883 |
|
1912 | |||
1884 | defaulttemplate = ( |
|
1913 | defaulttemplate = ( | |
1885 | b'{date|shortdate} [{rev}:{node|short}]' |
|
1914 | b'{date|shortdate} [{rev}:{node|short}]' | |
1886 | b' {author|person}: {desc|firstline}\n' |
|
1915 | b' {author|person}: {desc|firstline}\n' | |
1887 | ) |
|
1916 | ) | |
1888 | if testedtemplate is None: |
|
1917 | if testedtemplate is None: | |
1889 | testedtemplate = defaulttemplate |
|
1918 | testedtemplate = defaulttemplate | |
1890 | displayer = makelogtemplater(nullui, repo, testedtemplate) |
|
1919 | displayer = makelogtemplater(nullui, repo, testedtemplate) | |
1891 |
|
1920 | |||
1892 | def format(): |
|
1921 | def format(): | |
1893 | for r in revs: |
|
1922 | for r in revs: | |
1894 | ctx = repo[r] |
|
1923 | ctx = repo[r] | |
1895 | displayer.show(ctx) |
|
1924 | displayer.show(ctx) | |
1896 | displayer.flush(ctx) |
|
1925 | displayer.flush(ctx) | |
1897 |
|
1926 | |||
1898 | timer, fm = gettimer(ui, opts) |
|
1927 | timer, fm = gettimer(ui, opts) | |
1899 | timer(format) |
|
1928 | timer(format) | |
1900 | fm.end() |
|
1929 | fm.end() | |
1901 |
|
1930 | |||
1902 |
|
1931 | |||
1903 | def _displaystats(ui, opts, entries, data): |
|
1932 | def _displaystats(ui, opts, entries, data): | |
1904 | # use a second formatter because the data are quite different, not sure |
|
1933 | # use a second formatter because the data are quite different, not sure | |
1905 | # how it flies with the templater. |
|
1934 | # how it flies with the templater. | |
1906 | fm = ui.formatter(b'perf-stats', opts) |
|
1935 | fm = ui.formatter(b'perf-stats', opts) | |
1907 | for key, title in entries: |
|
1936 | for key, title in entries: | |
1908 | values = data[key] |
|
1937 | values = data[key] | |
1909 | nbvalues = len(data) |
|
1938 | nbvalues = len(data) | |
1910 | values.sort() |
|
1939 | values.sort() | |
1911 | stats = { |
|
1940 | stats = { | |
1912 | 'key': key, |
|
1941 | 'key': key, | |
1913 | 'title': title, |
|
1942 | 'title': title, | |
1914 | 'nbitems': len(values), |
|
1943 | 'nbitems': len(values), | |
1915 | 'min': values[0][0], |
|
1944 | 'min': values[0][0], | |
1916 | '10%': values[(nbvalues * 10) // 100][0], |
|
1945 | '10%': values[(nbvalues * 10) // 100][0], | |
1917 | '25%': values[(nbvalues * 25) // 100][0], |
|
1946 | '25%': values[(nbvalues * 25) // 100][0], | |
1918 | '50%': values[(nbvalues * 50) // 100][0], |
|
1947 | '50%': values[(nbvalues * 50) // 100][0], | |
1919 | '75%': values[(nbvalues * 75) // 100][0], |
|
1948 | '75%': values[(nbvalues * 75) // 100][0], | |
1920 | '80%': values[(nbvalues * 80) // 100][0], |
|
1949 | '80%': values[(nbvalues * 80) // 100][0], | |
1921 | '85%': values[(nbvalues * 85) // 100][0], |
|
1950 | '85%': values[(nbvalues * 85) // 100][0], | |
1922 | '90%': values[(nbvalues * 90) // 100][0], |
|
1951 | '90%': values[(nbvalues * 90) // 100][0], | |
1923 | '95%': values[(nbvalues * 95) // 100][0], |
|
1952 | '95%': values[(nbvalues * 95) // 100][0], | |
1924 | '99%': values[(nbvalues * 99) // 100][0], |
|
1953 | '99%': values[(nbvalues * 99) // 100][0], | |
1925 | 'max': values[-1][0], |
|
1954 | 'max': values[-1][0], | |
1926 | } |
|
1955 | } | |
1927 | fm.startitem() |
|
1956 | fm.startitem() | |
1928 | fm.data(**stats) |
|
1957 | fm.data(**stats) | |
1929 | # make node pretty for the human output |
|
1958 | # make node pretty for the human output | |
1930 | fm.plain('### %s (%d items)\n' % (title, len(values))) |
|
1959 | fm.plain('### %s (%d items)\n' % (title, len(values))) | |
1931 | lines = [ |
|
1960 | lines = [ | |
1932 | 'min', |
|
1961 | 'min', | |
1933 | '10%', |
|
1962 | '10%', | |
1934 | '25%', |
|
1963 | '25%', | |
1935 | '50%', |
|
1964 | '50%', | |
1936 | '75%', |
|
1965 | '75%', | |
1937 | '80%', |
|
1966 | '80%', | |
1938 | '85%', |
|
1967 | '85%', | |
1939 | '90%', |
|
1968 | '90%', | |
1940 | '95%', |
|
1969 | '95%', | |
1941 | '99%', |
|
1970 | '99%', | |
1942 | 'max', |
|
1971 | 'max', | |
1943 | ] |
|
1972 | ] | |
1944 | for l in lines: |
|
1973 | for l in lines: | |
1945 | fm.plain('%s: %s\n' % (l, stats[l])) |
|
1974 | fm.plain('%s: %s\n' % (l, stats[l])) | |
1946 | fm.end() |
|
1975 | fm.end() | |
1947 |
|
1976 | |||
1948 |
|
1977 | |||
1949 | @command( |
|
1978 | @command( | |
1950 | b'perf::helper-mergecopies|perfhelper-mergecopies', |
|
1979 | b'perf::helper-mergecopies|perfhelper-mergecopies', | |
1951 | formatteropts |
|
1980 | formatteropts | |
1952 | + [ |
|
1981 | + [ | |
1953 | (b'r', b'revs', [], b'restrict search to these revisions'), |
|
1982 | (b'r', b'revs', [], b'restrict search to these revisions'), | |
1954 | (b'', b'timing', False, b'provides extra data (costly)'), |
|
1983 | (b'', b'timing', False, b'provides extra data (costly)'), | |
1955 | (b'', b'stats', False, b'provides statistic about the measured data'), |
|
1984 | (b'', b'stats', False, b'provides statistic about the measured data'), | |
1956 | ], |
|
1985 | ], | |
1957 | ) |
|
1986 | ) | |
1958 | def perfhelpermergecopies(ui, repo, revs=[], **opts): |
|
1987 | def perfhelpermergecopies(ui, repo, revs=[], **opts): | |
1959 | """find statistics about potential parameters for `perfmergecopies` |
|
1988 | """find statistics about potential parameters for `perfmergecopies` | |
1960 |
|
1989 | |||
1961 | This command find (base, p1, p2) triplet relevant for copytracing |
|
1990 | This command find (base, p1, p2) triplet relevant for copytracing | |
1962 | benchmarking in the context of a merge. It reports values for some of the |
|
1991 | benchmarking in the context of a merge. It reports values for some of the | |
1963 | parameters that impact merge copy tracing time during merge. |
|
1992 | parameters that impact merge copy tracing time during merge. | |
1964 |
|
1993 | |||
1965 | If `--timing` is set, rename detection is run and the associated timing |
|
1994 | If `--timing` is set, rename detection is run and the associated timing | |
1966 | will be reported. The extra details come at the cost of slower command |
|
1995 | will be reported. The extra details come at the cost of slower command | |
1967 | execution. |
|
1996 | execution. | |
1968 |
|
1997 | |||
1969 | Since rename detection is only run once, other factors might easily |
|
1998 | Since rename detection is only run once, other factors might easily | |
1970 | affect the precision of the timing. However it should give a good |
|
1999 | affect the precision of the timing. However it should give a good | |
1971 | approximation of which revision triplets are very costly. |
|
2000 | approximation of which revision triplets are very costly. | |
1972 | """ |
|
2001 | """ | |
1973 | opts = _byteskwargs(opts) |
|
2002 | opts = _byteskwargs(opts) | |
1974 | fm = ui.formatter(b'perf', opts) |
|
2003 | fm = ui.formatter(b'perf', opts) | |
1975 | dotiming = opts[b'timing'] |
|
2004 | dotiming = opts[b'timing'] | |
1976 | dostats = opts[b'stats'] |
|
2005 | dostats = opts[b'stats'] | |
1977 |
|
2006 | |||
1978 | output_template = [ |
|
2007 | output_template = [ | |
1979 | ("base", "%(base)12s"), |
|
2008 | ("base", "%(base)12s"), | |
1980 | ("p1", "%(p1.node)12s"), |
|
2009 | ("p1", "%(p1.node)12s"), | |
1981 | ("p2", "%(p2.node)12s"), |
|
2010 | ("p2", "%(p2.node)12s"), | |
1982 | ("p1.nb-revs", "%(p1.nbrevs)12d"), |
|
2011 | ("p1.nb-revs", "%(p1.nbrevs)12d"), | |
1983 | ("p1.nb-files", "%(p1.nbmissingfiles)12d"), |
|
2012 | ("p1.nb-files", "%(p1.nbmissingfiles)12d"), | |
1984 | ("p1.renames", "%(p1.renamedfiles)12d"), |
|
2013 | ("p1.renames", "%(p1.renamedfiles)12d"), | |
1985 | ("p1.time", "%(p1.time)12.3f"), |
|
2014 | ("p1.time", "%(p1.time)12.3f"), | |
1986 | ("p2.nb-revs", "%(p2.nbrevs)12d"), |
|
2015 | ("p2.nb-revs", "%(p2.nbrevs)12d"), | |
1987 | ("p2.nb-files", "%(p2.nbmissingfiles)12d"), |
|
2016 | ("p2.nb-files", "%(p2.nbmissingfiles)12d"), | |
1988 | ("p2.renames", "%(p2.renamedfiles)12d"), |
|
2017 | ("p2.renames", "%(p2.renamedfiles)12d"), | |
1989 | ("p2.time", "%(p2.time)12.3f"), |
|
2018 | ("p2.time", "%(p2.time)12.3f"), | |
1990 | ("renames", "%(nbrenamedfiles)12d"), |
|
2019 | ("renames", "%(nbrenamedfiles)12d"), | |
1991 | ("total.time", "%(time)12.3f"), |
|
2020 | ("total.time", "%(time)12.3f"), | |
1992 | ] |
|
2021 | ] | |
1993 | if not dotiming: |
|
2022 | if not dotiming: | |
1994 | output_template = [ |
|
2023 | output_template = [ | |
1995 | i |
|
2024 | i | |
1996 | for i in output_template |
|
2025 | for i in output_template | |
1997 | if not ('time' in i[0] or 'renames' in i[0]) |
|
2026 | if not ('time' in i[0] or 'renames' in i[0]) | |
1998 | ] |
|
2027 | ] | |
1999 | header_names = [h for (h, v) in output_template] |
|
2028 | header_names = [h for (h, v) in output_template] | |
2000 | output = ' '.join([v for (h, v) in output_template]) + '\n' |
|
2029 | output = ' '.join([v for (h, v) in output_template]) + '\n' | |
2001 | header = ' '.join(['%12s'] * len(header_names)) + '\n' |
|
2030 | header = ' '.join(['%12s'] * len(header_names)) + '\n' | |
2002 | fm.plain(header % tuple(header_names)) |
|
2031 | fm.plain(header % tuple(header_names)) | |
2003 |
|
2032 | |||
2004 | if not revs: |
|
2033 | if not revs: | |
2005 | revs = ['all()'] |
|
2034 | revs = ['all()'] | |
2006 | revs = scmutil.revrange(repo, revs) |
|
2035 | revs = scmutil.revrange(repo, revs) | |
2007 |
|
2036 | |||
2008 | if dostats: |
|
2037 | if dostats: | |
2009 | alldata = { |
|
2038 | alldata = { | |
2010 | 'nbrevs': [], |
|
2039 | 'nbrevs': [], | |
2011 | 'nbmissingfiles': [], |
|
2040 | 'nbmissingfiles': [], | |
2012 | } |
|
2041 | } | |
2013 | if dotiming: |
|
2042 | if dotiming: | |
2014 | alldata['parentnbrenames'] = [] |
|
2043 | alldata['parentnbrenames'] = [] | |
2015 | alldata['totalnbrenames'] = [] |
|
2044 | alldata['totalnbrenames'] = [] | |
2016 | alldata['parenttime'] = [] |
|
2045 | alldata['parenttime'] = [] | |
2017 | alldata['totaltime'] = [] |
|
2046 | alldata['totaltime'] = [] | |
2018 |
|
2047 | |||
2019 | roi = repo.revs('merge() and %ld', revs) |
|
2048 | roi = repo.revs('merge() and %ld', revs) | |
2020 | for r in roi: |
|
2049 | for r in roi: | |
2021 | ctx = repo[r] |
|
2050 | ctx = repo[r] | |
2022 | p1 = ctx.p1() |
|
2051 | p1 = ctx.p1() | |
2023 | p2 = ctx.p2() |
|
2052 | p2 = ctx.p2() | |
2024 | bases = repo.changelog._commonancestorsheads(p1.rev(), p2.rev()) |
|
2053 | bases = repo.changelog._commonancestorsheads(p1.rev(), p2.rev()) | |
2025 | for b in bases: |
|
2054 | for b in bases: | |
2026 | b = repo[b] |
|
2055 | b = repo[b] | |
2027 | p1missing = copies._computeforwardmissing(b, p1) |
|
2056 | p1missing = copies._computeforwardmissing(b, p1) | |
2028 | p2missing = copies._computeforwardmissing(b, p2) |
|
2057 | p2missing = copies._computeforwardmissing(b, p2) | |
2029 | data = { |
|
2058 | data = { | |
2030 | b'base': b.hex(), |
|
2059 | b'base': b.hex(), | |
2031 | b'p1.node': p1.hex(), |
|
2060 | b'p1.node': p1.hex(), | |
2032 | b'p1.nbrevs': len(repo.revs('only(%d, %d)', p1.rev(), b.rev())), |
|
2061 | b'p1.nbrevs': len(repo.revs('only(%d, %d)', p1.rev(), b.rev())), | |
2033 | b'p1.nbmissingfiles': len(p1missing), |
|
2062 | b'p1.nbmissingfiles': len(p1missing), | |
2034 | b'p2.node': p2.hex(), |
|
2063 | b'p2.node': p2.hex(), | |
2035 | b'p2.nbrevs': len(repo.revs('only(%d, %d)', p2.rev(), b.rev())), |
|
2064 | b'p2.nbrevs': len(repo.revs('only(%d, %d)', p2.rev(), b.rev())), | |
2036 | b'p2.nbmissingfiles': len(p2missing), |
|
2065 | b'p2.nbmissingfiles': len(p2missing), | |
2037 | } |
|
2066 | } | |
2038 | if dostats: |
|
2067 | if dostats: | |
2039 | if p1missing: |
|
2068 | if p1missing: | |
2040 | alldata['nbrevs'].append( |
|
2069 | alldata['nbrevs'].append( | |
2041 | (data['p1.nbrevs'], b.hex(), p1.hex()) |
|
2070 | (data['p1.nbrevs'], b.hex(), p1.hex()) | |
2042 | ) |
|
2071 | ) | |
2043 | alldata['nbmissingfiles'].append( |
|
2072 | alldata['nbmissingfiles'].append( | |
2044 | (data['p1.nbmissingfiles'], b.hex(), p1.hex()) |
|
2073 | (data['p1.nbmissingfiles'], b.hex(), p1.hex()) | |
2045 | ) |
|
2074 | ) | |
2046 | if p2missing: |
|
2075 | if p2missing: | |
2047 | alldata['nbrevs'].append( |
|
2076 | alldata['nbrevs'].append( | |
2048 | (data['p2.nbrevs'], b.hex(), p2.hex()) |
|
2077 | (data['p2.nbrevs'], b.hex(), p2.hex()) | |
2049 | ) |
|
2078 | ) | |
2050 | alldata['nbmissingfiles'].append( |
|
2079 | alldata['nbmissingfiles'].append( | |
2051 | (data['p2.nbmissingfiles'], b.hex(), p2.hex()) |
|
2080 | (data['p2.nbmissingfiles'], b.hex(), p2.hex()) | |
2052 | ) |
|
2081 | ) | |
2053 | if dotiming: |
|
2082 | if dotiming: | |
2054 | begin = util.timer() |
|
2083 | begin = util.timer() | |
2055 | mergedata = copies.mergecopies(repo, p1, p2, b) |
|
2084 | mergedata = copies.mergecopies(repo, p1, p2, b) | |
2056 | end = util.timer() |
|
2085 | end = util.timer() | |
2057 | # not very stable timing since we did only one run |
|
2086 | # not very stable timing since we did only one run | |
2058 | data['time'] = end - begin |
|
2087 | data['time'] = end - begin | |
2059 | # mergedata contains five dicts: "copy", "movewithdir", |
|
2088 | # mergedata contains five dicts: "copy", "movewithdir", | |
2060 | # "diverge", "renamedelete" and "dirmove". |
|
2089 | # "diverge", "renamedelete" and "dirmove". | |
2061 | # The first 4 are about renamed file so lets count that. |
|
2090 | # The first 4 are about renamed file so lets count that. | |
2062 | renames = len(mergedata[0]) |
|
2091 | renames = len(mergedata[0]) | |
2063 | renames += len(mergedata[1]) |
|
2092 | renames += len(mergedata[1]) | |
2064 | renames += len(mergedata[2]) |
|
2093 | renames += len(mergedata[2]) | |
2065 | renames += len(mergedata[3]) |
|
2094 | renames += len(mergedata[3]) | |
2066 | data['nbrenamedfiles'] = renames |
|
2095 | data['nbrenamedfiles'] = renames | |
2067 | begin = util.timer() |
|
2096 | begin = util.timer() | |
2068 | p1renames = copies.pathcopies(b, p1) |
|
2097 | p1renames = copies.pathcopies(b, p1) | |
2069 | end = util.timer() |
|
2098 | end = util.timer() | |
2070 | data['p1.time'] = end - begin |
|
2099 | data['p1.time'] = end - begin | |
2071 | begin = util.timer() |
|
2100 | begin = util.timer() | |
2072 | p2renames = copies.pathcopies(b, p2) |
|
2101 | p2renames = copies.pathcopies(b, p2) | |
2073 | end = util.timer() |
|
2102 | end = util.timer() | |
2074 | data['p2.time'] = end - begin |
|
2103 | data['p2.time'] = end - begin | |
2075 | data['p1.renamedfiles'] = len(p1renames) |
|
2104 | data['p1.renamedfiles'] = len(p1renames) | |
2076 | data['p2.renamedfiles'] = len(p2renames) |
|
2105 | data['p2.renamedfiles'] = len(p2renames) | |
2077 |
|
2106 | |||
2078 | if dostats: |
|
2107 | if dostats: | |
2079 | if p1missing: |
|
2108 | if p1missing: | |
2080 | alldata['parentnbrenames'].append( |
|
2109 | alldata['parentnbrenames'].append( | |
2081 | (data['p1.renamedfiles'], b.hex(), p1.hex()) |
|
2110 | (data['p1.renamedfiles'], b.hex(), p1.hex()) | |
2082 | ) |
|
2111 | ) | |
2083 | alldata['parenttime'].append( |
|
2112 | alldata['parenttime'].append( | |
2084 | (data['p1.time'], b.hex(), p1.hex()) |
|
2113 | (data['p1.time'], b.hex(), p1.hex()) | |
2085 | ) |
|
2114 | ) | |
2086 | if p2missing: |
|
2115 | if p2missing: | |
2087 | alldata['parentnbrenames'].append( |
|
2116 | alldata['parentnbrenames'].append( | |
2088 | (data['p2.renamedfiles'], b.hex(), p2.hex()) |
|
2117 | (data['p2.renamedfiles'], b.hex(), p2.hex()) | |
2089 | ) |
|
2118 | ) | |
2090 | alldata['parenttime'].append( |
|
2119 | alldata['parenttime'].append( | |
2091 | (data['p2.time'], b.hex(), p2.hex()) |
|
2120 | (data['p2.time'], b.hex(), p2.hex()) | |
2092 | ) |
|
2121 | ) | |
2093 | if p1missing or p2missing: |
|
2122 | if p1missing or p2missing: | |
2094 | alldata['totalnbrenames'].append( |
|
2123 | alldata['totalnbrenames'].append( | |
2095 | ( |
|
2124 | ( | |
2096 | data['nbrenamedfiles'], |
|
2125 | data['nbrenamedfiles'], | |
2097 | b.hex(), |
|
2126 | b.hex(), | |
2098 | p1.hex(), |
|
2127 | p1.hex(), | |
2099 | p2.hex(), |
|
2128 | p2.hex(), | |
2100 | ) |
|
2129 | ) | |
2101 | ) |
|
2130 | ) | |
2102 | alldata['totaltime'].append( |
|
2131 | alldata['totaltime'].append( | |
2103 | (data['time'], b.hex(), p1.hex(), p2.hex()) |
|
2132 | (data['time'], b.hex(), p1.hex(), p2.hex()) | |
2104 | ) |
|
2133 | ) | |
2105 | fm.startitem() |
|
2134 | fm.startitem() | |
2106 | fm.data(**data) |
|
2135 | fm.data(**data) | |
2107 | # make node pretty for the human output |
|
2136 | # make node pretty for the human output | |
2108 | out = data.copy() |
|
2137 | out = data.copy() | |
2109 | out['base'] = fm.hexfunc(b.node()) |
|
2138 | out['base'] = fm.hexfunc(b.node()) | |
2110 | out['p1.node'] = fm.hexfunc(p1.node()) |
|
2139 | out['p1.node'] = fm.hexfunc(p1.node()) | |
2111 | out['p2.node'] = fm.hexfunc(p2.node()) |
|
2140 | out['p2.node'] = fm.hexfunc(p2.node()) | |
2112 | fm.plain(output % out) |
|
2141 | fm.plain(output % out) | |
2113 |
|
2142 | |||
2114 | fm.end() |
|
2143 | fm.end() | |
2115 | if dostats: |
|
2144 | if dostats: | |
2116 | # use a second formatter because the data are quite different, not sure |
|
2145 | # use a second formatter because the data are quite different, not sure | |
2117 | # how it flies with the templater. |
|
2146 | # how it flies with the templater. | |
2118 | entries = [ |
|
2147 | entries = [ | |
2119 | ('nbrevs', 'number of revision covered'), |
|
2148 | ('nbrevs', 'number of revision covered'), | |
2120 | ('nbmissingfiles', 'number of missing files at head'), |
|
2149 | ('nbmissingfiles', 'number of missing files at head'), | |
2121 | ] |
|
2150 | ] | |
2122 | if dotiming: |
|
2151 | if dotiming: | |
2123 | entries.append( |
|
2152 | entries.append( | |
2124 | ('parentnbrenames', 'rename from one parent to base') |
|
2153 | ('parentnbrenames', 'rename from one parent to base') | |
2125 | ) |
|
2154 | ) | |
2126 | entries.append(('totalnbrenames', 'total number of renames')) |
|
2155 | entries.append(('totalnbrenames', 'total number of renames')) | |
2127 | entries.append(('parenttime', 'time for one parent')) |
|
2156 | entries.append(('parenttime', 'time for one parent')) | |
2128 | entries.append(('totaltime', 'time for both parents')) |
|
2157 | entries.append(('totaltime', 'time for both parents')) | |
2129 | _displaystats(ui, opts, entries, alldata) |
|
2158 | _displaystats(ui, opts, entries, alldata) | |
2130 |
|
2159 | |||
2131 |
|
2160 | |||
2132 | @command( |
|
2161 | @command( | |
2133 | b'perf::helper-pathcopies|perfhelper-pathcopies', |
|
2162 | b'perf::helper-pathcopies|perfhelper-pathcopies', | |
2134 | formatteropts |
|
2163 | formatteropts | |
2135 | + [ |
|
2164 | + [ | |
2136 | (b'r', b'revs', [], b'restrict search to these revisions'), |
|
2165 | (b'r', b'revs', [], b'restrict search to these revisions'), | |
2137 | (b'', b'timing', False, b'provides extra data (costly)'), |
|
2166 | (b'', b'timing', False, b'provides extra data (costly)'), | |
2138 | (b'', b'stats', False, b'provides statistic about the measured data'), |
|
2167 | (b'', b'stats', False, b'provides statistic about the measured data'), | |
2139 | ], |
|
2168 | ], | |
2140 | ) |
|
2169 | ) | |
2141 | def perfhelperpathcopies(ui, repo, revs=[], **opts): |
|
2170 | def perfhelperpathcopies(ui, repo, revs=[], **opts): | |
2142 | """find statistic about potential parameters for the `perftracecopies` |
|
2171 | """find statistic about potential parameters for the `perftracecopies` | |
2143 |
|
2172 | |||
2144 | This command find source-destination pair relevant for copytracing testing. |
|
2173 | This command find source-destination pair relevant for copytracing testing. | |
2145 | It report value for some of the parameters that impact copy tracing time. |
|
2174 | It report value for some of the parameters that impact copy tracing time. | |
2146 |
|
2175 | |||
2147 | If `--timing` is set, rename detection is run and the associated timing |
|
2176 | If `--timing` is set, rename detection is run and the associated timing | |
2148 | will be reported. The extra details comes at the cost of a slower command |
|
2177 | will be reported. The extra details comes at the cost of a slower command | |
2149 | execution. |
|
2178 | execution. | |
2150 |
|
2179 | |||
2151 | Since the rename detection is only run once, other factors might easily |
|
2180 | Since the rename detection is only run once, other factors might easily | |
2152 | affect the precision of the timing. However it should give a good |
|
2181 | affect the precision of the timing. However it should give a good | |
2153 | approximation of which revision pairs are very costly. |
|
2182 | approximation of which revision pairs are very costly. | |
2154 | """ |
|
2183 | """ | |
2155 | opts = _byteskwargs(opts) |
|
2184 | opts = _byteskwargs(opts) | |
2156 | fm = ui.formatter(b'perf', opts) |
|
2185 | fm = ui.formatter(b'perf', opts) | |
2157 | dotiming = opts[b'timing'] |
|
2186 | dotiming = opts[b'timing'] | |
2158 | dostats = opts[b'stats'] |
|
2187 | dostats = opts[b'stats'] | |
2159 |
|
2188 | |||
2160 | if dotiming: |
|
2189 | if dotiming: | |
2161 | header = '%12s %12s %12s %12s %12s %12s\n' |
|
2190 | header = '%12s %12s %12s %12s %12s %12s\n' | |
2162 | output = ( |
|
2191 | output = ( | |
2163 | "%(source)12s %(destination)12s " |
|
2192 | "%(source)12s %(destination)12s " | |
2164 | "%(nbrevs)12d %(nbmissingfiles)12d " |
|
2193 | "%(nbrevs)12d %(nbmissingfiles)12d " | |
2165 | "%(nbrenamedfiles)12d %(time)18.5f\n" |
|
2194 | "%(nbrenamedfiles)12d %(time)18.5f\n" | |
2166 | ) |
|
2195 | ) | |
2167 | header_names = ( |
|
2196 | header_names = ( | |
2168 | "source", |
|
2197 | "source", | |
2169 | "destination", |
|
2198 | "destination", | |
2170 | "nb-revs", |
|
2199 | "nb-revs", | |
2171 | "nb-files", |
|
2200 | "nb-files", | |
2172 | "nb-renames", |
|
2201 | "nb-renames", | |
2173 | "time", |
|
2202 | "time", | |
2174 | ) |
|
2203 | ) | |
2175 | fm.plain(header % header_names) |
|
2204 | fm.plain(header % header_names) | |
2176 | else: |
|
2205 | else: | |
2177 | header = '%12s %12s %12s %12s\n' |
|
2206 | header = '%12s %12s %12s %12s\n' | |
2178 | output = ( |
|
2207 | output = ( | |
2179 | "%(source)12s %(destination)12s " |
|
2208 | "%(source)12s %(destination)12s " | |
2180 | "%(nbrevs)12d %(nbmissingfiles)12d\n" |
|
2209 | "%(nbrevs)12d %(nbmissingfiles)12d\n" | |
2181 | ) |
|
2210 | ) | |
2182 | fm.plain(header % ("source", "destination", "nb-revs", "nb-files")) |
|
2211 | fm.plain(header % ("source", "destination", "nb-revs", "nb-files")) | |
2183 |
|
2212 | |||
2184 | if not revs: |
|
2213 | if not revs: | |
2185 | revs = ['all()'] |
|
2214 | revs = ['all()'] | |
2186 | revs = scmutil.revrange(repo, revs) |
|
2215 | revs = scmutil.revrange(repo, revs) | |
2187 |
|
2216 | |||
2188 | if dostats: |
|
2217 | if dostats: | |
2189 | alldata = { |
|
2218 | alldata = { | |
2190 | 'nbrevs': [], |
|
2219 | 'nbrevs': [], | |
2191 | 'nbmissingfiles': [], |
|
2220 | 'nbmissingfiles': [], | |
2192 | } |
|
2221 | } | |
2193 | if dotiming: |
|
2222 | if dotiming: | |
2194 | alldata['nbrenames'] = [] |
|
2223 | alldata['nbrenames'] = [] | |
2195 | alldata['time'] = [] |
|
2224 | alldata['time'] = [] | |
2196 |
|
2225 | |||
2197 | roi = repo.revs('merge() and %ld', revs) |
|
2226 | roi = repo.revs('merge() and %ld', revs) | |
2198 | for r in roi: |
|
2227 | for r in roi: | |
2199 | ctx = repo[r] |
|
2228 | ctx = repo[r] | |
2200 | p1 = ctx.p1().rev() |
|
2229 | p1 = ctx.p1().rev() | |
2201 | p2 = ctx.p2().rev() |
|
2230 | p2 = ctx.p2().rev() | |
2202 | bases = repo.changelog._commonancestorsheads(p1, p2) |
|
2231 | bases = repo.changelog._commonancestorsheads(p1, p2) | |
2203 | for p in (p1, p2): |
|
2232 | for p in (p1, p2): | |
2204 | for b in bases: |
|
2233 | for b in bases: | |
2205 | base = repo[b] |
|
2234 | base = repo[b] | |
2206 | parent = repo[p] |
|
2235 | parent = repo[p] | |
2207 | missing = copies._computeforwardmissing(base, parent) |
|
2236 | missing = copies._computeforwardmissing(base, parent) | |
2208 | if not missing: |
|
2237 | if not missing: | |
2209 | continue |
|
2238 | continue | |
2210 | data = { |
|
2239 | data = { | |
2211 | b'source': base.hex(), |
|
2240 | b'source': base.hex(), | |
2212 | b'destination': parent.hex(), |
|
2241 | b'destination': parent.hex(), | |
2213 | b'nbrevs': len(repo.revs('only(%d, %d)', p, b)), |
|
2242 | b'nbrevs': len(repo.revs('only(%d, %d)', p, b)), | |
2214 | b'nbmissingfiles': len(missing), |
|
2243 | b'nbmissingfiles': len(missing), | |
2215 | } |
|
2244 | } | |
2216 | if dostats: |
|
2245 | if dostats: | |
2217 | alldata['nbrevs'].append( |
|
2246 | alldata['nbrevs'].append( | |
2218 | ( |
|
2247 | ( | |
2219 | data['nbrevs'], |
|
2248 | data['nbrevs'], | |
2220 | base.hex(), |
|
2249 | base.hex(), | |
2221 | parent.hex(), |
|
2250 | parent.hex(), | |
2222 | ) |
|
2251 | ) | |
2223 | ) |
|
2252 | ) | |
2224 | alldata['nbmissingfiles'].append( |
|
2253 | alldata['nbmissingfiles'].append( | |
2225 | ( |
|
2254 | ( | |
2226 | data['nbmissingfiles'], |
|
2255 | data['nbmissingfiles'], | |
2227 | base.hex(), |
|
2256 | base.hex(), | |
2228 | parent.hex(), |
|
2257 | parent.hex(), | |
2229 | ) |
|
2258 | ) | |
2230 | ) |
|
2259 | ) | |
2231 | if dotiming: |
|
2260 | if dotiming: | |
2232 | begin = util.timer() |
|
2261 | begin = util.timer() | |
2233 | renames = copies.pathcopies(base, parent) |
|
2262 | renames = copies.pathcopies(base, parent) | |
2234 | end = util.timer() |
|
2263 | end = util.timer() | |
2235 | # not very stable timing since we did only one run |
|
2264 | # not very stable timing since we did only one run | |
2236 | data['time'] = end - begin |
|
2265 | data['time'] = end - begin | |
2237 | data['nbrenamedfiles'] = len(renames) |
|
2266 | data['nbrenamedfiles'] = len(renames) | |
2238 | if dostats: |
|
2267 | if dostats: | |
2239 | alldata['time'].append( |
|
2268 | alldata['time'].append( | |
2240 | ( |
|
2269 | ( | |
2241 | data['time'], |
|
2270 | data['time'], | |
2242 | base.hex(), |
|
2271 | base.hex(), | |
2243 | parent.hex(), |
|
2272 | parent.hex(), | |
2244 | ) |
|
2273 | ) | |
2245 | ) |
|
2274 | ) | |
2246 | alldata['nbrenames'].append( |
|
2275 | alldata['nbrenames'].append( | |
2247 | ( |
|
2276 | ( | |
2248 | data['nbrenamedfiles'], |
|
2277 | data['nbrenamedfiles'], | |
2249 | base.hex(), |
|
2278 | base.hex(), | |
2250 | parent.hex(), |
|
2279 | parent.hex(), | |
2251 | ) |
|
2280 | ) | |
2252 | ) |
|
2281 | ) | |
2253 | fm.startitem() |
|
2282 | fm.startitem() | |
2254 | fm.data(**data) |
|
2283 | fm.data(**data) | |
2255 | out = data.copy() |
|
2284 | out = data.copy() | |
2256 | out['source'] = fm.hexfunc(base.node()) |
|
2285 | out['source'] = fm.hexfunc(base.node()) | |
2257 | out['destination'] = fm.hexfunc(parent.node()) |
|
2286 | out['destination'] = fm.hexfunc(parent.node()) | |
2258 | fm.plain(output % out) |
|
2287 | fm.plain(output % out) | |
2259 |
|
2288 | |||
2260 | fm.end() |
|
2289 | fm.end() | |
2261 | if dostats: |
|
2290 | if dostats: | |
2262 | entries = [ |
|
2291 | entries = [ | |
2263 | ('nbrevs', 'number of revision covered'), |
|
2292 | ('nbrevs', 'number of revision covered'), | |
2264 | ('nbmissingfiles', 'number of missing files at head'), |
|
2293 | ('nbmissingfiles', 'number of missing files at head'), | |
2265 | ] |
|
2294 | ] | |
2266 | if dotiming: |
|
2295 | if dotiming: | |
2267 | entries.append(('nbrenames', 'renamed files')) |
|
2296 | entries.append(('nbrenames', 'renamed files')) | |
2268 | entries.append(('time', 'time')) |
|
2297 | entries.append(('time', 'time')) | |
2269 | _displaystats(ui, opts, entries, alldata) |
|
2298 | _displaystats(ui, opts, entries, alldata) | |
2270 |
|
2299 | |||
2271 |
|
2300 | |||
2272 | @command(b'perf::cca|perfcca', formatteropts) |
|
2301 | @command(b'perf::cca|perfcca', formatteropts) | |
2273 | def perfcca(ui, repo, **opts): |
|
2302 | def perfcca(ui, repo, **opts): | |
2274 | opts = _byteskwargs(opts) |
|
2303 | opts = _byteskwargs(opts) | |
2275 | timer, fm = gettimer(ui, opts) |
|
2304 | timer, fm = gettimer(ui, opts) | |
2276 | timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate)) |
|
2305 | timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate)) | |
2277 | fm.end() |
|
2306 | fm.end() | |
2278 |
|
2307 | |||
2279 |
|
2308 | |||
2280 | @command(b'perf::fncacheload|perffncacheload', formatteropts) |
|
2309 | @command(b'perf::fncacheload|perffncacheload', formatteropts) | |
2281 | def perffncacheload(ui, repo, **opts): |
|
2310 | def perffncacheload(ui, repo, **opts): | |
2282 | opts = _byteskwargs(opts) |
|
2311 | opts = _byteskwargs(opts) | |
2283 | timer, fm = gettimer(ui, opts) |
|
2312 | timer, fm = gettimer(ui, opts) | |
2284 | s = repo.store |
|
2313 | s = repo.store | |
2285 |
|
2314 | |||
2286 | def d(): |
|
2315 | def d(): | |
2287 | s.fncache._load() |
|
2316 | s.fncache._load() | |
2288 |
|
2317 | |||
2289 | timer(d) |
|
2318 | timer(d) | |
2290 | fm.end() |
|
2319 | fm.end() | |
2291 |
|
2320 | |||
2292 |
|
2321 | |||
2293 | @command(b'perf::fncachewrite|perffncachewrite', formatteropts) |
|
2322 | @command(b'perf::fncachewrite|perffncachewrite', formatteropts) | |
2294 | def perffncachewrite(ui, repo, **opts): |
|
2323 | def perffncachewrite(ui, repo, **opts): | |
2295 | opts = _byteskwargs(opts) |
|
2324 | opts = _byteskwargs(opts) | |
2296 | timer, fm = gettimer(ui, opts) |
|
2325 | timer, fm = gettimer(ui, opts) | |
2297 | s = repo.store |
|
2326 | s = repo.store | |
2298 | lock = repo.lock() |
|
2327 | lock = repo.lock() | |
2299 | s.fncache._load() |
|
2328 | s.fncache._load() | |
2300 | tr = repo.transaction(b'perffncachewrite') |
|
2329 | tr = repo.transaction(b'perffncachewrite') | |
2301 | tr.addbackup(b'fncache') |
|
2330 | tr.addbackup(b'fncache') | |
2302 |
|
2331 | |||
2303 | def d(): |
|
2332 | def d(): | |
2304 | s.fncache._dirty = True |
|
2333 | s.fncache._dirty = True | |
2305 | s.fncache.write(tr) |
|
2334 | s.fncache.write(tr) | |
2306 |
|
2335 | |||
2307 | timer(d) |
|
2336 | timer(d) | |
2308 | tr.close() |
|
2337 | tr.close() | |
2309 | lock.release() |
|
2338 | lock.release() | |
2310 | fm.end() |
|
2339 | fm.end() | |
2311 |
|
2340 | |||
2312 |
|
2341 | |||
2313 | @command(b'perf::fncacheencode|perffncacheencode', formatteropts) |
|
2342 | @command(b'perf::fncacheencode|perffncacheencode', formatteropts) | |
2314 | def perffncacheencode(ui, repo, **opts): |
|
2343 | def perffncacheencode(ui, repo, **opts): | |
2315 | opts = _byteskwargs(opts) |
|
2344 | opts = _byteskwargs(opts) | |
2316 | timer, fm = gettimer(ui, opts) |
|
2345 | timer, fm = gettimer(ui, opts) | |
2317 | s = repo.store |
|
2346 | s = repo.store | |
2318 | s.fncache._load() |
|
2347 | s.fncache._load() | |
2319 |
|
2348 | |||
2320 | def d(): |
|
2349 | def d(): | |
2321 | for p in s.fncache.entries: |
|
2350 | for p in s.fncache.entries: | |
2322 | s.encode(p) |
|
2351 | s.encode(p) | |
2323 |
|
2352 | |||
2324 | timer(d) |
|
2353 | timer(d) | |
2325 | fm.end() |
|
2354 | fm.end() | |
2326 |
|
2355 | |||
2327 |
|
2356 | |||
2328 | def _bdiffworker(q, blocks, xdiff, ready, done): |
|
2357 | def _bdiffworker(q, blocks, xdiff, ready, done): | |
2329 | while not done.is_set(): |
|
2358 | while not done.is_set(): | |
2330 | pair = q.get() |
|
2359 | pair = q.get() | |
2331 | while pair is not None: |
|
2360 | while pair is not None: | |
2332 | if xdiff: |
|
2361 | if xdiff: | |
2333 | mdiff.bdiff.xdiffblocks(*pair) |
|
2362 | mdiff.bdiff.xdiffblocks(*pair) | |
2334 | elif blocks: |
|
2363 | elif blocks: | |
2335 | mdiff.bdiff.blocks(*pair) |
|
2364 | mdiff.bdiff.blocks(*pair) | |
2336 | else: |
|
2365 | else: | |
2337 | mdiff.textdiff(*pair) |
|
2366 | mdiff.textdiff(*pair) | |
2338 | q.task_done() |
|
2367 | q.task_done() | |
2339 | pair = q.get() |
|
2368 | pair = q.get() | |
2340 | q.task_done() # for the None one |
|
2369 | q.task_done() # for the None one | |
2341 | with ready: |
|
2370 | with ready: | |
2342 | ready.wait() |
|
2371 | ready.wait() | |
2343 |
|
2372 | |||
2344 |
|
2373 | |||
2345 | def _manifestrevision(repo, mnode): |
|
2374 | def _manifestrevision(repo, mnode): | |
2346 | ml = repo.manifestlog |
|
2375 | ml = repo.manifestlog | |
2347 |
|
2376 | |||
2348 | if util.safehasattr(ml, b'getstorage'): |
|
2377 | if util.safehasattr(ml, b'getstorage'): | |
2349 | store = ml.getstorage(b'') |
|
2378 | store = ml.getstorage(b'') | |
2350 | else: |
|
2379 | else: | |
2351 | store = ml._revlog |
|
2380 | store = ml._revlog | |
2352 |
|
2381 | |||
2353 | return store.revision(mnode) |
|
2382 | return store.revision(mnode) | |
2354 |
|
2383 | |||
2355 |
|
2384 | |||
2356 | @command( |
|
2385 | @command( | |
2357 | b'perf::bdiff|perfbdiff', |
|
2386 | b'perf::bdiff|perfbdiff', | |
2358 | revlogopts |
|
2387 | revlogopts | |
2359 | + formatteropts |
|
2388 | + formatteropts | |
2360 | + [ |
|
2389 | + [ | |
2361 | ( |
|
2390 | ( | |
2362 | b'', |
|
2391 | b'', | |
2363 | b'count', |
|
2392 | b'count', | |
2364 | 1, |
|
2393 | 1, | |
2365 | b'number of revisions to test (when using --startrev)', |
|
2394 | b'number of revisions to test (when using --startrev)', | |
2366 | ), |
|
2395 | ), | |
2367 | (b'', b'alldata', False, b'test bdiffs for all associated revisions'), |
|
2396 | (b'', b'alldata', False, b'test bdiffs for all associated revisions'), | |
2368 | (b'', b'threads', 0, b'number of thread to use (disable with 0)'), |
|
2397 | (b'', b'threads', 0, b'number of thread to use (disable with 0)'), | |
2369 | (b'', b'blocks', False, b'test computing diffs into blocks'), |
|
2398 | (b'', b'blocks', False, b'test computing diffs into blocks'), | |
2370 | (b'', b'xdiff', False, b'use xdiff algorithm'), |
|
2399 | (b'', b'xdiff', False, b'use xdiff algorithm'), | |
2371 | ], |
|
2400 | ], | |
2372 | b'-c|-m|FILE REV', |
|
2401 | b'-c|-m|FILE REV', | |
2373 | ) |
|
2402 | ) | |
2374 | def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts): |
|
2403 | def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts): | |
2375 | """benchmark a bdiff between revisions |
|
2404 | """benchmark a bdiff between revisions | |
2376 |
|
2405 | |||
2377 | By default, benchmark a bdiff between its delta parent and itself. |
|
2406 | By default, benchmark a bdiff between its delta parent and itself. | |
2378 |
|
2407 | |||
2379 | With ``--count``, benchmark bdiffs between delta parents and self for N |
|
2408 | With ``--count``, benchmark bdiffs between delta parents and self for N | |
2380 | revisions starting at the specified revision. |
|
2409 | revisions starting at the specified revision. | |
2381 |
|
2410 | |||
2382 | With ``--alldata``, assume the requested revision is a changeset and |
|
2411 | With ``--alldata``, assume the requested revision is a changeset and | |
2383 | measure bdiffs for all changes related to that changeset (manifest |
|
2412 | measure bdiffs for all changes related to that changeset (manifest | |
2384 | and filelogs). |
|
2413 | and filelogs). | |
2385 | """ |
|
2414 | """ | |
2386 | opts = _byteskwargs(opts) |
|
2415 | opts = _byteskwargs(opts) | |
2387 |
|
2416 | |||
2388 | if opts[b'xdiff'] and not opts[b'blocks']: |
|
2417 | if opts[b'xdiff'] and not opts[b'blocks']: | |
2389 | raise error.CommandError(b'perfbdiff', b'--xdiff requires --blocks') |
|
2418 | raise error.CommandError(b'perfbdiff', b'--xdiff requires --blocks') | |
2390 |
|
2419 | |||
2391 | if opts[b'alldata']: |
|
2420 | if opts[b'alldata']: | |
2392 | opts[b'changelog'] = True |
|
2421 | opts[b'changelog'] = True | |
2393 |
|
2422 | |||
2394 | if opts.get(b'changelog') or opts.get(b'manifest'): |
|
2423 | if opts.get(b'changelog') or opts.get(b'manifest'): | |
2395 | file_, rev = None, file_ |
|
2424 | file_, rev = None, file_ | |
2396 | elif rev is None: |
|
2425 | elif rev is None: | |
2397 | raise error.CommandError(b'perfbdiff', b'invalid arguments') |
|
2426 | raise error.CommandError(b'perfbdiff', b'invalid arguments') | |
2398 |
|
2427 | |||
2399 | blocks = opts[b'blocks'] |
|
2428 | blocks = opts[b'blocks'] | |
2400 | xdiff = opts[b'xdiff'] |
|
2429 | xdiff = opts[b'xdiff'] | |
2401 | textpairs = [] |
|
2430 | textpairs = [] | |
2402 |
|
2431 | |||
2403 | r = cmdutil.openrevlog(repo, b'perfbdiff', file_, opts) |
|
2432 | r = cmdutil.openrevlog(repo, b'perfbdiff', file_, opts) | |
2404 |
|
2433 | |||
2405 | startrev = r.rev(r.lookup(rev)) |
|
2434 | startrev = r.rev(r.lookup(rev)) | |
2406 | for rev in range(startrev, min(startrev + count, len(r) - 1)): |
|
2435 | for rev in range(startrev, min(startrev + count, len(r) - 1)): | |
2407 | if opts[b'alldata']: |
|
2436 | if opts[b'alldata']: | |
2408 | # Load revisions associated with changeset. |
|
2437 | # Load revisions associated with changeset. | |
2409 | ctx = repo[rev] |
|
2438 | ctx = repo[rev] | |
2410 | mtext = _manifestrevision(repo, ctx.manifestnode()) |
|
2439 | mtext = _manifestrevision(repo, ctx.manifestnode()) | |
2411 | for pctx in ctx.parents(): |
|
2440 | for pctx in ctx.parents(): | |
2412 | pman = _manifestrevision(repo, pctx.manifestnode()) |
|
2441 | pman = _manifestrevision(repo, pctx.manifestnode()) | |
2413 | textpairs.append((pman, mtext)) |
|
2442 | textpairs.append((pman, mtext)) | |
2414 |
|
2443 | |||
2415 | # Load filelog revisions by iterating manifest delta. |
|
2444 | # Load filelog revisions by iterating manifest delta. | |
2416 | man = ctx.manifest() |
|
2445 | man = ctx.manifest() | |
2417 | pman = ctx.p1().manifest() |
|
2446 | pman = ctx.p1().manifest() | |
2418 | for filename, change in pman.diff(man).items(): |
|
2447 | for filename, change in pman.diff(man).items(): | |
2419 | fctx = repo.file(filename) |
|
2448 | fctx = repo.file(filename) | |
2420 | f1 = fctx.revision(change[0][0] or -1) |
|
2449 | f1 = fctx.revision(change[0][0] or -1) | |
2421 | f2 = fctx.revision(change[1][0] or -1) |
|
2450 | f2 = fctx.revision(change[1][0] or -1) | |
2422 | textpairs.append((f1, f2)) |
|
2451 | textpairs.append((f1, f2)) | |
2423 | else: |
|
2452 | else: | |
2424 | dp = r.deltaparent(rev) |
|
2453 | dp = r.deltaparent(rev) | |
2425 | textpairs.append((r.revision(dp), r.revision(rev))) |
|
2454 | textpairs.append((r.revision(dp), r.revision(rev))) | |
2426 |
|
2455 | |||
2427 | withthreads = threads > 0 |
|
2456 | withthreads = threads > 0 | |
2428 | if not withthreads: |
|
2457 | if not withthreads: | |
2429 |
|
2458 | |||
2430 | def d(): |
|
2459 | def d(): | |
2431 | for pair in textpairs: |
|
2460 | for pair in textpairs: | |
2432 | if xdiff: |
|
2461 | if xdiff: | |
2433 | mdiff.bdiff.xdiffblocks(*pair) |
|
2462 | mdiff.bdiff.xdiffblocks(*pair) | |
2434 | elif blocks: |
|
2463 | elif blocks: | |
2435 | mdiff.bdiff.blocks(*pair) |
|
2464 | mdiff.bdiff.blocks(*pair) | |
2436 | else: |
|
2465 | else: | |
2437 | mdiff.textdiff(*pair) |
|
2466 | mdiff.textdiff(*pair) | |
2438 |
|
2467 | |||
2439 | else: |
|
2468 | else: | |
2440 | q = queue() |
|
2469 | q = queue() | |
2441 | for i in _xrange(threads): |
|
2470 | for i in _xrange(threads): | |
2442 | q.put(None) |
|
2471 | q.put(None) | |
2443 | ready = threading.Condition() |
|
2472 | ready = threading.Condition() | |
2444 | done = threading.Event() |
|
2473 | done = threading.Event() | |
2445 | for i in _xrange(threads): |
|
2474 | for i in _xrange(threads): | |
2446 | threading.Thread( |
|
2475 | threading.Thread( | |
2447 | target=_bdiffworker, args=(q, blocks, xdiff, ready, done) |
|
2476 | target=_bdiffworker, args=(q, blocks, xdiff, ready, done) | |
2448 | ).start() |
|
2477 | ).start() | |
2449 | q.join() |
|
2478 | q.join() | |
2450 |
|
2479 | |||
2451 | def d(): |
|
2480 | def d(): | |
2452 | for pair in textpairs: |
|
2481 | for pair in textpairs: | |
2453 | q.put(pair) |
|
2482 | q.put(pair) | |
2454 | for i in _xrange(threads): |
|
2483 | for i in _xrange(threads): | |
2455 | q.put(None) |
|
2484 | q.put(None) | |
2456 | with ready: |
|
2485 | with ready: | |
2457 | ready.notify_all() |
|
2486 | ready.notify_all() | |
2458 | q.join() |
|
2487 | q.join() | |
2459 |
|
2488 | |||
2460 | timer, fm = gettimer(ui, opts) |
|
2489 | timer, fm = gettimer(ui, opts) | |
2461 | timer(d) |
|
2490 | timer(d) | |
2462 | fm.end() |
|
2491 | fm.end() | |
2463 |
|
2492 | |||
2464 | if withthreads: |
|
2493 | if withthreads: | |
2465 | done.set() |
|
2494 | done.set() | |
2466 | for i in _xrange(threads): |
|
2495 | for i in _xrange(threads): | |
2467 | q.put(None) |
|
2496 | q.put(None) | |
2468 | with ready: |
|
2497 | with ready: | |
2469 | ready.notify_all() |
|
2498 | ready.notify_all() | |
2470 |
|
2499 | |||
2471 |
|
2500 | |||
2472 | @command( |
|
2501 | @command( | |
2473 | b'perf::unidiff|perfunidiff', |
|
2502 | b'perf::unidiff|perfunidiff', | |
2474 | revlogopts |
|
2503 | revlogopts | |
2475 | + formatteropts |
|
2504 | + formatteropts | |
2476 | + [ |
|
2505 | + [ | |
2477 | ( |
|
2506 | ( | |
2478 | b'', |
|
2507 | b'', | |
2479 | b'count', |
|
2508 | b'count', | |
2480 | 1, |
|
2509 | 1, | |
2481 | b'number of revisions to test (when using --startrev)', |
|
2510 | b'number of revisions to test (when using --startrev)', | |
2482 | ), |
|
2511 | ), | |
2483 | (b'', b'alldata', False, b'test unidiffs for all associated revisions'), |
|
2512 | (b'', b'alldata', False, b'test unidiffs for all associated revisions'), | |
2484 | ], |
|
2513 | ], | |
2485 | b'-c|-m|FILE REV', |
|
2514 | b'-c|-m|FILE REV', | |
2486 | ) |
|
2515 | ) | |
2487 | def perfunidiff(ui, repo, file_, rev=None, count=None, **opts): |
|
2516 | def perfunidiff(ui, repo, file_, rev=None, count=None, **opts): | |
2488 | """benchmark a unified diff between revisions |
|
2517 | """benchmark a unified diff between revisions | |
2489 |
|
2518 | |||
2490 | This doesn't include any copy tracing - it's just a unified diff |
|
2519 | This doesn't include any copy tracing - it's just a unified diff | |
2491 | of the texts. |
|
2520 | of the texts. | |
2492 |
|
2521 | |||
2493 | By default, benchmark a diff between its delta parent and itself. |
|
2522 | By default, benchmark a diff between its delta parent and itself. | |
2494 |
|
2523 | |||
2495 | With ``--count``, benchmark diffs between delta parents and self for N |
|
2524 | With ``--count``, benchmark diffs between delta parents and self for N | |
2496 | revisions starting at the specified revision. |
|
2525 | revisions starting at the specified revision. | |
2497 |
|
2526 | |||
2498 | With ``--alldata``, assume the requested revision is a changeset and |
|
2527 | With ``--alldata``, assume the requested revision is a changeset and | |
2499 | measure diffs for all changes related to that changeset (manifest |
|
2528 | measure diffs for all changes related to that changeset (manifest | |
2500 | and filelogs). |
|
2529 | and filelogs). | |
2501 | """ |
|
2530 | """ | |
2502 | opts = _byteskwargs(opts) |
|
2531 | opts = _byteskwargs(opts) | |
2503 | if opts[b'alldata']: |
|
2532 | if opts[b'alldata']: | |
2504 | opts[b'changelog'] = True |
|
2533 | opts[b'changelog'] = True | |
2505 |
|
2534 | |||
2506 | if opts.get(b'changelog') or opts.get(b'manifest'): |
|
2535 | if opts.get(b'changelog') or opts.get(b'manifest'): | |
2507 | file_, rev = None, file_ |
|
2536 | file_, rev = None, file_ | |
2508 | elif rev is None: |
|
2537 | elif rev is None: | |
2509 | raise error.CommandError(b'perfunidiff', b'invalid arguments') |
|
2538 | raise error.CommandError(b'perfunidiff', b'invalid arguments') | |
2510 |
|
2539 | |||
2511 | textpairs = [] |
|
2540 | textpairs = [] | |
2512 |
|
2541 | |||
2513 | r = cmdutil.openrevlog(repo, b'perfunidiff', file_, opts) |
|
2542 | r = cmdutil.openrevlog(repo, b'perfunidiff', file_, opts) | |
2514 |
|
2543 | |||
2515 | startrev = r.rev(r.lookup(rev)) |
|
2544 | startrev = r.rev(r.lookup(rev)) | |
2516 | for rev in range(startrev, min(startrev + count, len(r) - 1)): |
|
2545 | for rev in range(startrev, min(startrev + count, len(r) - 1)): | |
2517 | if opts[b'alldata']: |
|
2546 | if opts[b'alldata']: | |
2518 | # Load revisions associated with changeset. |
|
2547 | # Load revisions associated with changeset. | |
2519 | ctx = repo[rev] |
|
2548 | ctx = repo[rev] | |
2520 | mtext = _manifestrevision(repo, ctx.manifestnode()) |
|
2549 | mtext = _manifestrevision(repo, ctx.manifestnode()) | |
2521 | for pctx in ctx.parents(): |
|
2550 | for pctx in ctx.parents(): | |
2522 | pman = _manifestrevision(repo, pctx.manifestnode()) |
|
2551 | pman = _manifestrevision(repo, pctx.manifestnode()) | |
2523 | textpairs.append((pman, mtext)) |
|
2552 | textpairs.append((pman, mtext)) | |
2524 |
|
2553 | |||
2525 | # Load filelog revisions by iterating manifest delta. |
|
2554 | # Load filelog revisions by iterating manifest delta. | |
2526 | man = ctx.manifest() |
|
2555 | man = ctx.manifest() | |
2527 | pman = ctx.p1().manifest() |
|
2556 | pman = ctx.p1().manifest() | |
2528 | for filename, change in pman.diff(man).items(): |
|
2557 | for filename, change in pman.diff(man).items(): | |
2529 | fctx = repo.file(filename) |
|
2558 | fctx = repo.file(filename) | |
2530 | f1 = fctx.revision(change[0][0] or -1) |
|
2559 | f1 = fctx.revision(change[0][0] or -1) | |
2531 | f2 = fctx.revision(change[1][0] or -1) |
|
2560 | f2 = fctx.revision(change[1][0] or -1) | |
2532 | textpairs.append((f1, f2)) |
|
2561 | textpairs.append((f1, f2)) | |
2533 | else: |
|
2562 | else: | |
2534 | dp = r.deltaparent(rev) |
|
2563 | dp = r.deltaparent(rev) | |
2535 | textpairs.append((r.revision(dp), r.revision(rev))) |
|
2564 | textpairs.append((r.revision(dp), r.revision(rev))) | |
2536 |
|
2565 | |||
2537 | def d(): |
|
2566 | def d(): | |
2538 | for left, right in textpairs: |
|
2567 | for left, right in textpairs: | |
2539 | # The date strings don't matter, so we pass empty strings. |
|
2568 | # The date strings don't matter, so we pass empty strings. | |
2540 | headerlines, hunks = mdiff.unidiff( |
|
2569 | headerlines, hunks = mdiff.unidiff( | |
2541 | left, b'', right, b'', b'left', b'right', binary=False |
|
2570 | left, b'', right, b'', b'left', b'right', binary=False | |
2542 | ) |
|
2571 | ) | |
2543 | # consume iterators in roughly the way patch.py does |
|
2572 | # consume iterators in roughly the way patch.py does | |
2544 | b'\n'.join(headerlines) |
|
2573 | b'\n'.join(headerlines) | |
2545 | b''.join(sum((list(hlines) for hrange, hlines in hunks), [])) |
|
2574 | b''.join(sum((list(hlines) for hrange, hlines in hunks), [])) | |
2546 |
|
2575 | |||
2547 | timer, fm = gettimer(ui, opts) |
|
2576 | timer, fm = gettimer(ui, opts) | |
2548 | timer(d) |
|
2577 | timer(d) | |
2549 | fm.end() |
|
2578 | fm.end() | |
2550 |
|
2579 | |||
2551 |
|
2580 | |||
2552 | @command(b'perf::diffwd|perfdiffwd', formatteropts) |
|
2581 | @command(b'perf::diffwd|perfdiffwd', formatteropts) | |
2553 | def perfdiffwd(ui, repo, **opts): |
|
2582 | def perfdiffwd(ui, repo, **opts): | |
2554 | """Profile diff of working directory changes""" |
|
2583 | """Profile diff of working directory changes""" | |
2555 | opts = _byteskwargs(opts) |
|
2584 | opts = _byteskwargs(opts) | |
2556 | timer, fm = gettimer(ui, opts) |
|
2585 | timer, fm = gettimer(ui, opts) | |
2557 | options = { |
|
2586 | options = { | |
2558 | 'w': 'ignore_all_space', |
|
2587 | 'w': 'ignore_all_space', | |
2559 | 'b': 'ignore_space_change', |
|
2588 | 'b': 'ignore_space_change', | |
2560 | 'B': 'ignore_blank_lines', |
|
2589 | 'B': 'ignore_blank_lines', | |
2561 | } |
|
2590 | } | |
2562 |
|
2591 | |||
2563 | for diffopt in ('', 'w', 'b', 'B', 'wB'): |
|
2592 | for diffopt in ('', 'w', 'b', 'B', 'wB'): | |
2564 | opts = {options[c]: b'1' for c in diffopt} |
|
2593 | opts = {options[c]: b'1' for c in diffopt} | |
2565 |
|
2594 | |||
2566 | def d(): |
|
2595 | def d(): | |
2567 | ui.pushbuffer() |
|
2596 | ui.pushbuffer() | |
2568 | commands.diff(ui, repo, **opts) |
|
2597 | commands.diff(ui, repo, **opts) | |
2569 | ui.popbuffer() |
|
2598 | ui.popbuffer() | |
2570 |
|
2599 | |||
2571 | diffopt = diffopt.encode('ascii') |
|
2600 | diffopt = diffopt.encode('ascii') | |
2572 | title = b'diffopts: %s' % (diffopt and (b'-' + diffopt) or b'none') |
|
2601 | title = b'diffopts: %s' % (diffopt and (b'-' + diffopt) or b'none') | |
2573 | timer(d, title=title) |
|
2602 | timer(d, title=title) | |
2574 | fm.end() |
|
2603 | fm.end() | |
2575 |
|
2604 | |||
2576 |
|
2605 | |||
2577 | @command( |
|
2606 | @command( | |
2578 | b'perf::revlogindex|perfrevlogindex', |
|
2607 | b'perf::revlogindex|perfrevlogindex', | |
2579 | revlogopts + formatteropts, |
|
2608 | revlogopts + formatteropts, | |
2580 | b'-c|-m|FILE', |
|
2609 | b'-c|-m|FILE', | |
2581 | ) |
|
2610 | ) | |
2582 | def perfrevlogindex(ui, repo, file_=None, **opts): |
|
2611 | def perfrevlogindex(ui, repo, file_=None, **opts): | |
2583 | """Benchmark operations against a revlog index. |
|
2612 | """Benchmark operations against a revlog index. | |
2584 |
|
2613 | |||
2585 | This tests constructing a revlog instance, reading index data, |
|
2614 | This tests constructing a revlog instance, reading index data, | |
2586 | parsing index data, and performing various operations related to |
|
2615 | parsing index data, and performing various operations related to | |
2587 | index data. |
|
2616 | index data. | |
2588 | """ |
|
2617 | """ | |
2589 |
|
2618 | |||
2590 | opts = _byteskwargs(opts) |
|
2619 | opts = _byteskwargs(opts) | |
2591 |
|
2620 | |||
2592 | rl = cmdutil.openrevlog(repo, b'perfrevlogindex', file_, opts) |
|
2621 | rl = cmdutil.openrevlog(repo, b'perfrevlogindex', file_, opts) | |
2593 |
|
2622 | |||
2594 | opener = getattr(rl, 'opener') # trick linter |
|
2623 | opener = getattr(rl, 'opener') # trick linter | |
2595 | indexfile = rl.indexfile |
|
2624 | # compat with hg <= 5.8 | |
|
2625 | radix = getattr(rl, 'radix', None) | |||
|
2626 | indexfile = getattr(rl, '_indexfile', None) | |||
|
2627 | if indexfile is None: | |||
|
2628 | # compatibility with <= hg-5.8 | |||
|
2629 | indexfile = getattr(rl, 'indexfile') | |||
2596 | data = opener.read(indexfile) |
|
2630 | data = opener.read(indexfile) | |
2597 |
|
2631 | |||
2598 | header = struct.unpack(b'>I', data[0:4])[0] |
|
2632 | header = struct.unpack(b'>I', data[0:4])[0] | |
2599 | version = header & 0xFFFF |
|
2633 | version = header & 0xFFFF | |
2600 | if version == 1: |
|
2634 | if version == 1: | |
2601 | revlogio = revlog.revlogio() |
|
|||
2602 | inline = header & (1 << 16) |
|
2635 | inline = header & (1 << 16) | |
2603 | else: |
|
2636 | else: | |
2604 | raise error.Abort(b'unsupported revlog version: %d' % version) |
|
2637 | raise error.Abort(b'unsupported revlog version: %d' % version) | |
2605 |
|
2638 | |||
|
2639 | parse_index_v1 = getattr(mercurial.revlog, 'parse_index_v1', None) | |||
|
2640 | if parse_index_v1 is None: | |||
|
2641 | parse_index_v1 = mercurial.revlog.revlogio().parseindex | |||
|
2642 | ||||
2606 | rllen = len(rl) |
|
2643 | rllen = len(rl) | |
2607 |
|
2644 | |||
2608 | node0 = rl.node(0) |
|
2645 | node0 = rl.node(0) | |
2609 | node25 = rl.node(rllen // 4) |
|
2646 | node25 = rl.node(rllen // 4) | |
2610 | node50 = rl.node(rllen // 2) |
|
2647 | node50 = rl.node(rllen // 2) | |
2611 | node75 = rl.node(rllen // 4 * 3) |
|
2648 | node75 = rl.node(rllen // 4 * 3) | |
2612 | node100 = rl.node(rllen - 1) |
|
2649 | node100 = rl.node(rllen - 1) | |
2613 |
|
2650 | |||
2614 | allrevs = range(rllen) |
|
2651 | allrevs = range(rllen) | |
2615 | allrevsrev = list(reversed(allrevs)) |
|
2652 | allrevsrev = list(reversed(allrevs)) | |
2616 | allnodes = [rl.node(rev) for rev in range(rllen)] |
|
2653 | allnodes = [rl.node(rev) for rev in range(rllen)] | |
2617 | allnodesrev = list(reversed(allnodes)) |
|
2654 | allnodesrev = list(reversed(allnodes)) | |
2618 |
|
2655 | |||
2619 | def constructor(): |
|
2656 | def constructor(): | |
2620 | revlog.revlog(opener, indexfile) |
|
2657 | if radix is not None: | |
|
2658 | revlog(opener, radix=radix) | |||
|
2659 | else: | |||
|
2660 | # hg <= 5.8 | |||
|
2661 | revlog(opener, indexfile=indexfile) | |||
2621 |
|
2662 | |||
2622 | def read(): |
|
2663 | def read(): | |
2623 | with opener(indexfile) as fh: |
|
2664 | with opener(indexfile) as fh: | |
2624 | fh.read() |
|
2665 | fh.read() | |
2625 |
|
2666 | |||
2626 | def parseindex(): |
|
2667 | def parseindex(): | |
2627 |
|
|
2668 | parse_index_v1(data, inline) | |
2628 |
|
2669 | |||
2629 | def getentry(revornode): |
|
2670 | def getentry(revornode): | |
2630 |
index = |
|
2671 | index = parse_index_v1(data, inline)[0] | |
2631 | index[revornode] |
|
2672 | index[revornode] | |
2632 |
|
2673 | |||
2633 | def getentries(revs, count=1): |
|
2674 | def getentries(revs, count=1): | |
2634 |
index = |
|
2675 | index = parse_index_v1(data, inline)[0] | |
2635 |
|
2676 | |||
2636 | for i in range(count): |
|
2677 | for i in range(count): | |
2637 | for rev in revs: |
|
2678 | for rev in revs: | |
2638 | index[rev] |
|
2679 | index[rev] | |
2639 |
|
2680 | |||
2640 | def resolvenode(node): |
|
2681 | def resolvenode(node): | |
2641 |
index = |
|
2682 | index = parse_index_v1(data, inline)[0] | |
2642 | rev = getattr(index, 'rev', None) |
|
2683 | rev = getattr(index, 'rev', None) | |
2643 | if rev is None: |
|
2684 | if rev is None: | |
2644 | nodemap = getattr( |
|
2685 | nodemap = getattr(parse_index_v1(data, inline)[0], 'nodemap', None) | |
2645 | revlogio.parseindex(data, inline)[0], 'nodemap', None |
|
|||
2646 | ) |
|
|||
2647 | # This only works for the C code. |
|
2686 | # This only works for the C code. | |
2648 | if nodemap is None: |
|
2687 | if nodemap is None: | |
2649 | return |
|
2688 | return | |
2650 | rev = nodemap.__getitem__ |
|
2689 | rev = nodemap.__getitem__ | |
2651 |
|
2690 | |||
2652 | try: |
|
2691 | try: | |
2653 | rev(node) |
|
2692 | rev(node) | |
2654 | except error.RevlogError: |
|
2693 | except error.RevlogError: | |
2655 | pass |
|
2694 | pass | |
2656 |
|
2695 | |||
2657 | def resolvenodes(nodes, count=1): |
|
2696 | def resolvenodes(nodes, count=1): | |
2658 |
index = |
|
2697 | index = parse_index_v1(data, inline)[0] | |
2659 | rev = getattr(index, 'rev', None) |
|
2698 | rev = getattr(index, 'rev', None) | |
2660 | if rev is None: |
|
2699 | if rev is None: | |
2661 | nodemap = getattr( |
|
2700 | nodemap = getattr(parse_index_v1(data, inline)[0], 'nodemap', None) | |
2662 | revlogio.parseindex(data, inline)[0], 'nodemap', None |
|
|||
2663 | ) |
|
|||
2664 | # This only works for the C code. |
|
2701 | # This only works for the C code. | |
2665 | if nodemap is None: |
|
2702 | if nodemap is None: | |
2666 | return |
|
2703 | return | |
2667 | rev = nodemap.__getitem__ |
|
2704 | rev = nodemap.__getitem__ | |
2668 |
|
2705 | |||
2669 | for i in range(count): |
|
2706 | for i in range(count): | |
2670 | for node in nodes: |
|
2707 | for node in nodes: | |
2671 | try: |
|
2708 | try: | |
2672 | rev(node) |
|
2709 | rev(node) | |
2673 | except error.RevlogError: |
|
2710 | except error.RevlogError: | |
2674 | pass |
|
2711 | pass | |
2675 |
|
2712 | |||
2676 | benches = [ |
|
2713 | benches = [ | |
2677 | (constructor, b'revlog constructor'), |
|
2714 | (constructor, b'revlog constructor'), | |
2678 | (read, b'read'), |
|
2715 | (read, b'read'), | |
2679 | (parseindex, b'create index object'), |
|
2716 | (parseindex, b'create index object'), | |
2680 | (lambda: getentry(0), b'retrieve index entry for rev 0'), |
|
2717 | (lambda: getentry(0), b'retrieve index entry for rev 0'), | |
2681 | (lambda: resolvenode(b'a' * 20), b'look up missing node'), |
|
2718 | (lambda: resolvenode(b'a' * 20), b'look up missing node'), | |
2682 | (lambda: resolvenode(node0), b'look up node at rev 0'), |
|
2719 | (lambda: resolvenode(node0), b'look up node at rev 0'), | |
2683 | (lambda: resolvenode(node25), b'look up node at 1/4 len'), |
|
2720 | (lambda: resolvenode(node25), b'look up node at 1/4 len'), | |
2684 | (lambda: resolvenode(node50), b'look up node at 1/2 len'), |
|
2721 | (lambda: resolvenode(node50), b'look up node at 1/2 len'), | |
2685 | (lambda: resolvenode(node75), b'look up node at 3/4 len'), |
|
2722 | (lambda: resolvenode(node75), b'look up node at 3/4 len'), | |
2686 | (lambda: resolvenode(node100), b'look up node at tip'), |
|
2723 | (lambda: resolvenode(node100), b'look up node at tip'), | |
2687 | # 2x variation is to measure caching impact. |
|
2724 | # 2x variation is to measure caching impact. | |
2688 | (lambda: resolvenodes(allnodes), b'look up all nodes (forward)'), |
|
2725 | (lambda: resolvenodes(allnodes), b'look up all nodes (forward)'), | |
2689 | (lambda: resolvenodes(allnodes, 2), b'look up all nodes 2x (forward)'), |
|
2726 | (lambda: resolvenodes(allnodes, 2), b'look up all nodes 2x (forward)'), | |
2690 | (lambda: resolvenodes(allnodesrev), b'look up all nodes (reverse)'), |
|
2727 | (lambda: resolvenodes(allnodesrev), b'look up all nodes (reverse)'), | |
2691 | ( |
|
2728 | ( | |
2692 | lambda: resolvenodes(allnodesrev, 2), |
|
2729 | lambda: resolvenodes(allnodesrev, 2), | |
2693 | b'look up all nodes 2x (reverse)', |
|
2730 | b'look up all nodes 2x (reverse)', | |
2694 | ), |
|
2731 | ), | |
2695 | (lambda: getentries(allrevs), b'retrieve all index entries (forward)'), |
|
2732 | (lambda: getentries(allrevs), b'retrieve all index entries (forward)'), | |
2696 | ( |
|
2733 | ( | |
2697 | lambda: getentries(allrevs, 2), |
|
2734 | lambda: getentries(allrevs, 2), | |
2698 | b'retrieve all index entries 2x (forward)', |
|
2735 | b'retrieve all index entries 2x (forward)', | |
2699 | ), |
|
2736 | ), | |
2700 | ( |
|
2737 | ( | |
2701 | lambda: getentries(allrevsrev), |
|
2738 | lambda: getentries(allrevsrev), | |
2702 | b'retrieve all index entries (reverse)', |
|
2739 | b'retrieve all index entries (reverse)', | |
2703 | ), |
|
2740 | ), | |
2704 | ( |
|
2741 | ( | |
2705 | lambda: getentries(allrevsrev, 2), |
|
2742 | lambda: getentries(allrevsrev, 2), | |
2706 | b'retrieve all index entries 2x (reverse)', |
|
2743 | b'retrieve all index entries 2x (reverse)', | |
2707 | ), |
|
2744 | ), | |
2708 | ] |
|
2745 | ] | |
2709 |
|
2746 | |||
2710 | for fn, title in benches: |
|
2747 | for fn, title in benches: | |
2711 | timer, fm = gettimer(ui, opts) |
|
2748 | timer, fm = gettimer(ui, opts) | |
2712 | timer(fn, title=title) |
|
2749 | timer(fn, title=title) | |
2713 | fm.end() |
|
2750 | fm.end() | |
2714 |
|
2751 | |||
2715 |
|
2752 | |||
2716 | @command( |
|
2753 | @command( | |
2717 | b'perf::revlogrevisions|perfrevlogrevisions', |
|
2754 | b'perf::revlogrevisions|perfrevlogrevisions', | |
2718 | revlogopts |
|
2755 | revlogopts | |
2719 | + formatteropts |
|
2756 | + formatteropts | |
2720 | + [ |
|
2757 | + [ | |
2721 | (b'd', b'dist', 100, b'distance between the revisions'), |
|
2758 | (b'd', b'dist', 100, b'distance between the revisions'), | |
2722 | (b's', b'startrev', 0, b'revision to start reading at'), |
|
2759 | (b's', b'startrev', 0, b'revision to start reading at'), | |
2723 | (b'', b'reverse', False, b'read in reverse'), |
|
2760 | (b'', b'reverse', False, b'read in reverse'), | |
2724 | ], |
|
2761 | ], | |
2725 | b'-c|-m|FILE', |
|
2762 | b'-c|-m|FILE', | |
2726 | ) |
|
2763 | ) | |
2727 | def perfrevlogrevisions( |
|
2764 | def perfrevlogrevisions( | |
2728 | ui, repo, file_=None, startrev=0, reverse=False, **opts |
|
2765 | ui, repo, file_=None, startrev=0, reverse=False, **opts | |
2729 | ): |
|
2766 | ): | |
2730 | """Benchmark reading a series of revisions from a revlog. |
|
2767 | """Benchmark reading a series of revisions from a revlog. | |
2731 |
|
2768 | |||
2732 | By default, we read every ``-d/--dist`` revision from 0 to tip of |
|
2769 | By default, we read every ``-d/--dist`` revision from 0 to tip of | |
2733 | the specified revlog. |
|
2770 | the specified revlog. | |
2734 |
|
2771 | |||
2735 | The start revision can be defined via ``-s/--startrev``. |
|
2772 | The start revision can be defined via ``-s/--startrev``. | |
2736 | """ |
|
2773 | """ | |
2737 | opts = _byteskwargs(opts) |
|
2774 | opts = _byteskwargs(opts) | |
2738 |
|
2775 | |||
2739 | rl = cmdutil.openrevlog(repo, b'perfrevlogrevisions', file_, opts) |
|
2776 | rl = cmdutil.openrevlog(repo, b'perfrevlogrevisions', file_, opts) | |
2740 | rllen = getlen(ui)(rl) |
|
2777 | rllen = getlen(ui)(rl) | |
2741 |
|
2778 | |||
2742 | if startrev < 0: |
|
2779 | if startrev < 0: | |
2743 | startrev = rllen + startrev |
|
2780 | startrev = rllen + startrev | |
2744 |
|
2781 | |||
2745 | def d(): |
|
2782 | def d(): | |
2746 | rl.clearcaches() |
|
2783 | rl.clearcaches() | |
2747 |
|
2784 | |||
2748 | beginrev = startrev |
|
2785 | beginrev = startrev | |
2749 | endrev = rllen |
|
2786 | endrev = rllen | |
2750 | dist = opts[b'dist'] |
|
2787 | dist = opts[b'dist'] | |
2751 |
|
2788 | |||
2752 | if reverse: |
|
2789 | if reverse: | |
2753 | beginrev, endrev = endrev - 1, beginrev - 1 |
|
2790 | beginrev, endrev = endrev - 1, beginrev - 1 | |
2754 | dist = -1 * dist |
|
2791 | dist = -1 * dist | |
2755 |
|
2792 | |||
2756 | for x in _xrange(beginrev, endrev, dist): |
|
2793 | for x in _xrange(beginrev, endrev, dist): | |
2757 | # Old revisions don't support passing int. |
|
2794 | # Old revisions don't support passing int. | |
2758 | n = rl.node(x) |
|
2795 | n = rl.node(x) | |
2759 | rl.revision(n) |
|
2796 | rl.revision(n) | |
2760 |
|
2797 | |||
2761 | timer, fm = gettimer(ui, opts) |
|
2798 | timer, fm = gettimer(ui, opts) | |
2762 | timer(d) |
|
2799 | timer(d) | |
2763 | fm.end() |
|
2800 | fm.end() | |
2764 |
|
2801 | |||
2765 |
|
2802 | |||
2766 | @command( |
|
2803 | @command( | |
2767 | b'perf::revlogwrite|perfrevlogwrite', |
|
2804 | b'perf::revlogwrite|perfrevlogwrite', | |
2768 | revlogopts |
|
2805 | revlogopts | |
2769 | + formatteropts |
|
2806 | + formatteropts | |
2770 | + [ |
|
2807 | + [ | |
2771 | (b's', b'startrev', 1000, b'revision to start writing at'), |
|
2808 | (b's', b'startrev', 1000, b'revision to start writing at'), | |
2772 | (b'', b'stoprev', -1, b'last revision to write'), |
|
2809 | (b'', b'stoprev', -1, b'last revision to write'), | |
2773 | (b'', b'count', 3, b'number of passes to perform'), |
|
2810 | (b'', b'count', 3, b'number of passes to perform'), | |
2774 | (b'', b'details', False, b'print timing for every revisions tested'), |
|
2811 | (b'', b'details', False, b'print timing for every revisions tested'), | |
2775 | (b'', b'source', b'full', b'the kind of data feed in the revlog'), |
|
2812 | (b'', b'source', b'full', b'the kind of data feed in the revlog'), | |
2776 | (b'', b'lazydeltabase', True, b'try the provided delta first'), |
|
2813 | (b'', b'lazydeltabase', True, b'try the provided delta first'), | |
2777 | (b'', b'clear-caches', True, b'clear revlog cache between calls'), |
|
2814 | (b'', b'clear-caches', True, b'clear revlog cache between calls'), | |
2778 | ], |
|
2815 | ], | |
2779 | b'-c|-m|FILE', |
|
2816 | b'-c|-m|FILE', | |
2780 | ) |
|
2817 | ) | |
2781 | def perfrevlogwrite(ui, repo, file_=None, startrev=1000, stoprev=-1, **opts): |
|
2818 | def perfrevlogwrite(ui, repo, file_=None, startrev=1000, stoprev=-1, **opts): | |
2782 | """Benchmark writing a series of revisions to a revlog. |
|
2819 | """Benchmark writing a series of revisions to a revlog. | |
2783 |
|
2820 | |||
2784 | Possible source values are: |
|
2821 | Possible source values are: | |
2785 | * `full`: add from a full text (default). |
|
2822 | * `full`: add from a full text (default). | |
2786 | * `parent-1`: add from a delta to the first parent |
|
2823 | * `parent-1`: add from a delta to the first parent | |
2787 | * `parent-2`: add from a delta to the second parent if it exists |
|
2824 | * `parent-2`: add from a delta to the second parent if it exists | |
2788 | (use a delta from the first parent otherwise) |
|
2825 | (use a delta from the first parent otherwise) | |
2789 | * `parent-smallest`: add from the smallest delta (either p1 or p2) |
|
2826 | * `parent-smallest`: add from the smallest delta (either p1 or p2) | |
2790 | * `storage`: add from the existing precomputed deltas |
|
2827 | * `storage`: add from the existing precomputed deltas | |
2791 |
|
2828 | |||
2792 | Note: This performance command measures performance in a custom way. As a |
|
2829 | Note: This performance command measures performance in a custom way. As a | |
2793 | result some of the global configuration of the 'perf' command does not |
|
2830 | result some of the global configuration of the 'perf' command does not | |
2794 | apply to it: |
|
2831 | apply to it: | |
2795 |
|
2832 | |||
2796 | * ``pre-run``: disabled |
|
2833 | * ``pre-run``: disabled | |
2797 |
|
2834 | |||
2798 | * ``profile-benchmark``: disabled |
|
2835 | * ``profile-benchmark``: disabled | |
2799 |
|
2836 | |||
2800 | * ``run-limits``: disabled use --count instead |
|
2837 | * ``run-limits``: disabled use --count instead | |
2801 | """ |
|
2838 | """ | |
2802 | opts = _byteskwargs(opts) |
|
2839 | opts = _byteskwargs(opts) | |
2803 |
|
2840 | |||
2804 | rl = cmdutil.openrevlog(repo, b'perfrevlogwrite', file_, opts) |
|
2841 | rl = cmdutil.openrevlog(repo, b'perfrevlogwrite', file_, opts) | |
2805 | rllen = getlen(ui)(rl) |
|
2842 | rllen = getlen(ui)(rl) | |
2806 | if startrev < 0: |
|
2843 | if startrev < 0: | |
2807 | startrev = rllen + startrev |
|
2844 | startrev = rllen + startrev | |
2808 | if stoprev < 0: |
|
2845 | if stoprev < 0: | |
2809 | stoprev = rllen + stoprev |
|
2846 | stoprev = rllen + stoprev | |
2810 |
|
2847 | |||
2811 | lazydeltabase = opts['lazydeltabase'] |
|
2848 | lazydeltabase = opts['lazydeltabase'] | |
2812 | source = opts['source'] |
|
2849 | source = opts['source'] | |
2813 | clearcaches = opts['clear_caches'] |
|
2850 | clearcaches = opts['clear_caches'] | |
2814 | validsource = ( |
|
2851 | validsource = ( | |
2815 | b'full', |
|
2852 | b'full', | |
2816 | b'parent-1', |
|
2853 | b'parent-1', | |
2817 | b'parent-2', |
|
2854 | b'parent-2', | |
2818 | b'parent-smallest', |
|
2855 | b'parent-smallest', | |
2819 | b'storage', |
|
2856 | b'storage', | |
2820 | ) |
|
2857 | ) | |
2821 | if source not in validsource: |
|
2858 | if source not in validsource: | |
2822 | raise error.Abort('invalid source type: %s' % source) |
|
2859 | raise error.Abort('invalid source type: %s' % source) | |
2823 |
|
2860 | |||
2824 | ### actually gather results |
|
2861 | ### actually gather results | |
2825 | count = opts['count'] |
|
2862 | count = opts['count'] | |
2826 | if count <= 0: |
|
2863 | if count <= 0: | |
2827 | raise error.Abort('invalide run count: %d' % count) |
|
2864 | raise error.Abort('invalide run count: %d' % count) | |
2828 | allresults = [] |
|
2865 | allresults = [] | |
2829 | for c in range(count): |
|
2866 | for c in range(count): | |
2830 | timing = _timeonewrite( |
|
2867 | timing = _timeonewrite( | |
2831 | ui, |
|
2868 | ui, | |
2832 | rl, |
|
2869 | rl, | |
2833 | source, |
|
2870 | source, | |
2834 | startrev, |
|
2871 | startrev, | |
2835 | stoprev, |
|
2872 | stoprev, | |
2836 | c + 1, |
|
2873 | c + 1, | |
2837 | lazydeltabase=lazydeltabase, |
|
2874 | lazydeltabase=lazydeltabase, | |
2838 | clearcaches=clearcaches, |
|
2875 | clearcaches=clearcaches, | |
2839 | ) |
|
2876 | ) | |
2840 | allresults.append(timing) |
|
2877 | allresults.append(timing) | |
2841 |
|
2878 | |||
2842 | ### consolidate the results in a single list |
|
2879 | ### consolidate the results in a single list | |
2843 | results = [] |
|
2880 | results = [] | |
2844 | for idx, (rev, t) in enumerate(allresults[0]): |
|
2881 | for idx, (rev, t) in enumerate(allresults[0]): | |
2845 | ts = [t] |
|
2882 | ts = [t] | |
2846 | for other in allresults[1:]: |
|
2883 | for other in allresults[1:]: | |
2847 | orev, ot = other[idx] |
|
2884 | orev, ot = other[idx] | |
2848 | assert orev == rev |
|
2885 | assert orev == rev | |
2849 | ts.append(ot) |
|
2886 | ts.append(ot) | |
2850 | results.append((rev, ts)) |
|
2887 | results.append((rev, ts)) | |
2851 | resultcount = len(results) |
|
2888 | resultcount = len(results) | |
2852 |
|
2889 | |||
2853 | ### Compute and display relevant statistics |
|
2890 | ### Compute and display relevant statistics | |
2854 |
|
2891 | |||
2855 | # get a formatter |
|
2892 | # get a formatter | |
2856 | fm = ui.formatter(b'perf', opts) |
|
2893 | fm = ui.formatter(b'perf', opts) | |
2857 | displayall = ui.configbool(b"perf", b"all-timing", False) |
|
2894 | displayall = ui.configbool(b"perf", b"all-timing", False) | |
2858 |
|
2895 | |||
2859 | # print individual details if requested |
|
2896 | # print individual details if requested | |
2860 | if opts['details']: |
|
2897 | if opts['details']: | |
2861 | for idx, item in enumerate(results, 1): |
|
2898 | for idx, item in enumerate(results, 1): | |
2862 | rev, data = item |
|
2899 | rev, data = item | |
2863 | title = 'revisions #%d of %d, rev %d' % (idx, resultcount, rev) |
|
2900 | title = 'revisions #%d of %d, rev %d' % (idx, resultcount, rev) | |
2864 | formatone(fm, data, title=title, displayall=displayall) |
|
2901 | formatone(fm, data, title=title, displayall=displayall) | |
2865 |
|
2902 | |||
2866 | # sorts results by median time |
|
2903 | # sorts results by median time | |
2867 | results.sort(key=lambda x: sorted(x[1])[len(x[1]) // 2]) |
|
2904 | results.sort(key=lambda x: sorted(x[1])[len(x[1]) // 2]) | |
2868 | # list of (name, index) to display) |
|
2905 | # list of (name, index) to display) | |
2869 | relevants = [ |
|
2906 | relevants = [ | |
2870 | ("min", 0), |
|
2907 | ("min", 0), | |
2871 | ("10%", resultcount * 10 // 100), |
|
2908 | ("10%", resultcount * 10 // 100), | |
2872 | ("25%", resultcount * 25 // 100), |
|
2909 | ("25%", resultcount * 25 // 100), | |
2873 | ("50%", resultcount * 70 // 100), |
|
2910 | ("50%", resultcount * 70 // 100), | |
2874 | ("75%", resultcount * 75 // 100), |
|
2911 | ("75%", resultcount * 75 // 100), | |
2875 | ("90%", resultcount * 90 // 100), |
|
2912 | ("90%", resultcount * 90 // 100), | |
2876 | ("95%", resultcount * 95 // 100), |
|
2913 | ("95%", resultcount * 95 // 100), | |
2877 | ("99%", resultcount * 99 // 100), |
|
2914 | ("99%", resultcount * 99 // 100), | |
2878 | ("99.9%", resultcount * 999 // 1000), |
|
2915 | ("99.9%", resultcount * 999 // 1000), | |
2879 | ("99.99%", resultcount * 9999 // 10000), |
|
2916 | ("99.99%", resultcount * 9999 // 10000), | |
2880 | ("99.999%", resultcount * 99999 // 100000), |
|
2917 | ("99.999%", resultcount * 99999 // 100000), | |
2881 | ("max", -1), |
|
2918 | ("max", -1), | |
2882 | ] |
|
2919 | ] | |
2883 | if not ui.quiet: |
|
2920 | if not ui.quiet: | |
2884 | for name, idx in relevants: |
|
2921 | for name, idx in relevants: | |
2885 | data = results[idx] |
|
2922 | data = results[idx] | |
2886 | title = '%s of %d, rev %d' % (name, resultcount, data[0]) |
|
2923 | title = '%s of %d, rev %d' % (name, resultcount, data[0]) | |
2887 | formatone(fm, data[1], title=title, displayall=displayall) |
|
2924 | formatone(fm, data[1], title=title, displayall=displayall) | |
2888 |
|
2925 | |||
2889 | # XXX summing that many float will not be very precise, we ignore this fact |
|
2926 | # XXX summing that many float will not be very precise, we ignore this fact | |
2890 | # for now |
|
2927 | # for now | |
2891 | totaltime = [] |
|
2928 | totaltime = [] | |
2892 | for item in allresults: |
|
2929 | for item in allresults: | |
2893 | totaltime.append( |
|
2930 | totaltime.append( | |
2894 | ( |
|
2931 | ( | |
2895 | sum(x[1][0] for x in item), |
|
2932 | sum(x[1][0] for x in item), | |
2896 | sum(x[1][1] for x in item), |
|
2933 | sum(x[1][1] for x in item), | |
2897 | sum(x[1][2] for x in item), |
|
2934 | sum(x[1][2] for x in item), | |
2898 | ) |
|
2935 | ) | |
2899 | ) |
|
2936 | ) | |
2900 | formatone( |
|
2937 | formatone( | |
2901 | fm, |
|
2938 | fm, | |
2902 | totaltime, |
|
2939 | totaltime, | |
2903 | title="total time (%d revs)" % resultcount, |
|
2940 | title="total time (%d revs)" % resultcount, | |
2904 | displayall=displayall, |
|
2941 | displayall=displayall, | |
2905 | ) |
|
2942 | ) | |
2906 | fm.end() |
|
2943 | fm.end() | |
2907 |
|
2944 | |||
2908 |
|
2945 | |||
2909 | class _faketr(object): |
|
2946 | class _faketr(object): | |
2910 | def add(s, x, y, z=None): |
|
2947 | def add(s, x, y, z=None): | |
2911 | return None |
|
2948 | return None | |
2912 |
|
2949 | |||
2913 |
|
2950 | |||
2914 | def _timeonewrite( |
|
2951 | def _timeonewrite( | |
2915 | ui, |
|
2952 | ui, | |
2916 | orig, |
|
2953 | orig, | |
2917 | source, |
|
2954 | source, | |
2918 | startrev, |
|
2955 | startrev, | |
2919 | stoprev, |
|
2956 | stoprev, | |
2920 | runidx=None, |
|
2957 | runidx=None, | |
2921 | lazydeltabase=True, |
|
2958 | lazydeltabase=True, | |
2922 | clearcaches=True, |
|
2959 | clearcaches=True, | |
2923 | ): |
|
2960 | ): | |
2924 | timings = [] |
|
2961 | timings = [] | |
2925 | tr = _faketr() |
|
2962 | tr = _faketr() | |
2926 | with _temprevlog(ui, orig, startrev) as dest: |
|
2963 | with _temprevlog(ui, orig, startrev) as dest: | |
2927 | dest._lazydeltabase = lazydeltabase |
|
2964 | dest._lazydeltabase = lazydeltabase | |
2928 | revs = list(orig.revs(startrev, stoprev)) |
|
2965 | revs = list(orig.revs(startrev, stoprev)) | |
2929 | total = len(revs) |
|
2966 | total = len(revs) | |
2930 | topic = 'adding' |
|
2967 | topic = 'adding' | |
2931 | if runidx is not None: |
|
2968 | if runidx is not None: | |
2932 | topic += ' (run #%d)' % runidx |
|
2969 | topic += ' (run #%d)' % runidx | |
2933 | # Support both old and new progress API |
|
2970 | # Support both old and new progress API | |
2934 | if util.safehasattr(ui, 'makeprogress'): |
|
2971 | if util.safehasattr(ui, 'makeprogress'): | |
2935 | progress = ui.makeprogress(topic, unit='revs', total=total) |
|
2972 | progress = ui.makeprogress(topic, unit='revs', total=total) | |
2936 |
|
2973 | |||
2937 | def updateprogress(pos): |
|
2974 | def updateprogress(pos): | |
2938 | progress.update(pos) |
|
2975 | progress.update(pos) | |
2939 |
|
2976 | |||
2940 | def completeprogress(): |
|
2977 | def completeprogress(): | |
2941 | progress.complete() |
|
2978 | progress.complete() | |
2942 |
|
2979 | |||
2943 | else: |
|
2980 | else: | |
2944 |
|
2981 | |||
2945 | def updateprogress(pos): |
|
2982 | def updateprogress(pos): | |
2946 | ui.progress(topic, pos, unit='revs', total=total) |
|
2983 | ui.progress(topic, pos, unit='revs', total=total) | |
2947 |
|
2984 | |||
2948 | def completeprogress(): |
|
2985 | def completeprogress(): | |
2949 | ui.progress(topic, None, unit='revs', total=total) |
|
2986 | ui.progress(topic, None, unit='revs', total=total) | |
2950 |
|
2987 | |||
2951 | for idx, rev in enumerate(revs): |
|
2988 | for idx, rev in enumerate(revs): | |
2952 | updateprogress(idx) |
|
2989 | updateprogress(idx) | |
2953 | addargs, addkwargs = _getrevisionseed(orig, rev, tr, source) |
|
2990 | addargs, addkwargs = _getrevisionseed(orig, rev, tr, source) | |
2954 | if clearcaches: |
|
2991 | if clearcaches: | |
2955 | dest.index.clearcaches() |
|
2992 | dest.index.clearcaches() | |
2956 | dest.clearcaches() |
|
2993 | dest.clearcaches() | |
2957 | with timeone() as r: |
|
2994 | with timeone() as r: | |
2958 | dest.addrawrevision(*addargs, **addkwargs) |
|
2995 | dest.addrawrevision(*addargs, **addkwargs) | |
2959 | timings.append((rev, r[0])) |
|
2996 | timings.append((rev, r[0])) | |
2960 | updateprogress(total) |
|
2997 | updateprogress(total) | |
2961 | completeprogress() |
|
2998 | completeprogress() | |
2962 | return timings |
|
2999 | return timings | |
2963 |
|
3000 | |||
2964 |
|
3001 | |||
2965 | def _getrevisionseed(orig, rev, tr, source): |
|
3002 | def _getrevisionseed(orig, rev, tr, source): | |
2966 | from mercurial.node import nullid |
|
3003 | from mercurial.node import nullid | |
2967 |
|
3004 | |||
2968 | linkrev = orig.linkrev(rev) |
|
3005 | linkrev = orig.linkrev(rev) | |
2969 | node = orig.node(rev) |
|
3006 | node = orig.node(rev) | |
2970 | p1, p2 = orig.parents(node) |
|
3007 | p1, p2 = orig.parents(node) | |
2971 | flags = orig.flags(rev) |
|
3008 | flags = orig.flags(rev) | |
2972 | cachedelta = None |
|
3009 | cachedelta = None | |
2973 | text = None |
|
3010 | text = None | |
2974 |
|
3011 | |||
2975 | if source == b'full': |
|
3012 | if source == b'full': | |
2976 | text = orig.revision(rev) |
|
3013 | text = orig.revision(rev) | |
2977 | elif source == b'parent-1': |
|
3014 | elif source == b'parent-1': | |
2978 | baserev = orig.rev(p1) |
|
3015 | baserev = orig.rev(p1) | |
2979 | cachedelta = (baserev, orig.revdiff(p1, rev)) |
|
3016 | cachedelta = (baserev, orig.revdiff(p1, rev)) | |
2980 | elif source == b'parent-2': |
|
3017 | elif source == b'parent-2': | |
2981 | parent = p2 |
|
3018 | parent = p2 | |
2982 | if p2 == nullid: |
|
3019 | if p2 == nullid: | |
2983 | parent = p1 |
|
3020 | parent = p1 | |
2984 | baserev = orig.rev(parent) |
|
3021 | baserev = orig.rev(parent) | |
2985 | cachedelta = (baserev, orig.revdiff(parent, rev)) |
|
3022 | cachedelta = (baserev, orig.revdiff(parent, rev)) | |
2986 | elif source == b'parent-smallest': |
|
3023 | elif source == b'parent-smallest': | |
2987 | p1diff = orig.revdiff(p1, rev) |
|
3024 | p1diff = orig.revdiff(p1, rev) | |
2988 | parent = p1 |
|
3025 | parent = p1 | |
2989 | diff = p1diff |
|
3026 | diff = p1diff | |
2990 | if p2 != nullid: |
|
3027 | if p2 != nullid: | |
2991 | p2diff = orig.revdiff(p2, rev) |
|
3028 | p2diff = orig.revdiff(p2, rev) | |
2992 | if len(p1diff) > len(p2diff): |
|
3029 | if len(p1diff) > len(p2diff): | |
2993 | parent = p2 |
|
3030 | parent = p2 | |
2994 | diff = p2diff |
|
3031 | diff = p2diff | |
2995 | baserev = orig.rev(parent) |
|
3032 | baserev = orig.rev(parent) | |
2996 | cachedelta = (baserev, diff) |
|
3033 | cachedelta = (baserev, diff) | |
2997 | elif source == b'storage': |
|
3034 | elif source == b'storage': | |
2998 | baserev = orig.deltaparent(rev) |
|
3035 | baserev = orig.deltaparent(rev) | |
2999 | cachedelta = (baserev, orig.revdiff(orig.node(baserev), rev)) |
|
3036 | cachedelta = (baserev, orig.revdiff(orig.node(baserev), rev)) | |
3000 |
|
3037 | |||
3001 | return ( |
|
3038 | return ( | |
3002 | (text, tr, linkrev, p1, p2), |
|
3039 | (text, tr, linkrev, p1, p2), | |
3003 | {'node': node, 'flags': flags, 'cachedelta': cachedelta}, |
|
3040 | {'node': node, 'flags': flags, 'cachedelta': cachedelta}, | |
3004 | ) |
|
3041 | ) | |
3005 |
|
3042 | |||
3006 |
|
3043 | |||
3007 | @contextlib.contextmanager |
|
3044 | @contextlib.contextmanager | |
3008 | def _temprevlog(ui, orig, truncaterev): |
|
3045 | def _temprevlog(ui, orig, truncaterev): | |
3009 | from mercurial import vfs as vfsmod |
|
3046 | from mercurial import vfs as vfsmod | |
3010 |
|
3047 | |||
3011 | if orig._inline: |
|
3048 | if orig._inline: | |
3012 | raise error.Abort('not supporting inline revlog (yet)') |
|
3049 | raise error.Abort('not supporting inline revlog (yet)') | |
3013 | revlogkwargs = {} |
|
3050 | revlogkwargs = {} | |
3014 | k = 'upperboundcomp' |
|
3051 | k = 'upperboundcomp' | |
3015 | if util.safehasattr(orig, k): |
|
3052 | if util.safehasattr(orig, k): | |
3016 | revlogkwargs[k] = getattr(orig, k) |
|
3053 | revlogkwargs[k] = getattr(orig, k) | |
3017 |
|
3054 | |||
3018 | origindexpath = orig.opener.join(orig.indexfile) |
|
3055 | indexfile = getattr(orig, '_indexfile', None) | |
3019 | origdatapath = orig.opener.join(orig.datafile) |
|
3056 | if indexfile is None: | |
3020 | indexname = 'revlog.i' |
|
3057 | # compatibility with <= hg-5.8 | |
3021 | dataname = 'revlog.d' |
|
3058 | indexfile = getattr(orig, 'indexfile') | |
|
3059 | origindexpath = orig.opener.join(indexfile) | |||
|
3060 | ||||
|
3061 | datafile = getattr(orig, '_datafile', getattr(orig, 'datafile')) | |||
|
3062 | origdatapath = orig.opener.join(datafile) | |||
|
3063 | radix = b'revlog' | |||
|
3064 | indexname = b'revlog.i' | |||
|
3065 | dataname = b'revlog.d' | |||
3022 |
|
3066 | |||
3023 | tmpdir = tempfile.mkdtemp(prefix='tmp-hgperf-') |
|
3067 | tmpdir = tempfile.mkdtemp(prefix='tmp-hgperf-') | |
3024 | try: |
|
3068 | try: | |
3025 | # copy the data file in a temporary directory |
|
3069 | # copy the data file in a temporary directory | |
3026 | ui.debug('copying data in %s\n' % tmpdir) |
|
3070 | ui.debug('copying data in %s\n' % tmpdir) | |
3027 | destindexpath = os.path.join(tmpdir, 'revlog.i') |
|
3071 | destindexpath = os.path.join(tmpdir, 'revlog.i') | |
3028 | destdatapath = os.path.join(tmpdir, 'revlog.d') |
|
3072 | destdatapath = os.path.join(tmpdir, 'revlog.d') | |
3029 | shutil.copyfile(origindexpath, destindexpath) |
|
3073 | shutil.copyfile(origindexpath, destindexpath) | |
3030 | shutil.copyfile(origdatapath, destdatapath) |
|
3074 | shutil.copyfile(origdatapath, destdatapath) | |
3031 |
|
3075 | |||
3032 | # remove the data we want to add again |
|
3076 | # remove the data we want to add again | |
3033 | ui.debug('truncating data to be rewritten\n') |
|
3077 | ui.debug('truncating data to be rewritten\n') | |
3034 | with open(destindexpath, 'ab') as index: |
|
3078 | with open(destindexpath, 'ab') as index: | |
3035 | index.seek(0) |
|
3079 | index.seek(0) | |
3036 | index.truncate(truncaterev * orig._io.size) |
|
3080 | index.truncate(truncaterev * orig._io.size) | |
3037 | with open(destdatapath, 'ab') as data: |
|
3081 | with open(destdatapath, 'ab') as data: | |
3038 | data.seek(0) |
|
3082 | data.seek(0) | |
3039 | data.truncate(orig.start(truncaterev)) |
|
3083 | data.truncate(orig.start(truncaterev)) | |
3040 |
|
3084 | |||
3041 | # instantiate a new revlog from the temporary copy |
|
3085 | # instantiate a new revlog from the temporary copy | |
3042 | ui.debug('truncating adding to be rewritten\n') |
|
3086 | ui.debug('truncating adding to be rewritten\n') | |
3043 | vfs = vfsmod.vfs(tmpdir) |
|
3087 | vfs = vfsmod.vfs(tmpdir) | |
3044 | vfs.options = getattr(orig.opener, 'options', None) |
|
3088 | vfs.options = getattr(orig.opener, 'options', None) | |
3045 |
|
3089 | |||
3046 | dest = revlog.revlog( |
|
3090 | try: | |
3047 | vfs, indexfile=indexname, datafile=dataname, **revlogkwargs |
|
3091 | dest = revlog(vfs, radix=radix, **revlogkwargs) | |
3048 | ) |
|
3092 | except TypeError: | |
|
3093 | dest = revlog( | |||
|
3094 | vfs, indexfile=indexname, datafile=dataname, **revlogkwargs | |||
|
3095 | ) | |||
3049 | if dest._inline: |
|
3096 | if dest._inline: | |
3050 | raise error.Abort('not supporting inline revlog (yet)') |
|
3097 | raise error.Abort('not supporting inline revlog (yet)') | |
3051 | # make sure internals are initialized |
|
3098 | # make sure internals are initialized | |
3052 | dest.revision(len(dest) - 1) |
|
3099 | dest.revision(len(dest) - 1) | |
3053 | yield dest |
|
3100 | yield dest | |
3054 | del dest, vfs |
|
3101 | del dest, vfs | |
3055 | finally: |
|
3102 | finally: | |
3056 | shutil.rmtree(tmpdir, True) |
|
3103 | shutil.rmtree(tmpdir, True) | |
3057 |
|
3104 | |||
3058 |
|
3105 | |||
3059 | @command( |
|
3106 | @command( | |
3060 | b'perf::revlogchunks|perfrevlogchunks', |
|
3107 | b'perf::revlogchunks|perfrevlogchunks', | |
3061 | revlogopts |
|
3108 | revlogopts | |
3062 | + formatteropts |
|
3109 | + formatteropts | |
3063 | + [ |
|
3110 | + [ | |
3064 | (b'e', b'engines', b'', b'compression engines to use'), |
|
3111 | (b'e', b'engines', b'', b'compression engines to use'), | |
3065 | (b's', b'startrev', 0, b'revision to start at'), |
|
3112 | (b's', b'startrev', 0, b'revision to start at'), | |
3066 | ], |
|
3113 | ], | |
3067 | b'-c|-m|FILE', |
|
3114 | b'-c|-m|FILE', | |
3068 | ) |
|
3115 | ) | |
3069 | def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts): |
|
3116 | def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts): | |
3070 | """Benchmark operations on revlog chunks. |
|
3117 | """Benchmark operations on revlog chunks. | |
3071 |
|
3118 | |||
3072 | Logically, each revlog is a collection of fulltext revisions. However, |
|
3119 | Logically, each revlog is a collection of fulltext revisions. However, | |
3073 | stored within each revlog are "chunks" of possibly compressed data. This |
|
3120 | stored within each revlog are "chunks" of possibly compressed data. This | |
3074 | data needs to be read and decompressed or compressed and written. |
|
3121 | data needs to be read and decompressed or compressed and written. | |
3075 |
|
3122 | |||
3076 | This command measures the time it takes to read+decompress and recompress |
|
3123 | This command measures the time it takes to read+decompress and recompress | |
3077 | chunks in a revlog. It effectively isolates I/O and compression performance. |
|
3124 | chunks in a revlog. It effectively isolates I/O and compression performance. | |
3078 | For measurements of higher-level operations like resolving revisions, |
|
3125 | For measurements of higher-level operations like resolving revisions, | |
3079 | see ``perfrevlogrevisions`` and ``perfrevlogrevision``. |
|
3126 | see ``perfrevlogrevisions`` and ``perfrevlogrevision``. | |
3080 | """ |
|
3127 | """ | |
3081 | opts = _byteskwargs(opts) |
|
3128 | opts = _byteskwargs(opts) | |
3082 |
|
3129 | |||
3083 | rl = cmdutil.openrevlog(repo, b'perfrevlogchunks', file_, opts) |
|
3130 | rl = cmdutil.openrevlog(repo, b'perfrevlogchunks', file_, opts) | |
3084 |
|
3131 | |||
3085 | # _chunkraw was renamed to _getsegmentforrevs. |
|
3132 | # _chunkraw was renamed to _getsegmentforrevs. | |
3086 | try: |
|
3133 | try: | |
3087 | segmentforrevs = rl._getsegmentforrevs |
|
3134 | segmentforrevs = rl._getsegmentforrevs | |
3088 | except AttributeError: |
|
3135 | except AttributeError: | |
3089 | segmentforrevs = rl._chunkraw |
|
3136 | segmentforrevs = rl._chunkraw | |
3090 |
|
3137 | |||
3091 | # Verify engines argument. |
|
3138 | # Verify engines argument. | |
3092 | if engines: |
|
3139 | if engines: | |
3093 | engines = {e.strip() for e in engines.split(b',')} |
|
3140 | engines = {e.strip() for e in engines.split(b',')} | |
3094 | for engine in engines: |
|
3141 | for engine in engines: | |
3095 | try: |
|
3142 | try: | |
3096 | util.compressionengines[engine] |
|
3143 | util.compressionengines[engine] | |
3097 | except KeyError: |
|
3144 | except KeyError: | |
3098 | raise error.Abort(b'unknown compression engine: %s' % engine) |
|
3145 | raise error.Abort(b'unknown compression engine: %s' % engine) | |
3099 | else: |
|
3146 | else: | |
3100 | engines = [] |
|
3147 | engines = [] | |
3101 | for e in util.compengines: |
|
3148 | for e in util.compengines: | |
3102 | engine = util.compengines[e] |
|
3149 | engine = util.compengines[e] | |
3103 | try: |
|
3150 | try: | |
3104 | if engine.available(): |
|
3151 | if engine.available(): | |
3105 | engine.revlogcompressor().compress(b'dummy') |
|
3152 | engine.revlogcompressor().compress(b'dummy') | |
3106 | engines.append(e) |
|
3153 | engines.append(e) | |
3107 | except NotImplementedError: |
|
3154 | except NotImplementedError: | |
3108 | pass |
|
3155 | pass | |
3109 |
|
3156 | |||
3110 | revs = list(rl.revs(startrev, len(rl) - 1)) |
|
3157 | revs = list(rl.revs(startrev, len(rl) - 1)) | |
3111 |
|
3158 | |||
3112 | def rlfh(rl): |
|
3159 | def rlfh(rl): | |
3113 | if rl._inline: |
|
3160 | if rl._inline: | |
3114 | return getsvfs(repo)(rl.indexfile) |
|
3161 | indexfile = getattr(rl, '_indexfile', None) | |
|
3162 | if indexfile is None: | |||
|
3163 | # compatibility with <= hg-5.8 | |||
|
3164 | indexfile = getattr(rl, 'indexfile') | |||
|
3165 | return getsvfs(repo)(indexfile) | |||
3115 | else: |
|
3166 | else: | |
3116 | return getsvfs(repo)(rl.datafile) |
|
3167 | datafile = getattr(rl, 'datafile', getattr(rl, 'datafile')) | |
|
3168 | return getsvfs(repo)(datafile) | |||
3117 |
|
3169 | |||
3118 | def doread(): |
|
3170 | def doread(): | |
3119 | rl.clearcaches() |
|
3171 | rl.clearcaches() | |
3120 | for rev in revs: |
|
3172 | for rev in revs: | |
3121 | segmentforrevs(rev, rev) |
|
3173 | segmentforrevs(rev, rev) | |
3122 |
|
3174 | |||
3123 | def doreadcachedfh(): |
|
3175 | def doreadcachedfh(): | |
3124 | rl.clearcaches() |
|
3176 | rl.clearcaches() | |
3125 | fh = rlfh(rl) |
|
3177 | fh = rlfh(rl) | |
3126 | for rev in revs: |
|
3178 | for rev in revs: | |
3127 | segmentforrevs(rev, rev, df=fh) |
|
3179 | segmentforrevs(rev, rev, df=fh) | |
3128 |
|
3180 | |||
3129 | def doreadbatch(): |
|
3181 | def doreadbatch(): | |
3130 | rl.clearcaches() |
|
3182 | rl.clearcaches() | |
3131 | segmentforrevs(revs[0], revs[-1]) |
|
3183 | segmentforrevs(revs[0], revs[-1]) | |
3132 |
|
3184 | |||
3133 | def doreadbatchcachedfh(): |
|
3185 | def doreadbatchcachedfh(): | |
3134 | rl.clearcaches() |
|
3186 | rl.clearcaches() | |
3135 | fh = rlfh(rl) |
|
3187 | fh = rlfh(rl) | |
3136 | segmentforrevs(revs[0], revs[-1], df=fh) |
|
3188 | segmentforrevs(revs[0], revs[-1], df=fh) | |
3137 |
|
3189 | |||
3138 | def dochunk(): |
|
3190 | def dochunk(): | |
3139 | rl.clearcaches() |
|
3191 | rl.clearcaches() | |
3140 | fh = rlfh(rl) |
|
3192 | fh = rlfh(rl) | |
3141 | for rev in revs: |
|
3193 | for rev in revs: | |
3142 | rl._chunk(rev, df=fh) |
|
3194 | rl._chunk(rev, df=fh) | |
3143 |
|
3195 | |||
3144 | chunks = [None] |
|
3196 | chunks = [None] | |
3145 |
|
3197 | |||
3146 | def dochunkbatch(): |
|
3198 | def dochunkbatch(): | |
3147 | rl.clearcaches() |
|
3199 | rl.clearcaches() | |
3148 | fh = rlfh(rl) |
|
3200 | fh = rlfh(rl) | |
3149 | # Save chunks as a side-effect. |
|
3201 | # Save chunks as a side-effect. | |
3150 | chunks[0] = rl._chunks(revs, df=fh) |
|
3202 | chunks[0] = rl._chunks(revs, df=fh) | |
3151 |
|
3203 | |||
3152 | def docompress(compressor): |
|
3204 | def docompress(compressor): | |
3153 | rl.clearcaches() |
|
3205 | rl.clearcaches() | |
3154 |
|
3206 | |||
3155 | try: |
|
3207 | try: | |
3156 | # Swap in the requested compression engine. |
|
3208 | # Swap in the requested compression engine. | |
3157 | oldcompressor = rl._compressor |
|
3209 | oldcompressor = rl._compressor | |
3158 | rl._compressor = compressor |
|
3210 | rl._compressor = compressor | |
3159 | for chunk in chunks[0]: |
|
3211 | for chunk in chunks[0]: | |
3160 | rl.compress(chunk) |
|
3212 | rl.compress(chunk) | |
3161 | finally: |
|
3213 | finally: | |
3162 | rl._compressor = oldcompressor |
|
3214 | rl._compressor = oldcompressor | |
3163 |
|
3215 | |||
3164 | benches = [ |
|
3216 | benches = [ | |
3165 | (lambda: doread(), b'read'), |
|
3217 | (lambda: doread(), b'read'), | |
3166 | (lambda: doreadcachedfh(), b'read w/ reused fd'), |
|
3218 | (lambda: doreadcachedfh(), b'read w/ reused fd'), | |
3167 | (lambda: doreadbatch(), b'read batch'), |
|
3219 | (lambda: doreadbatch(), b'read batch'), | |
3168 | (lambda: doreadbatchcachedfh(), b'read batch w/ reused fd'), |
|
3220 | (lambda: doreadbatchcachedfh(), b'read batch w/ reused fd'), | |
3169 | (lambda: dochunk(), b'chunk'), |
|
3221 | (lambda: dochunk(), b'chunk'), | |
3170 | (lambda: dochunkbatch(), b'chunk batch'), |
|
3222 | (lambda: dochunkbatch(), b'chunk batch'), | |
3171 | ] |
|
3223 | ] | |
3172 |
|
3224 | |||
3173 | for engine in sorted(engines): |
|
3225 | for engine in sorted(engines): | |
3174 | compressor = util.compengines[engine].revlogcompressor() |
|
3226 | compressor = util.compengines[engine].revlogcompressor() | |
3175 | benches.append( |
|
3227 | benches.append( | |
3176 | ( |
|
3228 | ( | |
3177 | functools.partial(docompress, compressor), |
|
3229 | functools.partial(docompress, compressor), | |
3178 | b'compress w/ %s' % engine, |
|
3230 | b'compress w/ %s' % engine, | |
3179 | ) |
|
3231 | ) | |
3180 | ) |
|
3232 | ) | |
3181 |
|
3233 | |||
3182 | for fn, title in benches: |
|
3234 | for fn, title in benches: | |
3183 | timer, fm = gettimer(ui, opts) |
|
3235 | timer, fm = gettimer(ui, opts) | |
3184 | timer(fn, title=title) |
|
3236 | timer(fn, title=title) | |
3185 | fm.end() |
|
3237 | fm.end() | |
3186 |
|
3238 | |||
3187 |
|
3239 | |||
3188 | @command( |
|
3240 | @command( | |
3189 | b'perf::revlogrevision|perfrevlogrevision', |
|
3241 | b'perf::revlogrevision|perfrevlogrevision', | |
3190 | revlogopts |
|
3242 | revlogopts | |
3191 | + formatteropts |
|
3243 | + formatteropts | |
3192 | + [(b'', b'cache', False, b'use caches instead of clearing')], |
|
3244 | + [(b'', b'cache', False, b'use caches instead of clearing')], | |
3193 | b'-c|-m|FILE REV', |
|
3245 | b'-c|-m|FILE REV', | |
3194 | ) |
|
3246 | ) | |
3195 | def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts): |
|
3247 | def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts): | |
3196 | """Benchmark obtaining a revlog revision. |
|
3248 | """Benchmark obtaining a revlog revision. | |
3197 |
|
3249 | |||
3198 | Obtaining a revlog revision consists of roughly the following steps: |
|
3250 | Obtaining a revlog revision consists of roughly the following steps: | |
3199 |
|
3251 | |||
3200 | 1. Compute the delta chain |
|
3252 | 1. Compute the delta chain | |
3201 | 2. Slice the delta chain if applicable |
|
3253 | 2. Slice the delta chain if applicable | |
3202 | 3. Obtain the raw chunks for that delta chain |
|
3254 | 3. Obtain the raw chunks for that delta chain | |
3203 | 4. Decompress each raw chunk |
|
3255 | 4. Decompress each raw chunk | |
3204 | 5. Apply binary patches to obtain fulltext |
|
3256 | 5. Apply binary patches to obtain fulltext | |
3205 | 6. Verify hash of fulltext |
|
3257 | 6. Verify hash of fulltext | |
3206 |
|
3258 | |||
3207 | This command measures the time spent in each of these phases. |
|
3259 | This command measures the time spent in each of these phases. | |
3208 | """ |
|
3260 | """ | |
3209 | opts = _byteskwargs(opts) |
|
3261 | opts = _byteskwargs(opts) | |
3210 |
|
3262 | |||
3211 | if opts.get(b'changelog') or opts.get(b'manifest'): |
|
3263 | if opts.get(b'changelog') or opts.get(b'manifest'): | |
3212 | file_, rev = None, file_ |
|
3264 | file_, rev = None, file_ | |
3213 | elif rev is None: |
|
3265 | elif rev is None: | |
3214 | raise error.CommandError(b'perfrevlogrevision', b'invalid arguments') |
|
3266 | raise error.CommandError(b'perfrevlogrevision', b'invalid arguments') | |
3215 |
|
3267 | |||
3216 | r = cmdutil.openrevlog(repo, b'perfrevlogrevision', file_, opts) |
|
3268 | r = cmdutil.openrevlog(repo, b'perfrevlogrevision', file_, opts) | |
3217 |
|
3269 | |||
3218 | # _chunkraw was renamed to _getsegmentforrevs. |
|
3270 | # _chunkraw was renamed to _getsegmentforrevs. | |
3219 | try: |
|
3271 | try: | |
3220 | segmentforrevs = r._getsegmentforrevs |
|
3272 | segmentforrevs = r._getsegmentforrevs | |
3221 | except AttributeError: |
|
3273 | except AttributeError: | |
3222 | segmentforrevs = r._chunkraw |
|
3274 | segmentforrevs = r._chunkraw | |
3223 |
|
3275 | |||
3224 | node = r.lookup(rev) |
|
3276 | node = r.lookup(rev) | |
3225 | rev = r.rev(node) |
|
3277 | rev = r.rev(node) | |
3226 |
|
3278 | |||
3227 | def getrawchunks(data, chain): |
|
3279 | def getrawchunks(data, chain): | |
3228 | start = r.start |
|
3280 | start = r.start | |
3229 | length = r.length |
|
3281 | length = r.length | |
3230 | inline = r._inline |
|
3282 | inline = r._inline | |
3231 | try: |
|
3283 | try: | |
3232 | iosize = r.index.entry_size |
|
3284 | iosize = r.index.entry_size | |
3233 | except AttributeError: |
|
3285 | except AttributeError: | |
3234 | iosize = r._io.size |
|
3286 | iosize = r._io.size | |
3235 | buffer = util.buffer |
|
3287 | buffer = util.buffer | |
3236 |
|
3288 | |||
3237 | chunks = [] |
|
3289 | chunks = [] | |
3238 | ladd = chunks.append |
|
3290 | ladd = chunks.append | |
3239 | for idx, item in enumerate(chain): |
|
3291 | for idx, item in enumerate(chain): | |
3240 | offset = start(item[0]) |
|
3292 | offset = start(item[0]) | |
3241 | bits = data[idx] |
|
3293 | bits = data[idx] | |
3242 | for rev in item: |
|
3294 | for rev in item: | |
3243 | chunkstart = start(rev) |
|
3295 | chunkstart = start(rev) | |
3244 | if inline: |
|
3296 | if inline: | |
3245 | chunkstart += (rev + 1) * iosize |
|
3297 | chunkstart += (rev + 1) * iosize | |
3246 | chunklength = length(rev) |
|
3298 | chunklength = length(rev) | |
3247 | ladd(buffer(bits, chunkstart - offset, chunklength)) |
|
3299 | ladd(buffer(bits, chunkstart - offset, chunklength)) | |
3248 |
|
3300 | |||
3249 | return chunks |
|
3301 | return chunks | |
3250 |
|
3302 | |||
3251 | def dodeltachain(rev): |
|
3303 | def dodeltachain(rev): | |
3252 | if not cache: |
|
3304 | if not cache: | |
3253 | r.clearcaches() |
|
3305 | r.clearcaches() | |
3254 | r._deltachain(rev) |
|
3306 | r._deltachain(rev) | |
3255 |
|
3307 | |||
3256 | def doread(chain): |
|
3308 | def doread(chain): | |
3257 | if not cache: |
|
3309 | if not cache: | |
3258 | r.clearcaches() |
|
3310 | r.clearcaches() | |
3259 | for item in slicedchain: |
|
3311 | for item in slicedchain: | |
3260 | segmentforrevs(item[0], item[-1]) |
|
3312 | segmentforrevs(item[0], item[-1]) | |
3261 |
|
3313 | |||
3262 | def doslice(r, chain, size): |
|
3314 | def doslice(r, chain, size): | |
3263 | for s in slicechunk(r, chain, targetsize=size): |
|
3315 | for s in slicechunk(r, chain, targetsize=size): | |
3264 | pass |
|
3316 | pass | |
3265 |
|
3317 | |||
3266 | def dorawchunks(data, chain): |
|
3318 | def dorawchunks(data, chain): | |
3267 | if not cache: |
|
3319 | if not cache: | |
3268 | r.clearcaches() |
|
3320 | r.clearcaches() | |
3269 | getrawchunks(data, chain) |
|
3321 | getrawchunks(data, chain) | |
3270 |
|
3322 | |||
3271 | def dodecompress(chunks): |
|
3323 | def dodecompress(chunks): | |
3272 | decomp = r.decompress |
|
3324 | decomp = r.decompress | |
3273 | for chunk in chunks: |
|
3325 | for chunk in chunks: | |
3274 | decomp(chunk) |
|
3326 | decomp(chunk) | |
3275 |
|
3327 | |||
3276 | def dopatch(text, bins): |
|
3328 | def dopatch(text, bins): | |
3277 | if not cache: |
|
3329 | if not cache: | |
3278 | r.clearcaches() |
|
3330 | r.clearcaches() | |
3279 | mdiff.patches(text, bins) |
|
3331 | mdiff.patches(text, bins) | |
3280 |
|
3332 | |||
3281 | def dohash(text): |
|
3333 | def dohash(text): | |
3282 | if not cache: |
|
3334 | if not cache: | |
3283 | r.clearcaches() |
|
3335 | r.clearcaches() | |
3284 | r.checkhash(text, node, rev=rev) |
|
3336 | r.checkhash(text, node, rev=rev) | |
3285 |
|
3337 | |||
3286 | def dorevision(): |
|
3338 | def dorevision(): | |
3287 | if not cache: |
|
3339 | if not cache: | |
3288 | r.clearcaches() |
|
3340 | r.clearcaches() | |
3289 | r.revision(node) |
|
3341 | r.revision(node) | |
3290 |
|
3342 | |||
3291 | try: |
|
3343 | try: | |
3292 | from mercurial.revlogutils.deltas import slicechunk |
|
3344 | from mercurial.revlogutils.deltas import slicechunk | |
3293 | except ImportError: |
|
3345 | except ImportError: | |
3294 | slicechunk = getattr(revlog, '_slicechunk', None) |
|
3346 | slicechunk = getattr(revlog, '_slicechunk', None) | |
3295 |
|
3347 | |||
3296 | size = r.length(rev) |
|
3348 | size = r.length(rev) | |
3297 | chain = r._deltachain(rev)[0] |
|
3349 | chain = r._deltachain(rev)[0] | |
3298 | if not getattr(r, '_withsparseread', False): |
|
3350 | if not getattr(r, '_withsparseread', False): | |
3299 | slicedchain = (chain,) |
|
3351 | slicedchain = (chain,) | |
3300 | else: |
|
3352 | else: | |
3301 | slicedchain = tuple(slicechunk(r, chain, targetsize=size)) |
|
3353 | slicedchain = tuple(slicechunk(r, chain, targetsize=size)) | |
3302 | data = [segmentforrevs(seg[0], seg[-1])[1] for seg in slicedchain] |
|
3354 | data = [segmentforrevs(seg[0], seg[-1])[1] for seg in slicedchain] | |
3303 | rawchunks = getrawchunks(data, slicedchain) |
|
3355 | rawchunks = getrawchunks(data, slicedchain) | |
3304 | bins = r._chunks(chain) |
|
3356 | bins = r._chunks(chain) | |
3305 | text = bytes(bins[0]) |
|
3357 | text = bytes(bins[0]) | |
3306 | bins = bins[1:] |
|
3358 | bins = bins[1:] | |
3307 | text = mdiff.patches(text, bins) |
|
3359 | text = mdiff.patches(text, bins) | |
3308 |
|
3360 | |||
3309 | benches = [ |
|
3361 | benches = [ | |
3310 | (lambda: dorevision(), b'full'), |
|
3362 | (lambda: dorevision(), b'full'), | |
3311 | (lambda: dodeltachain(rev), b'deltachain'), |
|
3363 | (lambda: dodeltachain(rev), b'deltachain'), | |
3312 | (lambda: doread(chain), b'read'), |
|
3364 | (lambda: doread(chain), b'read'), | |
3313 | ] |
|
3365 | ] | |
3314 |
|
3366 | |||
3315 | if getattr(r, '_withsparseread', False): |
|
3367 | if getattr(r, '_withsparseread', False): | |
3316 | slicing = (lambda: doslice(r, chain, size), b'slice-sparse-chain') |
|
3368 | slicing = (lambda: doslice(r, chain, size), b'slice-sparse-chain') | |
3317 | benches.append(slicing) |
|
3369 | benches.append(slicing) | |
3318 |
|
3370 | |||
3319 | benches.extend( |
|
3371 | benches.extend( | |
3320 | [ |
|
3372 | [ | |
3321 | (lambda: dorawchunks(data, slicedchain), b'rawchunks'), |
|
3373 | (lambda: dorawchunks(data, slicedchain), b'rawchunks'), | |
3322 | (lambda: dodecompress(rawchunks), b'decompress'), |
|
3374 | (lambda: dodecompress(rawchunks), b'decompress'), | |
3323 | (lambda: dopatch(text, bins), b'patch'), |
|
3375 | (lambda: dopatch(text, bins), b'patch'), | |
3324 | (lambda: dohash(text), b'hash'), |
|
3376 | (lambda: dohash(text), b'hash'), | |
3325 | ] |
|
3377 | ] | |
3326 | ) |
|
3378 | ) | |
3327 |
|
3379 | |||
3328 | timer, fm = gettimer(ui, opts) |
|
3380 | timer, fm = gettimer(ui, opts) | |
3329 | for fn, title in benches: |
|
3381 | for fn, title in benches: | |
3330 | timer(fn, title=title) |
|
3382 | timer(fn, title=title) | |
3331 | fm.end() |
|
3383 | fm.end() | |
3332 |
|
3384 | |||
3333 |
|
3385 | |||
3334 | @command( |
|
3386 | @command( | |
3335 | b'perf::revset|perfrevset', |
|
3387 | b'perf::revset|perfrevset', | |
3336 | [ |
|
3388 | [ | |
3337 | (b'C', b'clear', False, b'clear volatile cache between each call.'), |
|
3389 | (b'C', b'clear', False, b'clear volatile cache between each call.'), | |
3338 | (b'', b'contexts', False, b'obtain changectx for each revision'), |
|
3390 | (b'', b'contexts', False, b'obtain changectx for each revision'), | |
3339 | ] |
|
3391 | ] | |
3340 | + formatteropts, |
|
3392 | + formatteropts, | |
3341 | b"REVSET", |
|
3393 | b"REVSET", | |
3342 | ) |
|
3394 | ) | |
3343 | def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts): |
|
3395 | def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts): | |
3344 | """benchmark the execution time of a revset |
|
3396 | """benchmark the execution time of a revset | |
3345 |
|
3397 | |||
3346 | Use the --clean option if need to evaluate the impact of build volatile |
|
3398 | Use the --clean option if need to evaluate the impact of build volatile | |
3347 | revisions set cache on the revset execution. Volatile cache hold filtered |
|
3399 | revisions set cache on the revset execution. Volatile cache hold filtered | |
3348 | and obsolete related cache.""" |
|
3400 | and obsolete related cache.""" | |
3349 | opts = _byteskwargs(opts) |
|
3401 | opts = _byteskwargs(opts) | |
3350 |
|
3402 | |||
3351 | timer, fm = gettimer(ui, opts) |
|
3403 | timer, fm = gettimer(ui, opts) | |
3352 |
|
3404 | |||
3353 | def d(): |
|
3405 | def d(): | |
3354 | if clear: |
|
3406 | if clear: | |
3355 | repo.invalidatevolatilesets() |
|
3407 | repo.invalidatevolatilesets() | |
3356 | if contexts: |
|
3408 | if contexts: | |
3357 | for ctx in repo.set(expr): |
|
3409 | for ctx in repo.set(expr): | |
3358 | pass |
|
3410 | pass | |
3359 | else: |
|
3411 | else: | |
3360 | for r in repo.revs(expr): |
|
3412 | for r in repo.revs(expr): | |
3361 | pass |
|
3413 | pass | |
3362 |
|
3414 | |||
3363 | timer(d) |
|
3415 | timer(d) | |
3364 | fm.end() |
|
3416 | fm.end() | |
3365 |
|
3417 | |||
3366 |
|
3418 | |||
3367 | @command( |
|
3419 | @command( | |
3368 | b'perf::volatilesets|perfvolatilesets', |
|
3420 | b'perf::volatilesets|perfvolatilesets', | |
3369 | [ |
|
3421 | [ | |
3370 | (b'', b'clear-obsstore', False, b'drop obsstore between each call.'), |
|
3422 | (b'', b'clear-obsstore', False, b'drop obsstore between each call.'), | |
3371 | ] |
|
3423 | ] | |
3372 | + formatteropts, |
|
3424 | + formatteropts, | |
3373 | ) |
|
3425 | ) | |
3374 | def perfvolatilesets(ui, repo, *names, **opts): |
|
3426 | def perfvolatilesets(ui, repo, *names, **opts): | |
3375 | """benchmark the computation of various volatile set |
|
3427 | """benchmark the computation of various volatile set | |
3376 |
|
3428 | |||
3377 | Volatile set computes element related to filtering and obsolescence.""" |
|
3429 | Volatile set computes element related to filtering and obsolescence.""" | |
3378 | opts = _byteskwargs(opts) |
|
3430 | opts = _byteskwargs(opts) | |
3379 | timer, fm = gettimer(ui, opts) |
|
3431 | timer, fm = gettimer(ui, opts) | |
3380 | repo = repo.unfiltered() |
|
3432 | repo = repo.unfiltered() | |
3381 |
|
3433 | |||
3382 | def getobs(name): |
|
3434 | def getobs(name): | |
3383 | def d(): |
|
3435 | def d(): | |
3384 | repo.invalidatevolatilesets() |
|
3436 | repo.invalidatevolatilesets() | |
3385 | if opts[b'clear_obsstore']: |
|
3437 | if opts[b'clear_obsstore']: | |
3386 | clearfilecache(repo, b'obsstore') |
|
3438 | clearfilecache(repo, b'obsstore') | |
3387 | obsolete.getrevs(repo, name) |
|
3439 | obsolete.getrevs(repo, name) | |
3388 |
|
3440 | |||
3389 | return d |
|
3441 | return d | |
3390 |
|
3442 | |||
3391 | allobs = sorted(obsolete.cachefuncs) |
|
3443 | allobs = sorted(obsolete.cachefuncs) | |
3392 | if names: |
|
3444 | if names: | |
3393 | allobs = [n for n in allobs if n in names] |
|
3445 | allobs = [n for n in allobs if n in names] | |
3394 |
|
3446 | |||
3395 | for name in allobs: |
|
3447 | for name in allobs: | |
3396 | timer(getobs(name), title=name) |
|
3448 | timer(getobs(name), title=name) | |
3397 |
|
3449 | |||
3398 | def getfiltered(name): |
|
3450 | def getfiltered(name): | |
3399 | def d(): |
|
3451 | def d(): | |
3400 | repo.invalidatevolatilesets() |
|
3452 | repo.invalidatevolatilesets() | |
3401 | if opts[b'clear_obsstore']: |
|
3453 | if opts[b'clear_obsstore']: | |
3402 | clearfilecache(repo, b'obsstore') |
|
3454 | clearfilecache(repo, b'obsstore') | |
3403 | repoview.filterrevs(repo, name) |
|
3455 | repoview.filterrevs(repo, name) | |
3404 |
|
3456 | |||
3405 | return d |
|
3457 | return d | |
3406 |
|
3458 | |||
3407 | allfilter = sorted(repoview.filtertable) |
|
3459 | allfilter = sorted(repoview.filtertable) | |
3408 | if names: |
|
3460 | if names: | |
3409 | allfilter = [n for n in allfilter if n in names] |
|
3461 | allfilter = [n for n in allfilter if n in names] | |
3410 |
|
3462 | |||
3411 | for name in allfilter: |
|
3463 | for name in allfilter: | |
3412 | timer(getfiltered(name), title=name) |
|
3464 | timer(getfiltered(name), title=name) | |
3413 | fm.end() |
|
3465 | fm.end() | |
3414 |
|
3466 | |||
3415 |
|
3467 | |||
3416 | @command( |
|
3468 | @command( | |
3417 | b'perf::branchmap|perfbranchmap', |
|
3469 | b'perf::branchmap|perfbranchmap', | |
3418 | [ |
|
3470 | [ | |
3419 | (b'f', b'full', False, b'Includes build time of subset'), |
|
3471 | (b'f', b'full', False, b'Includes build time of subset'), | |
3420 | ( |
|
3472 | ( | |
3421 | b'', |
|
3473 | b'', | |
3422 | b'clear-revbranch', |
|
3474 | b'clear-revbranch', | |
3423 | False, |
|
3475 | False, | |
3424 | b'purge the revbranch cache between computation', |
|
3476 | b'purge the revbranch cache between computation', | |
3425 | ), |
|
3477 | ), | |
3426 | ] |
|
3478 | ] | |
3427 | + formatteropts, |
|
3479 | + formatteropts, | |
3428 | ) |
|
3480 | ) | |
3429 | def perfbranchmap(ui, repo, *filternames, **opts): |
|
3481 | def perfbranchmap(ui, repo, *filternames, **opts): | |
3430 | """benchmark the update of a branchmap |
|
3482 | """benchmark the update of a branchmap | |
3431 |
|
3483 | |||
3432 | This benchmarks the full repo.branchmap() call with read and write disabled |
|
3484 | This benchmarks the full repo.branchmap() call with read and write disabled | |
3433 | """ |
|
3485 | """ | |
3434 | opts = _byteskwargs(opts) |
|
3486 | opts = _byteskwargs(opts) | |
3435 | full = opts.get(b"full", False) |
|
3487 | full = opts.get(b"full", False) | |
3436 | clear_revbranch = opts.get(b"clear_revbranch", False) |
|
3488 | clear_revbranch = opts.get(b"clear_revbranch", False) | |
3437 | timer, fm = gettimer(ui, opts) |
|
3489 | timer, fm = gettimer(ui, opts) | |
3438 |
|
3490 | |||
3439 | def getbranchmap(filtername): |
|
3491 | def getbranchmap(filtername): | |
3440 | """generate a benchmark function for the filtername""" |
|
3492 | """generate a benchmark function for the filtername""" | |
3441 | if filtername is None: |
|
3493 | if filtername is None: | |
3442 | view = repo |
|
3494 | view = repo | |
3443 | else: |
|
3495 | else: | |
3444 | view = repo.filtered(filtername) |
|
3496 | view = repo.filtered(filtername) | |
3445 | if util.safehasattr(view._branchcaches, '_per_filter'): |
|
3497 | if util.safehasattr(view._branchcaches, '_per_filter'): | |
3446 | filtered = view._branchcaches._per_filter |
|
3498 | filtered = view._branchcaches._per_filter | |
3447 | else: |
|
3499 | else: | |
3448 | # older versions |
|
3500 | # older versions | |
3449 | filtered = view._branchcaches |
|
3501 | filtered = view._branchcaches | |
3450 |
|
3502 | |||
3451 | def d(): |
|
3503 | def d(): | |
3452 | if clear_revbranch: |
|
3504 | if clear_revbranch: | |
3453 | repo.revbranchcache()._clear() |
|
3505 | repo.revbranchcache()._clear() | |
3454 | if full: |
|
3506 | if full: | |
3455 | view._branchcaches.clear() |
|
3507 | view._branchcaches.clear() | |
3456 | else: |
|
3508 | else: | |
3457 | filtered.pop(filtername, None) |
|
3509 | filtered.pop(filtername, None) | |
3458 | view.branchmap() |
|
3510 | view.branchmap() | |
3459 |
|
3511 | |||
3460 | return d |
|
3512 | return d | |
3461 |
|
3513 | |||
3462 | # add filter in smaller subset to bigger subset |
|
3514 | # add filter in smaller subset to bigger subset | |
3463 | possiblefilters = set(repoview.filtertable) |
|
3515 | possiblefilters = set(repoview.filtertable) | |
3464 | if filternames: |
|
3516 | if filternames: | |
3465 | possiblefilters &= set(filternames) |
|
3517 | possiblefilters &= set(filternames) | |
3466 | subsettable = getbranchmapsubsettable() |
|
3518 | subsettable = getbranchmapsubsettable() | |
3467 | allfilters = [] |
|
3519 | allfilters = [] | |
3468 | while possiblefilters: |
|
3520 | while possiblefilters: | |
3469 | for name in possiblefilters: |
|
3521 | for name in possiblefilters: | |
3470 | subset = subsettable.get(name) |
|
3522 | subset = subsettable.get(name) | |
3471 | if subset not in possiblefilters: |
|
3523 | if subset not in possiblefilters: | |
3472 | break |
|
3524 | break | |
3473 | else: |
|
3525 | else: | |
3474 | assert False, b'subset cycle %s!' % possiblefilters |
|
3526 | assert False, b'subset cycle %s!' % possiblefilters | |
3475 | allfilters.append(name) |
|
3527 | allfilters.append(name) | |
3476 | possiblefilters.remove(name) |
|
3528 | possiblefilters.remove(name) | |
3477 |
|
3529 | |||
3478 | # warm the cache |
|
3530 | # warm the cache | |
3479 | if not full: |
|
3531 | if not full: | |
3480 | for name in allfilters: |
|
3532 | for name in allfilters: | |
3481 | repo.filtered(name).branchmap() |
|
3533 | repo.filtered(name).branchmap() | |
3482 | if not filternames or b'unfiltered' in filternames: |
|
3534 | if not filternames or b'unfiltered' in filternames: | |
3483 | # add unfiltered |
|
3535 | # add unfiltered | |
3484 | allfilters.append(None) |
|
3536 | allfilters.append(None) | |
3485 |
|
3537 | |||
3486 | if util.safehasattr(branchmap.branchcache, 'fromfile'): |
|
3538 | if util.safehasattr(branchmap.branchcache, 'fromfile'): | |
3487 | branchcacheread = safeattrsetter(branchmap.branchcache, b'fromfile') |
|
3539 | branchcacheread = safeattrsetter(branchmap.branchcache, b'fromfile') | |
3488 | branchcacheread.set(classmethod(lambda *args: None)) |
|
3540 | branchcacheread.set(classmethod(lambda *args: None)) | |
3489 | else: |
|
3541 | else: | |
3490 | # older versions |
|
3542 | # older versions | |
3491 | branchcacheread = safeattrsetter(branchmap, b'read') |
|
3543 | branchcacheread = safeattrsetter(branchmap, b'read') | |
3492 | branchcacheread.set(lambda *args: None) |
|
3544 | branchcacheread.set(lambda *args: None) | |
3493 | branchcachewrite = safeattrsetter(branchmap.branchcache, b'write') |
|
3545 | branchcachewrite = safeattrsetter(branchmap.branchcache, b'write') | |
3494 | branchcachewrite.set(lambda *args: None) |
|
3546 | branchcachewrite.set(lambda *args: None) | |
3495 | try: |
|
3547 | try: | |
3496 | for name in allfilters: |
|
3548 | for name in allfilters: | |
3497 | printname = name |
|
3549 | printname = name | |
3498 | if name is None: |
|
3550 | if name is None: | |
3499 | printname = b'unfiltered' |
|
3551 | printname = b'unfiltered' | |
3500 | timer(getbranchmap(name), title=printname) |
|
3552 | timer(getbranchmap(name), title=printname) | |
3501 | finally: |
|
3553 | finally: | |
3502 | branchcacheread.restore() |
|
3554 | branchcacheread.restore() | |
3503 | branchcachewrite.restore() |
|
3555 | branchcachewrite.restore() | |
3504 | fm.end() |
|
3556 | fm.end() | |
3505 |
|
3557 | |||
3506 |
|
3558 | |||
3507 | @command( |
|
3559 | @command( | |
3508 | b'perf::branchmapupdate|perfbranchmapupdate', |
|
3560 | b'perf::branchmapupdate|perfbranchmapupdate', | |
3509 | [ |
|
3561 | [ | |
3510 | (b'', b'base', [], b'subset of revision to start from'), |
|
3562 | (b'', b'base', [], b'subset of revision to start from'), | |
3511 | (b'', b'target', [], b'subset of revision to end with'), |
|
3563 | (b'', b'target', [], b'subset of revision to end with'), | |
3512 | (b'', b'clear-caches', False, b'clear cache between each runs'), |
|
3564 | (b'', b'clear-caches', False, b'clear cache between each runs'), | |
3513 | ] |
|
3565 | ] | |
3514 | + formatteropts, |
|
3566 | + formatteropts, | |
3515 | ) |
|
3567 | ) | |
3516 | def perfbranchmapupdate(ui, repo, base=(), target=(), **opts): |
|
3568 | def perfbranchmapupdate(ui, repo, base=(), target=(), **opts): | |
3517 | """benchmark branchmap update from for <base> revs to <target> revs |
|
3569 | """benchmark branchmap update from for <base> revs to <target> revs | |
3518 |
|
3570 | |||
3519 | If `--clear-caches` is passed, the following items will be reset before |
|
3571 | If `--clear-caches` is passed, the following items will be reset before | |
3520 | each update: |
|
3572 | each update: | |
3521 | * the changelog instance and associated indexes |
|
3573 | * the changelog instance and associated indexes | |
3522 | * the rev-branch-cache instance |
|
3574 | * the rev-branch-cache instance | |
3523 |
|
3575 | |||
3524 | Examples: |
|
3576 | Examples: | |
3525 |
|
3577 | |||
3526 | # update for the one last revision |
|
3578 | # update for the one last revision | |
3527 | $ hg perfbranchmapupdate --base 'not tip' --target 'tip' |
|
3579 | $ hg perfbranchmapupdate --base 'not tip' --target 'tip' | |
3528 |
|
3580 | |||
3529 | $ update for change coming with a new branch |
|
3581 | $ update for change coming with a new branch | |
3530 | $ hg perfbranchmapupdate --base 'stable' --target 'default' |
|
3582 | $ hg perfbranchmapupdate --base 'stable' --target 'default' | |
3531 | """ |
|
3583 | """ | |
3532 | from mercurial import branchmap |
|
3584 | from mercurial import branchmap | |
3533 | from mercurial import repoview |
|
3585 | from mercurial import repoview | |
3534 |
|
3586 | |||
3535 | opts = _byteskwargs(opts) |
|
3587 | opts = _byteskwargs(opts) | |
3536 | timer, fm = gettimer(ui, opts) |
|
3588 | timer, fm = gettimer(ui, opts) | |
3537 | clearcaches = opts[b'clear_caches'] |
|
3589 | clearcaches = opts[b'clear_caches'] | |
3538 | unfi = repo.unfiltered() |
|
3590 | unfi = repo.unfiltered() | |
3539 | x = [None] # used to pass data between closure |
|
3591 | x = [None] # used to pass data between closure | |
3540 |
|
3592 | |||
3541 | # we use a `list` here to avoid possible side effect from smartset |
|
3593 | # we use a `list` here to avoid possible side effect from smartset | |
3542 | baserevs = list(scmutil.revrange(repo, base)) |
|
3594 | baserevs = list(scmutil.revrange(repo, base)) | |
3543 | targetrevs = list(scmutil.revrange(repo, target)) |
|
3595 | targetrevs = list(scmutil.revrange(repo, target)) | |
3544 | if not baserevs: |
|
3596 | if not baserevs: | |
3545 | raise error.Abort(b'no revisions selected for --base') |
|
3597 | raise error.Abort(b'no revisions selected for --base') | |
3546 | if not targetrevs: |
|
3598 | if not targetrevs: | |
3547 | raise error.Abort(b'no revisions selected for --target') |
|
3599 | raise error.Abort(b'no revisions selected for --target') | |
3548 |
|
3600 | |||
3549 | # make sure the target branchmap also contains the one in the base |
|
3601 | # make sure the target branchmap also contains the one in the base | |
3550 | targetrevs = list(set(baserevs) | set(targetrevs)) |
|
3602 | targetrevs = list(set(baserevs) | set(targetrevs)) | |
3551 | targetrevs.sort() |
|
3603 | targetrevs.sort() | |
3552 |
|
3604 | |||
3553 | cl = repo.changelog |
|
3605 | cl = repo.changelog | |
3554 | allbaserevs = list(cl.ancestors(baserevs, inclusive=True)) |
|
3606 | allbaserevs = list(cl.ancestors(baserevs, inclusive=True)) | |
3555 | allbaserevs.sort() |
|
3607 | allbaserevs.sort() | |
3556 | alltargetrevs = frozenset(cl.ancestors(targetrevs, inclusive=True)) |
|
3608 | alltargetrevs = frozenset(cl.ancestors(targetrevs, inclusive=True)) | |
3557 |
|
3609 | |||
3558 | newrevs = list(alltargetrevs.difference(allbaserevs)) |
|
3610 | newrevs = list(alltargetrevs.difference(allbaserevs)) | |
3559 | newrevs.sort() |
|
3611 | newrevs.sort() | |
3560 |
|
3612 | |||
3561 | allrevs = frozenset(unfi.changelog.revs()) |
|
3613 | allrevs = frozenset(unfi.changelog.revs()) | |
3562 | basefilterrevs = frozenset(allrevs.difference(allbaserevs)) |
|
3614 | basefilterrevs = frozenset(allrevs.difference(allbaserevs)) | |
3563 | targetfilterrevs = frozenset(allrevs.difference(alltargetrevs)) |
|
3615 | targetfilterrevs = frozenset(allrevs.difference(alltargetrevs)) | |
3564 |
|
3616 | |||
3565 | def basefilter(repo, visibilityexceptions=None): |
|
3617 | def basefilter(repo, visibilityexceptions=None): | |
3566 | return basefilterrevs |
|
3618 | return basefilterrevs | |
3567 |
|
3619 | |||
3568 | def targetfilter(repo, visibilityexceptions=None): |
|
3620 | def targetfilter(repo, visibilityexceptions=None): | |
3569 | return targetfilterrevs |
|
3621 | return targetfilterrevs | |
3570 |
|
3622 | |||
3571 | msg = b'benchmark of branchmap with %d revisions with %d new ones\n' |
|
3623 | msg = b'benchmark of branchmap with %d revisions with %d new ones\n' | |
3572 | ui.status(msg % (len(allbaserevs), len(newrevs))) |
|
3624 | ui.status(msg % (len(allbaserevs), len(newrevs))) | |
3573 | if targetfilterrevs: |
|
3625 | if targetfilterrevs: | |
3574 | msg = b'(%d revisions still filtered)\n' |
|
3626 | msg = b'(%d revisions still filtered)\n' | |
3575 | ui.status(msg % len(targetfilterrevs)) |
|
3627 | ui.status(msg % len(targetfilterrevs)) | |
3576 |
|
3628 | |||
3577 | try: |
|
3629 | try: | |
3578 | repoview.filtertable[b'__perf_branchmap_update_base'] = basefilter |
|
3630 | repoview.filtertable[b'__perf_branchmap_update_base'] = basefilter | |
3579 | repoview.filtertable[b'__perf_branchmap_update_target'] = targetfilter |
|
3631 | repoview.filtertable[b'__perf_branchmap_update_target'] = targetfilter | |
3580 |
|
3632 | |||
3581 | baserepo = repo.filtered(b'__perf_branchmap_update_base') |
|
3633 | baserepo = repo.filtered(b'__perf_branchmap_update_base') | |
3582 | targetrepo = repo.filtered(b'__perf_branchmap_update_target') |
|
3634 | targetrepo = repo.filtered(b'__perf_branchmap_update_target') | |
3583 |
|
3635 | |||
3584 | # try to find an existing branchmap to reuse |
|
3636 | # try to find an existing branchmap to reuse | |
3585 | subsettable = getbranchmapsubsettable() |
|
3637 | subsettable = getbranchmapsubsettable() | |
3586 | candidatefilter = subsettable.get(None) |
|
3638 | candidatefilter = subsettable.get(None) | |
3587 | while candidatefilter is not None: |
|
3639 | while candidatefilter is not None: | |
3588 | candidatebm = repo.filtered(candidatefilter).branchmap() |
|
3640 | candidatebm = repo.filtered(candidatefilter).branchmap() | |
3589 | if candidatebm.validfor(baserepo): |
|
3641 | if candidatebm.validfor(baserepo): | |
3590 | filtered = repoview.filterrevs(repo, candidatefilter) |
|
3642 | filtered = repoview.filterrevs(repo, candidatefilter) | |
3591 | missing = [r for r in allbaserevs if r in filtered] |
|
3643 | missing = [r for r in allbaserevs if r in filtered] | |
3592 | base = candidatebm.copy() |
|
3644 | base = candidatebm.copy() | |
3593 | base.update(baserepo, missing) |
|
3645 | base.update(baserepo, missing) | |
3594 | break |
|
3646 | break | |
3595 | candidatefilter = subsettable.get(candidatefilter) |
|
3647 | candidatefilter = subsettable.get(candidatefilter) | |
3596 | else: |
|
3648 | else: | |
3597 | # no suitable subset where found |
|
3649 | # no suitable subset where found | |
3598 | base = branchmap.branchcache() |
|
3650 | base = branchmap.branchcache() | |
3599 | base.update(baserepo, allbaserevs) |
|
3651 | base.update(baserepo, allbaserevs) | |
3600 |
|
3652 | |||
3601 | def setup(): |
|
3653 | def setup(): | |
3602 | x[0] = base.copy() |
|
3654 | x[0] = base.copy() | |
3603 | if clearcaches: |
|
3655 | if clearcaches: | |
3604 | unfi._revbranchcache = None |
|
3656 | unfi._revbranchcache = None | |
3605 | clearchangelog(repo) |
|
3657 | clearchangelog(repo) | |
3606 |
|
3658 | |||
3607 | def bench(): |
|
3659 | def bench(): | |
3608 | x[0].update(targetrepo, newrevs) |
|
3660 | x[0].update(targetrepo, newrevs) | |
3609 |
|
3661 | |||
3610 | timer(bench, setup=setup) |
|
3662 | timer(bench, setup=setup) | |
3611 | fm.end() |
|
3663 | fm.end() | |
3612 | finally: |
|
3664 | finally: | |
3613 | repoview.filtertable.pop(b'__perf_branchmap_update_base', None) |
|
3665 | repoview.filtertable.pop(b'__perf_branchmap_update_base', None) | |
3614 | repoview.filtertable.pop(b'__perf_branchmap_update_target', None) |
|
3666 | repoview.filtertable.pop(b'__perf_branchmap_update_target', None) | |
3615 |
|
3667 | |||
3616 |
|
3668 | |||
3617 | @command( |
|
3669 | @command( | |
3618 | b'perf::branchmapload|perfbranchmapload', |
|
3670 | b'perf::branchmapload|perfbranchmapload', | |
3619 | [ |
|
3671 | [ | |
3620 | (b'f', b'filter', b'', b'Specify repoview filter'), |
|
3672 | (b'f', b'filter', b'', b'Specify repoview filter'), | |
3621 | (b'', b'list', False, b'List brachmap filter caches'), |
|
3673 | (b'', b'list', False, b'List brachmap filter caches'), | |
3622 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), |
|
3674 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), | |
3623 | ] |
|
3675 | ] | |
3624 | + formatteropts, |
|
3676 | + formatteropts, | |
3625 | ) |
|
3677 | ) | |
3626 | def perfbranchmapload(ui, repo, filter=b'', list=False, **opts): |
|
3678 | def perfbranchmapload(ui, repo, filter=b'', list=False, **opts): | |
3627 | """benchmark reading the branchmap""" |
|
3679 | """benchmark reading the branchmap""" | |
3628 | opts = _byteskwargs(opts) |
|
3680 | opts = _byteskwargs(opts) | |
3629 | clearrevlogs = opts[b'clear_revlogs'] |
|
3681 | clearrevlogs = opts[b'clear_revlogs'] | |
3630 |
|
3682 | |||
3631 | if list: |
|
3683 | if list: | |
3632 | for name, kind, st in repo.cachevfs.readdir(stat=True): |
|
3684 | for name, kind, st in repo.cachevfs.readdir(stat=True): | |
3633 | if name.startswith(b'branch2'): |
|
3685 | if name.startswith(b'branch2'): | |
3634 | filtername = name.partition(b'-')[2] or b'unfiltered' |
|
3686 | filtername = name.partition(b'-')[2] or b'unfiltered' | |
3635 | ui.status( |
|
3687 | ui.status( | |
3636 | b'%s - %s\n' % (filtername, util.bytecount(st.st_size)) |
|
3688 | b'%s - %s\n' % (filtername, util.bytecount(st.st_size)) | |
3637 | ) |
|
3689 | ) | |
3638 | return |
|
3690 | return | |
3639 | if not filter: |
|
3691 | if not filter: | |
3640 | filter = None |
|
3692 | filter = None | |
3641 | subsettable = getbranchmapsubsettable() |
|
3693 | subsettable = getbranchmapsubsettable() | |
3642 | if filter is None: |
|
3694 | if filter is None: | |
3643 | repo = repo.unfiltered() |
|
3695 | repo = repo.unfiltered() | |
3644 | else: |
|
3696 | else: | |
3645 | repo = repoview.repoview(repo, filter) |
|
3697 | repo = repoview.repoview(repo, filter) | |
3646 |
|
3698 | |||
3647 | repo.branchmap() # make sure we have a relevant, up to date branchmap |
|
3699 | repo.branchmap() # make sure we have a relevant, up to date branchmap | |
3648 |
|
3700 | |||
3649 | try: |
|
3701 | try: | |
3650 | fromfile = branchmap.branchcache.fromfile |
|
3702 | fromfile = branchmap.branchcache.fromfile | |
3651 | except AttributeError: |
|
3703 | except AttributeError: | |
3652 | # older versions |
|
3704 | # older versions | |
3653 | fromfile = branchmap.read |
|
3705 | fromfile = branchmap.read | |
3654 |
|
3706 | |||
3655 | currentfilter = filter |
|
3707 | currentfilter = filter | |
3656 | # try once without timer, the filter may not be cached |
|
3708 | # try once without timer, the filter may not be cached | |
3657 | while fromfile(repo) is None: |
|
3709 | while fromfile(repo) is None: | |
3658 | currentfilter = subsettable.get(currentfilter) |
|
3710 | currentfilter = subsettable.get(currentfilter) | |
3659 | if currentfilter is None: |
|
3711 | if currentfilter is None: | |
3660 | raise error.Abort( |
|
3712 | raise error.Abort( | |
3661 | b'No branchmap cached for %s repo' % (filter or b'unfiltered') |
|
3713 | b'No branchmap cached for %s repo' % (filter or b'unfiltered') | |
3662 | ) |
|
3714 | ) | |
3663 | repo = repo.filtered(currentfilter) |
|
3715 | repo = repo.filtered(currentfilter) | |
3664 | timer, fm = gettimer(ui, opts) |
|
3716 | timer, fm = gettimer(ui, opts) | |
3665 |
|
3717 | |||
3666 | def setup(): |
|
3718 | def setup(): | |
3667 | if clearrevlogs: |
|
3719 | if clearrevlogs: | |
3668 | clearchangelog(repo) |
|
3720 | clearchangelog(repo) | |
3669 |
|
3721 | |||
3670 | def bench(): |
|
3722 | def bench(): | |
3671 | fromfile(repo) |
|
3723 | fromfile(repo) | |
3672 |
|
3724 | |||
3673 | timer(bench, setup=setup) |
|
3725 | timer(bench, setup=setup) | |
3674 | fm.end() |
|
3726 | fm.end() | |
3675 |
|
3727 | |||
3676 |
|
3728 | |||
3677 | @command(b'perf::loadmarkers|perfloadmarkers') |
|
3729 | @command(b'perf::loadmarkers|perfloadmarkers') | |
3678 | def perfloadmarkers(ui, repo): |
|
3730 | def perfloadmarkers(ui, repo): | |
3679 | """benchmark the time to parse the on-disk markers for a repo |
|
3731 | """benchmark the time to parse the on-disk markers for a repo | |
3680 |
|
3732 | |||
3681 | Result is the number of markers in the repo.""" |
|
3733 | Result is the number of markers in the repo.""" | |
3682 | timer, fm = gettimer(ui) |
|
3734 | timer, fm = gettimer(ui) | |
3683 | svfs = getsvfs(repo) |
|
3735 | svfs = getsvfs(repo) | |
3684 | timer(lambda: len(obsolete.obsstore(repo, svfs))) |
|
3736 | timer(lambda: len(obsolete.obsstore(repo, svfs))) | |
3685 | fm.end() |
|
3737 | fm.end() | |
3686 |
|
3738 | |||
3687 |
|
3739 | |||
3688 | @command( |
|
3740 | @command( | |
3689 | b'perf::lrucachedict|perflrucachedict', |
|
3741 | b'perf::lrucachedict|perflrucachedict', | |
3690 | formatteropts |
|
3742 | formatteropts | |
3691 | + [ |
|
3743 | + [ | |
3692 | (b'', b'costlimit', 0, b'maximum total cost of items in cache'), |
|
3744 | (b'', b'costlimit', 0, b'maximum total cost of items in cache'), | |
3693 | (b'', b'mincost', 0, b'smallest cost of items in cache'), |
|
3745 | (b'', b'mincost', 0, b'smallest cost of items in cache'), | |
3694 | (b'', b'maxcost', 100, b'maximum cost of items in cache'), |
|
3746 | (b'', b'maxcost', 100, b'maximum cost of items in cache'), | |
3695 | (b'', b'size', 4, b'size of cache'), |
|
3747 | (b'', b'size', 4, b'size of cache'), | |
3696 | (b'', b'gets', 10000, b'number of key lookups'), |
|
3748 | (b'', b'gets', 10000, b'number of key lookups'), | |
3697 | (b'', b'sets', 10000, b'number of key sets'), |
|
3749 | (b'', b'sets', 10000, b'number of key sets'), | |
3698 | (b'', b'mixed', 10000, b'number of mixed mode operations'), |
|
3750 | (b'', b'mixed', 10000, b'number of mixed mode operations'), | |
3699 | ( |
|
3751 | ( | |
3700 | b'', |
|
3752 | b'', | |
3701 | b'mixedgetfreq', |
|
3753 | b'mixedgetfreq', | |
3702 | 50, |
|
3754 | 50, | |
3703 | b'frequency of get vs set ops in mixed mode', |
|
3755 | b'frequency of get vs set ops in mixed mode', | |
3704 | ), |
|
3756 | ), | |
3705 | ], |
|
3757 | ], | |
3706 | norepo=True, |
|
3758 | norepo=True, | |
3707 | ) |
|
3759 | ) | |
3708 | def perflrucache( |
|
3760 | def perflrucache( | |
3709 | ui, |
|
3761 | ui, | |
3710 | mincost=0, |
|
3762 | mincost=0, | |
3711 | maxcost=100, |
|
3763 | maxcost=100, | |
3712 | costlimit=0, |
|
3764 | costlimit=0, | |
3713 | size=4, |
|
3765 | size=4, | |
3714 | gets=10000, |
|
3766 | gets=10000, | |
3715 | sets=10000, |
|
3767 | sets=10000, | |
3716 | mixed=10000, |
|
3768 | mixed=10000, | |
3717 | mixedgetfreq=50, |
|
3769 | mixedgetfreq=50, | |
3718 | **opts |
|
3770 | **opts | |
3719 | ): |
|
3771 | ): | |
3720 | opts = _byteskwargs(opts) |
|
3772 | opts = _byteskwargs(opts) | |
3721 |
|
3773 | |||
3722 | def doinit(): |
|
3774 | def doinit(): | |
3723 | for i in _xrange(10000): |
|
3775 | for i in _xrange(10000): | |
3724 | util.lrucachedict(size) |
|
3776 | util.lrucachedict(size) | |
3725 |
|
3777 | |||
3726 | costrange = list(range(mincost, maxcost + 1)) |
|
3778 | costrange = list(range(mincost, maxcost + 1)) | |
3727 |
|
3779 | |||
3728 | values = [] |
|
3780 | values = [] | |
3729 | for i in _xrange(size): |
|
3781 | for i in _xrange(size): | |
3730 | values.append(random.randint(0, _maxint)) |
|
3782 | values.append(random.randint(0, _maxint)) | |
3731 |
|
3783 | |||
3732 | # Get mode fills the cache and tests raw lookup performance with no |
|
3784 | # Get mode fills the cache and tests raw lookup performance with no | |
3733 | # eviction. |
|
3785 | # eviction. | |
3734 | getseq = [] |
|
3786 | getseq = [] | |
3735 | for i in _xrange(gets): |
|
3787 | for i in _xrange(gets): | |
3736 | getseq.append(random.choice(values)) |
|
3788 | getseq.append(random.choice(values)) | |
3737 |
|
3789 | |||
3738 | def dogets(): |
|
3790 | def dogets(): | |
3739 | d = util.lrucachedict(size) |
|
3791 | d = util.lrucachedict(size) | |
3740 | for v in values: |
|
3792 | for v in values: | |
3741 | d[v] = v |
|
3793 | d[v] = v | |
3742 | for key in getseq: |
|
3794 | for key in getseq: | |
3743 | value = d[key] |
|
3795 | value = d[key] | |
3744 | value # silence pyflakes warning |
|
3796 | value # silence pyflakes warning | |
3745 |
|
3797 | |||
3746 | def dogetscost(): |
|
3798 | def dogetscost(): | |
3747 | d = util.lrucachedict(size, maxcost=costlimit) |
|
3799 | d = util.lrucachedict(size, maxcost=costlimit) | |
3748 | for i, v in enumerate(values): |
|
3800 | for i, v in enumerate(values): | |
3749 | d.insert(v, v, cost=costs[i]) |
|
3801 | d.insert(v, v, cost=costs[i]) | |
3750 | for key in getseq: |
|
3802 | for key in getseq: | |
3751 | try: |
|
3803 | try: | |
3752 | value = d[key] |
|
3804 | value = d[key] | |
3753 | value # silence pyflakes warning |
|
3805 | value # silence pyflakes warning | |
3754 | except KeyError: |
|
3806 | except KeyError: | |
3755 | pass |
|
3807 | pass | |
3756 |
|
3808 | |||
3757 | # Set mode tests insertion speed with cache eviction. |
|
3809 | # Set mode tests insertion speed with cache eviction. | |
3758 | setseq = [] |
|
3810 | setseq = [] | |
3759 | costs = [] |
|
3811 | costs = [] | |
3760 | for i in _xrange(sets): |
|
3812 | for i in _xrange(sets): | |
3761 | setseq.append(random.randint(0, _maxint)) |
|
3813 | setseq.append(random.randint(0, _maxint)) | |
3762 | costs.append(random.choice(costrange)) |
|
3814 | costs.append(random.choice(costrange)) | |
3763 |
|
3815 | |||
3764 | def doinserts(): |
|
3816 | def doinserts(): | |
3765 | d = util.lrucachedict(size) |
|
3817 | d = util.lrucachedict(size) | |
3766 | for v in setseq: |
|
3818 | for v in setseq: | |
3767 | d.insert(v, v) |
|
3819 | d.insert(v, v) | |
3768 |
|
3820 | |||
3769 | def doinsertscost(): |
|
3821 | def doinsertscost(): | |
3770 | d = util.lrucachedict(size, maxcost=costlimit) |
|
3822 | d = util.lrucachedict(size, maxcost=costlimit) | |
3771 | for i, v in enumerate(setseq): |
|
3823 | for i, v in enumerate(setseq): | |
3772 | d.insert(v, v, cost=costs[i]) |
|
3824 | d.insert(v, v, cost=costs[i]) | |
3773 |
|
3825 | |||
3774 | def dosets(): |
|
3826 | def dosets(): | |
3775 | d = util.lrucachedict(size) |
|
3827 | d = util.lrucachedict(size) | |
3776 | for v in setseq: |
|
3828 | for v in setseq: | |
3777 | d[v] = v |
|
3829 | d[v] = v | |
3778 |
|
3830 | |||
3779 | # Mixed mode randomly performs gets and sets with eviction. |
|
3831 | # Mixed mode randomly performs gets and sets with eviction. | |
3780 | mixedops = [] |
|
3832 | mixedops = [] | |
3781 | for i in _xrange(mixed): |
|
3833 | for i in _xrange(mixed): | |
3782 | r = random.randint(0, 100) |
|
3834 | r = random.randint(0, 100) | |
3783 | if r < mixedgetfreq: |
|
3835 | if r < mixedgetfreq: | |
3784 | op = 0 |
|
3836 | op = 0 | |
3785 | else: |
|
3837 | else: | |
3786 | op = 1 |
|
3838 | op = 1 | |
3787 |
|
3839 | |||
3788 | mixedops.append( |
|
3840 | mixedops.append( | |
3789 | (op, random.randint(0, size * 2), random.choice(costrange)) |
|
3841 | (op, random.randint(0, size * 2), random.choice(costrange)) | |
3790 | ) |
|
3842 | ) | |
3791 |
|
3843 | |||
3792 | def domixed(): |
|
3844 | def domixed(): | |
3793 | d = util.lrucachedict(size) |
|
3845 | d = util.lrucachedict(size) | |
3794 |
|
3846 | |||
3795 | for op, v, cost in mixedops: |
|
3847 | for op, v, cost in mixedops: | |
3796 | if op == 0: |
|
3848 | if op == 0: | |
3797 | try: |
|
3849 | try: | |
3798 | d[v] |
|
3850 | d[v] | |
3799 | except KeyError: |
|
3851 | except KeyError: | |
3800 | pass |
|
3852 | pass | |
3801 | else: |
|
3853 | else: | |
3802 | d[v] = v |
|
3854 | d[v] = v | |
3803 |
|
3855 | |||
3804 | def domixedcost(): |
|
3856 | def domixedcost(): | |
3805 | d = util.lrucachedict(size, maxcost=costlimit) |
|
3857 | d = util.lrucachedict(size, maxcost=costlimit) | |
3806 |
|
3858 | |||
3807 | for op, v, cost in mixedops: |
|
3859 | for op, v, cost in mixedops: | |
3808 | if op == 0: |
|
3860 | if op == 0: | |
3809 | try: |
|
3861 | try: | |
3810 | d[v] |
|
3862 | d[v] | |
3811 | except KeyError: |
|
3863 | except KeyError: | |
3812 | pass |
|
3864 | pass | |
3813 | else: |
|
3865 | else: | |
3814 | d.insert(v, v, cost=cost) |
|
3866 | d.insert(v, v, cost=cost) | |
3815 |
|
3867 | |||
3816 | benches = [ |
|
3868 | benches = [ | |
3817 | (doinit, b'init'), |
|
3869 | (doinit, b'init'), | |
3818 | ] |
|
3870 | ] | |
3819 |
|
3871 | |||
3820 | if costlimit: |
|
3872 | if costlimit: | |
3821 | benches.extend( |
|
3873 | benches.extend( | |
3822 | [ |
|
3874 | [ | |
3823 | (dogetscost, b'gets w/ cost limit'), |
|
3875 | (dogetscost, b'gets w/ cost limit'), | |
3824 | (doinsertscost, b'inserts w/ cost limit'), |
|
3876 | (doinsertscost, b'inserts w/ cost limit'), | |
3825 | (domixedcost, b'mixed w/ cost limit'), |
|
3877 | (domixedcost, b'mixed w/ cost limit'), | |
3826 | ] |
|
3878 | ] | |
3827 | ) |
|
3879 | ) | |
3828 | else: |
|
3880 | else: | |
3829 | benches.extend( |
|
3881 | benches.extend( | |
3830 | [ |
|
3882 | [ | |
3831 | (dogets, b'gets'), |
|
3883 | (dogets, b'gets'), | |
3832 | (doinserts, b'inserts'), |
|
3884 | (doinserts, b'inserts'), | |
3833 | (dosets, b'sets'), |
|
3885 | (dosets, b'sets'), | |
3834 | (domixed, b'mixed'), |
|
3886 | (domixed, b'mixed'), | |
3835 | ] |
|
3887 | ] | |
3836 | ) |
|
3888 | ) | |
3837 |
|
3889 | |||
3838 | for fn, title in benches: |
|
3890 | for fn, title in benches: | |
3839 | timer, fm = gettimer(ui, opts) |
|
3891 | timer, fm = gettimer(ui, opts) | |
3840 | timer(fn, title=title) |
|
3892 | timer(fn, title=title) | |
3841 | fm.end() |
|
3893 | fm.end() | |
3842 |
|
3894 | |||
3843 |
|
3895 | |||
3844 | @command( |
|
3896 | @command( | |
3845 | b'perf::write|perfwrite', |
|
3897 | b'perf::write|perfwrite', | |
3846 | formatteropts |
|
3898 | formatteropts | |
3847 | + [ |
|
3899 | + [ | |
3848 | (b'', b'write-method', b'write', b'ui write method'), |
|
3900 | (b'', b'write-method', b'write', b'ui write method'), | |
3849 | (b'', b'nlines', 100, b'number of lines'), |
|
3901 | (b'', b'nlines', 100, b'number of lines'), | |
3850 | (b'', b'nitems', 100, b'number of items (per line)'), |
|
3902 | (b'', b'nitems', 100, b'number of items (per line)'), | |
3851 | (b'', b'item', b'x', b'item that is written'), |
|
3903 | (b'', b'item', b'x', b'item that is written'), | |
3852 | (b'', b'batch-line', None, b'pass whole line to write method at once'), |
|
3904 | (b'', b'batch-line', None, b'pass whole line to write method at once'), | |
3853 | (b'', b'flush-line', None, b'flush after each line'), |
|
3905 | (b'', b'flush-line', None, b'flush after each line'), | |
3854 | ], |
|
3906 | ], | |
3855 | ) |
|
3907 | ) | |
3856 | def perfwrite(ui, repo, **opts): |
|
3908 | def perfwrite(ui, repo, **opts): | |
3857 | """microbenchmark ui.write (and others)""" |
|
3909 | """microbenchmark ui.write (and others)""" | |
3858 | opts = _byteskwargs(opts) |
|
3910 | opts = _byteskwargs(opts) | |
3859 |
|
3911 | |||
3860 | write = getattr(ui, _sysstr(opts[b'write_method'])) |
|
3912 | write = getattr(ui, _sysstr(opts[b'write_method'])) | |
3861 | nlines = int(opts[b'nlines']) |
|
3913 | nlines = int(opts[b'nlines']) | |
3862 | nitems = int(opts[b'nitems']) |
|
3914 | nitems = int(opts[b'nitems']) | |
3863 | item = opts[b'item'] |
|
3915 | item = opts[b'item'] | |
3864 | batch_line = opts.get(b'batch_line') |
|
3916 | batch_line = opts.get(b'batch_line') | |
3865 | flush_line = opts.get(b'flush_line') |
|
3917 | flush_line = opts.get(b'flush_line') | |
3866 |
|
3918 | |||
3867 | if batch_line: |
|
3919 | if batch_line: | |
3868 | line = item * nitems + b'\n' |
|
3920 | line = item * nitems + b'\n' | |
3869 |
|
3921 | |||
3870 | def benchmark(): |
|
3922 | def benchmark(): | |
3871 | for i in pycompat.xrange(nlines): |
|
3923 | for i in pycompat.xrange(nlines): | |
3872 | if batch_line: |
|
3924 | if batch_line: | |
3873 | write(line) |
|
3925 | write(line) | |
3874 | else: |
|
3926 | else: | |
3875 | for i in pycompat.xrange(nitems): |
|
3927 | for i in pycompat.xrange(nitems): | |
3876 | write(item) |
|
3928 | write(item) | |
3877 | write(b'\n') |
|
3929 | write(b'\n') | |
3878 | if flush_line: |
|
3930 | if flush_line: | |
3879 | ui.flush() |
|
3931 | ui.flush() | |
3880 | ui.flush() |
|
3932 | ui.flush() | |
3881 |
|
3933 | |||
3882 | timer, fm = gettimer(ui, opts) |
|
3934 | timer, fm = gettimer(ui, opts) | |
3883 | timer(benchmark) |
|
3935 | timer(benchmark) | |
3884 | fm.end() |
|
3936 | fm.end() | |
3885 |
|
3937 | |||
3886 |
|
3938 | |||
3887 | def uisetup(ui): |
|
3939 | def uisetup(ui): | |
3888 | if util.safehasattr(cmdutil, b'openrevlog') and not util.safehasattr( |
|
3940 | if util.safehasattr(cmdutil, b'openrevlog') and not util.safehasattr( | |
3889 | commands, b'debugrevlogopts' |
|
3941 | commands, b'debugrevlogopts' | |
3890 | ): |
|
3942 | ): | |
3891 | # for "historical portability": |
|
3943 | # for "historical portability": | |
3892 | # In this case, Mercurial should be 1.9 (or a79fea6b3e77) - |
|
3944 | # In this case, Mercurial should be 1.9 (or a79fea6b3e77) - | |
3893 | # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for |
|
3945 | # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for | |
3894 | # openrevlog() should cause failure, because it has been |
|
3946 | # openrevlog() should cause failure, because it has been | |
3895 | # available since 3.5 (or 49c583ca48c4). |
|
3947 | # available since 3.5 (or 49c583ca48c4). | |
3896 | def openrevlog(orig, repo, cmd, file_, opts): |
|
3948 | def openrevlog(orig, repo, cmd, file_, opts): | |
3897 | if opts.get(b'dir') and not util.safehasattr(repo, b'dirlog'): |
|
3949 | if opts.get(b'dir') and not util.safehasattr(repo, b'dirlog'): | |
3898 | raise error.Abort( |
|
3950 | raise error.Abort( | |
3899 | b"This version doesn't support --dir option", |
|
3951 | b"This version doesn't support --dir option", | |
3900 | hint=b"use 3.5 or later", |
|
3952 | hint=b"use 3.5 or later", | |
3901 | ) |
|
3953 | ) | |
3902 | return orig(repo, cmd, file_, opts) |
|
3954 | return orig(repo, cmd, file_, opts) | |
3903 |
|
3955 | |||
3904 | extensions.wrapfunction(cmdutil, b'openrevlog', openrevlog) |
|
3956 | extensions.wrapfunction(cmdutil, b'openrevlog', openrevlog) | |
3905 |
|
3957 | |||
3906 |
|
3958 | |||
3907 | @command( |
|
3959 | @command( | |
3908 | b'perf::progress|perfprogress', |
|
3960 | b'perf::progress|perfprogress', | |
3909 | formatteropts |
|
3961 | formatteropts | |
3910 | + [ |
|
3962 | + [ | |
3911 | (b'', b'topic', b'topic', b'topic for progress messages'), |
|
3963 | (b'', b'topic', b'topic', b'topic for progress messages'), | |
3912 | (b'c', b'total', 1000000, b'total value we are progressing to'), |
|
3964 | (b'c', b'total', 1000000, b'total value we are progressing to'), | |
3913 | ], |
|
3965 | ], | |
3914 | norepo=True, |
|
3966 | norepo=True, | |
3915 | ) |
|
3967 | ) | |
3916 | def perfprogress(ui, topic=None, total=None, **opts): |
|
3968 | def perfprogress(ui, topic=None, total=None, **opts): | |
3917 | """printing of progress bars""" |
|
3969 | """printing of progress bars""" | |
3918 | opts = _byteskwargs(opts) |
|
3970 | opts = _byteskwargs(opts) | |
3919 |
|
3971 | |||
3920 | timer, fm = gettimer(ui, opts) |
|
3972 | timer, fm = gettimer(ui, opts) | |
3921 |
|
3973 | |||
3922 | def doprogress(): |
|
3974 | def doprogress(): | |
3923 | with ui.makeprogress(topic, total=total) as progress: |
|
3975 | with ui.makeprogress(topic, total=total) as progress: | |
3924 | for i in _xrange(total): |
|
3976 | for i in _xrange(total): | |
3925 | progress.increment() |
|
3977 | progress.increment() | |
3926 |
|
3978 | |||
3927 | timer(doprogress) |
|
3979 | timer(doprogress) | |
3928 | fm.end() |
|
3980 | fm.end() |
@@ -1,48 +1,57 b'' | |||||
1 | #!/usr/bin/env python3 |
|
1 | #!/usr/bin/env python3 | |
2 | # Undump a dump from dumprevlog |
|
2 | # Undump a dump from dumprevlog | |
3 | # $ hg init |
|
3 | # $ hg init | |
4 | # $ undumprevlog < repo.dump |
|
4 | # $ undumprevlog < repo.dump | |
5 |
|
5 | |||
6 | from __future__ import absolute_import, print_function |
|
6 | from __future__ import absolute_import, print_function | |
7 |
|
7 | |||
8 | import sys |
|
8 | import sys | |
9 | from mercurial.node import bin |
|
9 | from mercurial.node import bin | |
10 | from mercurial import ( |
|
10 | from mercurial import ( | |
11 | encoding, |
|
11 | encoding, | |
12 | revlog, |
|
12 | revlog, | |
13 | transaction, |
|
13 | transaction, | |
14 | vfs as vfsmod, |
|
14 | vfs as vfsmod, | |
15 | ) |
|
15 | ) | |
16 | from mercurial.utils import procutil |
|
16 | from mercurial.utils import procutil | |
17 |
|
17 | |||
|
18 | from mercurial.revlogutils import ( | |||
|
19 | constants as revlog_constants, | |||
|
20 | ) | |||
|
21 | ||||
18 | for fp in (sys.stdin, sys.stdout, sys.stderr): |
|
22 | for fp in (sys.stdin, sys.stdout, sys.stderr): | |
19 | procutil.setbinary(fp) |
|
23 | procutil.setbinary(fp) | |
20 |
|
24 | |||
21 | opener = vfsmod.vfs(b'.', False) |
|
25 | opener = vfsmod.vfs(b'.', False) | |
22 | tr = transaction.transaction( |
|
26 | tr = transaction.transaction( | |
23 | sys.stderr.write, opener, {b'store': opener}, b"undump.journal" |
|
27 | sys.stderr.write, opener, {b'store': opener}, b"undump.journal" | |
24 | ) |
|
28 | ) | |
25 | while True: |
|
29 | while True: | |
26 | l = sys.stdin.readline() |
|
30 | l = sys.stdin.readline() | |
27 | if not l: |
|
31 | if not l: | |
28 | break |
|
32 | break | |
29 | if l.startswith("file:"): |
|
33 | if l.startswith("file:"): | |
30 | f = encoding.strtolocal(l[6:-1]) |
|
34 | f = encoding.strtolocal(l[6:-1]) | |
31 | r = revlog.revlog(opener, f) |
|
35 | assert f.endswith(b'.i') | |
|
36 | r = revlog.revlog( | |||
|
37 | opener, | |||
|
38 | target=(revlog_constants.KIND_OTHER, b'undump-revlog'), | |||
|
39 | radix=f[:-2], | |||
|
40 | ) | |||
32 | procutil.stdout.write(b'%s\n' % f) |
|
41 | procutil.stdout.write(b'%s\n' % f) | |
33 | elif l.startswith("node:"): |
|
42 | elif l.startswith("node:"): | |
34 | n = bin(l[6:-1]) |
|
43 | n = bin(l[6:-1]) | |
35 | elif l.startswith("linkrev:"): |
|
44 | elif l.startswith("linkrev:"): | |
36 | lr = int(l[9:-1]) |
|
45 | lr = int(l[9:-1]) | |
37 | elif l.startswith("parents:"): |
|
46 | elif l.startswith("parents:"): | |
38 | p = l[9:-1].split() |
|
47 | p = l[9:-1].split() | |
39 | p1 = bin(p[0]) |
|
48 | p1 = bin(p[0]) | |
40 | p2 = bin(p[1]) |
|
49 | p2 = bin(p[1]) | |
41 | elif l.startswith("length:"): |
|
50 | elif l.startswith("length:"): | |
42 | length = int(l[8:-1]) |
|
51 | length = int(l[8:-1]) | |
43 | sys.stdin.readline() # start marker |
|
52 | sys.stdin.readline() # start marker | |
44 | d = encoding.strtolocal(sys.stdin.read(length)) |
|
53 | d = encoding.strtolocal(sys.stdin.read(length)) | |
45 | sys.stdin.readline() # end marker |
|
54 | sys.stdin.readline() # end marker | |
46 | r.addrevision(d, tr, lr, p1, p2) |
|
55 | r.addrevision(d, tr, lr, p1, p2) | |
47 |
|
56 | |||
48 | tr.close() |
|
57 | tr.close() |
@@ -1,1166 +1,1165 b'' | |||||
1 | # absorb.py |
|
1 | # absorb.py | |
2 | # |
|
2 | # | |
3 | # Copyright 2016 Facebook, Inc. |
|
3 | # Copyright 2016 Facebook, Inc. | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | """apply working directory changes to changesets (EXPERIMENTAL) |
|
8 | """apply working directory changes to changesets (EXPERIMENTAL) | |
9 |
|
9 | |||
10 | The absorb extension provides a command to use annotate information to |
|
10 | The absorb extension provides a command to use annotate information to | |
11 | amend modified chunks into the corresponding non-public changesets. |
|
11 | amend modified chunks into the corresponding non-public changesets. | |
12 |
|
12 | |||
13 | :: |
|
13 | :: | |
14 |
|
14 | |||
15 | [absorb] |
|
15 | [absorb] | |
16 | # only check 50 recent non-public changesets at most |
|
16 | # only check 50 recent non-public changesets at most | |
17 | max-stack-size = 50 |
|
17 | max-stack-size = 50 | |
18 | # whether to add noise to new commits to avoid obsolescence cycle |
|
18 | # whether to add noise to new commits to avoid obsolescence cycle | |
19 | add-noise = 1 |
|
19 | add-noise = 1 | |
20 | # make `amend --correlated` a shortcut to the main command |
|
20 | # make `amend --correlated` a shortcut to the main command | |
21 | amend-flag = correlated |
|
21 | amend-flag = correlated | |
22 |
|
22 | |||
23 | [color] |
|
23 | [color] | |
24 | absorb.description = yellow |
|
24 | absorb.description = yellow | |
25 | absorb.node = blue bold |
|
25 | absorb.node = blue bold | |
26 | absorb.path = bold |
|
26 | absorb.path = bold | |
27 | """ |
|
27 | """ | |
28 |
|
28 | |||
29 | # TODO: |
|
29 | # TODO: | |
30 | # * Rename config items to [commands] namespace |
|
30 | # * Rename config items to [commands] namespace | |
31 | # * Converge getdraftstack() with other code in core |
|
31 | # * Converge getdraftstack() with other code in core | |
32 | # * move many attributes on fixupstate to be private |
|
32 | # * move many attributes on fixupstate to be private | |
33 |
|
33 | |||
34 | from __future__ import absolute_import |
|
34 | from __future__ import absolute_import | |
35 |
|
35 | |||
36 | import collections |
|
36 | import collections | |
37 |
|
37 | |||
38 | from mercurial.i18n import _ |
|
38 | from mercurial.i18n import _ | |
39 | from mercurial.node import ( |
|
39 | from mercurial.node import ( | |
40 | hex, |
|
40 | hex, | |
41 | nullid, |
|
|||
42 | short, |
|
41 | short, | |
43 | ) |
|
42 | ) | |
44 | from mercurial import ( |
|
43 | from mercurial import ( | |
45 | cmdutil, |
|
44 | cmdutil, | |
46 | commands, |
|
45 | commands, | |
47 | context, |
|
46 | context, | |
48 | crecord, |
|
47 | crecord, | |
49 | error, |
|
48 | error, | |
50 | linelog, |
|
49 | linelog, | |
51 | mdiff, |
|
50 | mdiff, | |
52 | obsolete, |
|
51 | obsolete, | |
53 | patch, |
|
52 | patch, | |
54 | phases, |
|
53 | phases, | |
55 | pycompat, |
|
54 | pycompat, | |
56 | registrar, |
|
55 | registrar, | |
57 | rewriteutil, |
|
56 | rewriteutil, | |
58 | scmutil, |
|
57 | scmutil, | |
59 | util, |
|
58 | util, | |
60 | ) |
|
59 | ) | |
61 | from mercurial.utils import stringutil |
|
60 | from mercurial.utils import stringutil | |
62 |
|
61 | |||
63 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
62 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | |
64 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
63 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | |
65 | # be specifying the version(s) of Mercurial they are tested with, or |
|
64 | # be specifying the version(s) of Mercurial they are tested with, or | |
66 | # leave the attribute unspecified. |
|
65 | # leave the attribute unspecified. | |
67 | testedwith = b'ships-with-hg-core' |
|
66 | testedwith = b'ships-with-hg-core' | |
68 |
|
67 | |||
69 | cmdtable = {} |
|
68 | cmdtable = {} | |
70 | command = registrar.command(cmdtable) |
|
69 | command = registrar.command(cmdtable) | |
71 |
|
70 | |||
72 | configtable = {} |
|
71 | configtable = {} | |
73 | configitem = registrar.configitem(configtable) |
|
72 | configitem = registrar.configitem(configtable) | |
74 |
|
73 | |||
75 | configitem(b'absorb', b'add-noise', default=True) |
|
74 | configitem(b'absorb', b'add-noise', default=True) | |
76 | configitem(b'absorb', b'amend-flag', default=None) |
|
75 | configitem(b'absorb', b'amend-flag', default=None) | |
77 | configitem(b'absorb', b'max-stack-size', default=50) |
|
76 | configitem(b'absorb', b'max-stack-size', default=50) | |
78 |
|
77 | |||
79 | colortable = { |
|
78 | colortable = { | |
80 | b'absorb.description': b'yellow', |
|
79 | b'absorb.description': b'yellow', | |
81 | b'absorb.node': b'blue bold', |
|
80 | b'absorb.node': b'blue bold', | |
82 | b'absorb.path': b'bold', |
|
81 | b'absorb.path': b'bold', | |
83 | } |
|
82 | } | |
84 |
|
83 | |||
85 | defaultdict = collections.defaultdict |
|
84 | defaultdict = collections.defaultdict | |
86 |
|
85 | |||
87 |
|
86 | |||
88 | class nullui(object): |
|
87 | class nullui(object): | |
89 | """blank ui object doing nothing""" |
|
88 | """blank ui object doing nothing""" | |
90 |
|
89 | |||
91 | debugflag = False |
|
90 | debugflag = False | |
92 | verbose = False |
|
91 | verbose = False | |
93 | quiet = True |
|
92 | quiet = True | |
94 |
|
93 | |||
95 | def __getitem__(name): |
|
94 | def __getitem__(name): | |
96 | def nullfunc(*args, **kwds): |
|
95 | def nullfunc(*args, **kwds): | |
97 | return |
|
96 | return | |
98 |
|
97 | |||
99 | return nullfunc |
|
98 | return nullfunc | |
100 |
|
99 | |||
101 |
|
100 | |||
102 | class emptyfilecontext(object): |
|
101 | class emptyfilecontext(object): | |
103 | """minimal filecontext representing an empty file""" |
|
102 | """minimal filecontext representing an empty file""" | |
104 |
|
103 | |||
105 | def __init__(self, repo): |
|
104 | def __init__(self, repo): | |
106 | self._repo = repo |
|
105 | self._repo = repo | |
107 |
|
106 | |||
108 | def data(self): |
|
107 | def data(self): | |
109 | return b'' |
|
108 | return b'' | |
110 |
|
109 | |||
111 | def node(self): |
|
110 | def node(self): | |
112 | return nullid |
|
111 | return self._repo.nullid | |
113 |
|
112 | |||
114 |
|
113 | |||
115 | def uniq(lst): |
|
114 | def uniq(lst): | |
116 | """list -> list. remove duplicated items without changing the order""" |
|
115 | """list -> list. remove duplicated items without changing the order""" | |
117 | seen = set() |
|
116 | seen = set() | |
118 | result = [] |
|
117 | result = [] | |
119 | for x in lst: |
|
118 | for x in lst: | |
120 | if x not in seen: |
|
119 | if x not in seen: | |
121 | seen.add(x) |
|
120 | seen.add(x) | |
122 | result.append(x) |
|
121 | result.append(x) | |
123 | return result |
|
122 | return result | |
124 |
|
123 | |||
125 |
|
124 | |||
126 | def getdraftstack(headctx, limit=None): |
|
125 | def getdraftstack(headctx, limit=None): | |
127 | """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets. |
|
126 | """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets. | |
128 |
|
127 | |||
129 | changesets are sorted in topo order, oldest first. |
|
128 | changesets are sorted in topo order, oldest first. | |
130 | return at most limit items, if limit is a positive number. |
|
129 | return at most limit items, if limit is a positive number. | |
131 |
|
130 | |||
132 | merges are considered as non-draft as well. i.e. every commit |
|
131 | merges are considered as non-draft as well. i.e. every commit | |
133 | returned has and only has 1 parent. |
|
132 | returned has and only has 1 parent. | |
134 | """ |
|
133 | """ | |
135 | ctx = headctx |
|
134 | ctx = headctx | |
136 | result = [] |
|
135 | result = [] | |
137 | while ctx.phase() != phases.public: |
|
136 | while ctx.phase() != phases.public: | |
138 | if limit and len(result) >= limit: |
|
137 | if limit and len(result) >= limit: | |
139 | break |
|
138 | break | |
140 | parents = ctx.parents() |
|
139 | parents = ctx.parents() | |
141 | if len(parents) != 1: |
|
140 | if len(parents) != 1: | |
142 | break |
|
141 | break | |
143 | result.append(ctx) |
|
142 | result.append(ctx) | |
144 | ctx = parents[0] |
|
143 | ctx = parents[0] | |
145 | result.reverse() |
|
144 | result.reverse() | |
146 | return result |
|
145 | return result | |
147 |
|
146 | |||
148 |
|
147 | |||
149 | def getfilestack(stack, path, seenfctxs=None): |
|
148 | def getfilestack(stack, path, seenfctxs=None): | |
150 | """([ctx], str, set) -> [fctx], {ctx: fctx} |
|
149 | """([ctx], str, set) -> [fctx], {ctx: fctx} | |
151 |
|
150 | |||
152 | stack is a list of contexts, from old to new. usually they are what |
|
151 | stack is a list of contexts, from old to new. usually they are what | |
153 | "getdraftstack" returns. |
|
152 | "getdraftstack" returns. | |
154 |
|
153 | |||
155 | follows renames, but not copies. |
|
154 | follows renames, but not copies. | |
156 |
|
155 | |||
157 | seenfctxs is a set of filecontexts that will be considered "immutable". |
|
156 | seenfctxs is a set of filecontexts that will be considered "immutable". | |
158 | they are usually what this function returned in earlier calls, useful |
|
157 | they are usually what this function returned in earlier calls, useful | |
159 | to avoid issues that a file was "moved" to multiple places and was then |
|
158 | to avoid issues that a file was "moved" to multiple places and was then | |
160 | modified differently, like: "a" was copied to "b", "a" was also copied to |
|
159 | modified differently, like: "a" was copied to "b", "a" was also copied to | |
161 | "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a" |
|
160 | "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a" | |
162 | and we enforce only one of them to be able to affect "a"'s content. |
|
161 | and we enforce only one of them to be able to affect "a"'s content. | |
163 |
|
162 | |||
164 | return an empty list and an empty dict, if the specified path does not |
|
163 | return an empty list and an empty dict, if the specified path does not | |
165 | exist in stack[-1] (the top of the stack). |
|
164 | exist in stack[-1] (the top of the stack). | |
166 |
|
165 | |||
167 | otherwise, return a list of de-duplicated filecontexts, and the map to |
|
166 | otherwise, return a list of de-duplicated filecontexts, and the map to | |
168 | convert ctx in the stack to fctx, for possible mutable fctxs. the first item |
|
167 | convert ctx in the stack to fctx, for possible mutable fctxs. the first item | |
169 | of the list would be outside the stack and should be considered immutable. |
|
168 | of the list would be outside the stack and should be considered immutable. | |
170 | the remaining items are within the stack. |
|
169 | the remaining items are within the stack. | |
171 |
|
170 | |||
172 | for example, given the following changelog and corresponding filelog |
|
171 | for example, given the following changelog and corresponding filelog | |
173 | revisions: |
|
172 | revisions: | |
174 |
|
173 | |||
175 | changelog: 3----4----5----6----7 |
|
174 | changelog: 3----4----5----6----7 | |
176 | filelog: x 0----1----1----2 (x: no such file yet) |
|
175 | filelog: x 0----1----1----2 (x: no such file yet) | |
177 |
|
176 | |||
178 | - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2}) |
|
177 | - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2}) | |
179 | - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a |
|
178 | - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a | |
180 | dummy empty filecontext. |
|
179 | dummy empty filecontext. | |
181 | - if stack = [2], returns ([], {}) |
|
180 | - if stack = [2], returns ([], {}) | |
182 | - if stack = [7], returns ([1, 2], {7: 2}) |
|
181 | - if stack = [7], returns ([1, 2], {7: 2}) | |
183 | - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be |
|
182 | - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be | |
184 | removed, since 1 is immutable. |
|
183 | removed, since 1 is immutable. | |
185 | """ |
|
184 | """ | |
186 | if seenfctxs is None: |
|
185 | if seenfctxs is None: | |
187 | seenfctxs = set() |
|
186 | seenfctxs = set() | |
188 | assert stack |
|
187 | assert stack | |
189 |
|
188 | |||
190 | if path not in stack[-1]: |
|
189 | if path not in stack[-1]: | |
191 | return [], {} |
|
190 | return [], {} | |
192 |
|
191 | |||
193 | fctxs = [] |
|
192 | fctxs = [] | |
194 | fctxmap = {} |
|
193 | fctxmap = {} | |
195 |
|
194 | |||
196 | pctx = stack[0].p1() # the public (immutable) ctx we stop at |
|
195 | pctx = stack[0].p1() # the public (immutable) ctx we stop at | |
197 | for ctx in reversed(stack): |
|
196 | for ctx in reversed(stack): | |
198 | if path not in ctx: # the file is added in the next commit |
|
197 | if path not in ctx: # the file is added in the next commit | |
199 | pctx = ctx |
|
198 | pctx = ctx | |
200 | break |
|
199 | break | |
201 | fctx = ctx[path] |
|
200 | fctx = ctx[path] | |
202 | fctxs.append(fctx) |
|
201 | fctxs.append(fctx) | |
203 | if fctx in seenfctxs: # treat fctx as the immutable one |
|
202 | if fctx in seenfctxs: # treat fctx as the immutable one | |
204 | pctx = None # do not add another immutable fctx |
|
203 | pctx = None # do not add another immutable fctx | |
205 | break |
|
204 | break | |
206 | fctxmap[ctx] = fctx # only for mutable fctxs |
|
205 | fctxmap[ctx] = fctx # only for mutable fctxs | |
207 | copy = fctx.copysource() |
|
206 | copy = fctx.copysource() | |
208 | if copy: |
|
207 | if copy: | |
209 | path = copy # follow rename |
|
208 | path = copy # follow rename | |
210 | if path in ctx: # but do not follow copy |
|
209 | if path in ctx: # but do not follow copy | |
211 | pctx = ctx.p1() |
|
210 | pctx = ctx.p1() | |
212 | break |
|
211 | break | |
213 |
|
212 | |||
214 | if pctx is not None: # need an extra immutable fctx |
|
213 | if pctx is not None: # need an extra immutable fctx | |
215 | if path in pctx: |
|
214 | if path in pctx: | |
216 | fctxs.append(pctx[path]) |
|
215 | fctxs.append(pctx[path]) | |
217 | else: |
|
216 | else: | |
218 | fctxs.append(emptyfilecontext(pctx.repo())) |
|
217 | fctxs.append(emptyfilecontext(pctx.repo())) | |
219 |
|
218 | |||
220 | fctxs.reverse() |
|
219 | fctxs.reverse() | |
221 | # note: we rely on a property of hg: filerev is not reused for linear |
|
220 | # note: we rely on a property of hg: filerev is not reused for linear | |
222 | # history. i.e. it's impossible to have: |
|
221 | # history. i.e. it's impossible to have: | |
223 | # changelog: 4----5----6 (linear, no merges) |
|
222 | # changelog: 4----5----6 (linear, no merges) | |
224 | # filelog: 1----2----1 |
|
223 | # filelog: 1----2----1 | |
225 | # ^ reuse filerev (impossible) |
|
224 | # ^ reuse filerev (impossible) | |
226 | # because parents are part of the hash. if that's not true, we need to |
|
225 | # because parents are part of the hash. if that's not true, we need to | |
227 | # remove uniq and find a different way to identify fctxs. |
|
226 | # remove uniq and find a different way to identify fctxs. | |
228 | return uniq(fctxs), fctxmap |
|
227 | return uniq(fctxs), fctxmap | |
229 |
|
228 | |||
230 |
|
229 | |||
231 | class overlaystore(patch.filestore): |
|
230 | class overlaystore(patch.filestore): | |
232 | """read-only, hybrid store based on a dict and ctx. |
|
231 | """read-only, hybrid store based on a dict and ctx. | |
233 | memworkingcopy: {path: content}, overrides file contents. |
|
232 | memworkingcopy: {path: content}, overrides file contents. | |
234 | """ |
|
233 | """ | |
235 |
|
234 | |||
236 | def __init__(self, basectx, memworkingcopy): |
|
235 | def __init__(self, basectx, memworkingcopy): | |
237 | self.basectx = basectx |
|
236 | self.basectx = basectx | |
238 | self.memworkingcopy = memworkingcopy |
|
237 | self.memworkingcopy = memworkingcopy | |
239 |
|
238 | |||
240 | def getfile(self, path): |
|
239 | def getfile(self, path): | |
241 | """comply with mercurial.patch.filestore.getfile""" |
|
240 | """comply with mercurial.patch.filestore.getfile""" | |
242 | if path not in self.basectx: |
|
241 | if path not in self.basectx: | |
243 | return None, None, None |
|
242 | return None, None, None | |
244 | fctx = self.basectx[path] |
|
243 | fctx = self.basectx[path] | |
245 | if path in self.memworkingcopy: |
|
244 | if path in self.memworkingcopy: | |
246 | content = self.memworkingcopy[path] |
|
245 | content = self.memworkingcopy[path] | |
247 | else: |
|
246 | else: | |
248 | content = fctx.data() |
|
247 | content = fctx.data() | |
249 | mode = (fctx.islink(), fctx.isexec()) |
|
248 | mode = (fctx.islink(), fctx.isexec()) | |
250 | copy = fctx.copysource() |
|
249 | copy = fctx.copysource() | |
251 | return content, mode, copy |
|
250 | return content, mode, copy | |
252 |
|
251 | |||
253 |
|
252 | |||
254 | def overlaycontext(memworkingcopy, ctx, parents=None, extra=None, desc=None): |
|
253 | def overlaycontext(memworkingcopy, ctx, parents=None, extra=None, desc=None): | |
255 | """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx |
|
254 | """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx | |
256 | memworkingcopy overrides file contents. |
|
255 | memworkingcopy overrides file contents. | |
257 | """ |
|
256 | """ | |
258 | # parents must contain 2 items: (node1, node2) |
|
257 | # parents must contain 2 items: (node1, node2) | |
259 | if parents is None: |
|
258 | if parents is None: | |
260 | parents = ctx.repo().changelog.parents(ctx.node()) |
|
259 | parents = ctx.repo().changelog.parents(ctx.node()) | |
261 | if extra is None: |
|
260 | if extra is None: | |
262 | extra = ctx.extra() |
|
261 | extra = ctx.extra() | |
263 | if desc is None: |
|
262 | if desc is None: | |
264 | desc = ctx.description() |
|
263 | desc = ctx.description() | |
265 | date = ctx.date() |
|
264 | date = ctx.date() | |
266 | user = ctx.user() |
|
265 | user = ctx.user() | |
267 | files = set(ctx.files()).union(memworkingcopy) |
|
266 | files = set(ctx.files()).union(memworkingcopy) | |
268 | store = overlaystore(ctx, memworkingcopy) |
|
267 | store = overlaystore(ctx, memworkingcopy) | |
269 | return context.memctx( |
|
268 | return context.memctx( | |
270 | repo=ctx.repo(), |
|
269 | repo=ctx.repo(), | |
271 | parents=parents, |
|
270 | parents=parents, | |
272 | text=desc, |
|
271 | text=desc, | |
273 | files=files, |
|
272 | files=files, | |
274 | filectxfn=store, |
|
273 | filectxfn=store, | |
275 | user=user, |
|
274 | user=user, | |
276 | date=date, |
|
275 | date=date, | |
277 | branch=None, |
|
276 | branch=None, | |
278 | extra=extra, |
|
277 | extra=extra, | |
279 | ) |
|
278 | ) | |
280 |
|
279 | |||
281 |
|
280 | |||
282 | class filefixupstate(object): |
|
281 | class filefixupstate(object): | |
283 | """state needed to apply fixups to a single file |
|
282 | """state needed to apply fixups to a single file | |
284 |
|
283 | |||
285 | internally, it keeps file contents of several revisions and a linelog. |
|
284 | internally, it keeps file contents of several revisions and a linelog. | |
286 |
|
285 | |||
287 | the linelog uses odd revision numbers for original contents (fctxs passed |
|
286 | the linelog uses odd revision numbers for original contents (fctxs passed | |
288 | to __init__), and even revision numbers for fixups, like: |
|
287 | to __init__), and even revision numbers for fixups, like: | |
289 |
|
288 | |||
290 | linelog rev 1: self.fctxs[0] (from an immutable "public" changeset) |
|
289 | linelog rev 1: self.fctxs[0] (from an immutable "public" changeset) | |
291 | linelog rev 2: fixups made to self.fctxs[0] |
|
290 | linelog rev 2: fixups made to self.fctxs[0] | |
292 | linelog rev 3: self.fctxs[1] (a child of fctxs[0]) |
|
291 | linelog rev 3: self.fctxs[1] (a child of fctxs[0]) | |
293 | linelog rev 4: fixups made to self.fctxs[1] |
|
292 | linelog rev 4: fixups made to self.fctxs[1] | |
294 | ... |
|
293 | ... | |
295 |
|
294 | |||
296 | a typical use is like: |
|
295 | a typical use is like: | |
297 |
|
296 | |||
298 | 1. call diffwith, to calculate self.fixups |
|
297 | 1. call diffwith, to calculate self.fixups | |
299 | 2. (optionally), present self.fixups to the user, or change it |
|
298 | 2. (optionally), present self.fixups to the user, or change it | |
300 | 3. call apply, to apply changes |
|
299 | 3. call apply, to apply changes | |
301 | 4. read results from "finalcontents", or call getfinalcontent |
|
300 | 4. read results from "finalcontents", or call getfinalcontent | |
302 | """ |
|
301 | """ | |
303 |
|
302 | |||
304 | def __init__(self, fctxs, path, ui=None, opts=None): |
|
303 | def __init__(self, fctxs, path, ui=None, opts=None): | |
305 | """([fctx], ui or None) -> None |
|
304 | """([fctx], ui or None) -> None | |
306 |
|
305 | |||
307 | fctxs should be linear, and sorted by topo order - oldest first. |
|
306 | fctxs should be linear, and sorted by topo order - oldest first. | |
308 | fctxs[0] will be considered as "immutable" and will not be changed. |
|
307 | fctxs[0] will be considered as "immutable" and will not be changed. | |
309 | """ |
|
308 | """ | |
310 | self.fctxs = fctxs |
|
309 | self.fctxs = fctxs | |
311 | self.path = path |
|
310 | self.path = path | |
312 | self.ui = ui or nullui() |
|
311 | self.ui = ui or nullui() | |
313 | self.opts = opts or {} |
|
312 | self.opts = opts or {} | |
314 |
|
313 | |||
315 | # following fields are built from fctxs. they exist for perf reason |
|
314 | # following fields are built from fctxs. they exist for perf reason | |
316 | self.contents = [f.data() for f in fctxs] |
|
315 | self.contents = [f.data() for f in fctxs] | |
317 | self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents) |
|
316 | self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents) | |
318 | self.linelog = self._buildlinelog() |
|
317 | self.linelog = self._buildlinelog() | |
319 | if self.ui.debugflag: |
|
318 | if self.ui.debugflag: | |
320 | assert self._checkoutlinelog() == self.contents |
|
319 | assert self._checkoutlinelog() == self.contents | |
321 |
|
320 | |||
322 | # following fields will be filled later |
|
321 | # following fields will be filled later | |
323 | self.chunkstats = [0, 0] # [adopted, total : int] |
|
322 | self.chunkstats = [0, 0] # [adopted, total : int] | |
324 | self.targetlines = [] # [str] |
|
323 | self.targetlines = [] # [str] | |
325 | self.fixups = [] # [(linelog rev, a1, a2, b1, b2)] |
|
324 | self.fixups = [] # [(linelog rev, a1, a2, b1, b2)] | |
326 | self.finalcontents = [] # [str] |
|
325 | self.finalcontents = [] # [str] | |
327 | self.ctxaffected = set() |
|
326 | self.ctxaffected = set() | |
328 |
|
327 | |||
329 | def diffwith(self, targetfctx, fm=None): |
|
328 | def diffwith(self, targetfctx, fm=None): | |
330 | """calculate fixups needed by examining the differences between |
|
329 | """calculate fixups needed by examining the differences between | |
331 | self.fctxs[-1] and targetfctx, chunk by chunk. |
|
330 | self.fctxs[-1] and targetfctx, chunk by chunk. | |
332 |
|
331 | |||
333 | targetfctx is the target state we move towards. we may or may not be |
|
332 | targetfctx is the target state we move towards. we may or may not be | |
334 | able to get there because not all modified chunks can be amended into |
|
333 | able to get there because not all modified chunks can be amended into | |
335 | a non-public fctx unambiguously. |
|
334 | a non-public fctx unambiguously. | |
336 |
|
335 | |||
337 | call this only once, before apply(). |
|
336 | call this only once, before apply(). | |
338 |
|
337 | |||
339 | update self.fixups, self.chunkstats, and self.targetlines. |
|
338 | update self.fixups, self.chunkstats, and self.targetlines. | |
340 | """ |
|
339 | """ | |
341 | a = self.contents[-1] |
|
340 | a = self.contents[-1] | |
342 | alines = self.contentlines[-1] |
|
341 | alines = self.contentlines[-1] | |
343 | b = targetfctx.data() |
|
342 | b = targetfctx.data() | |
344 | blines = mdiff.splitnewlines(b) |
|
343 | blines = mdiff.splitnewlines(b) | |
345 | self.targetlines = blines |
|
344 | self.targetlines = blines | |
346 |
|
345 | |||
347 | self.linelog.annotate(self.linelog.maxrev) |
|
346 | self.linelog.annotate(self.linelog.maxrev) | |
348 | annotated = self.linelog.annotateresult # [(linelog rev, linenum)] |
|
347 | annotated = self.linelog.annotateresult # [(linelog rev, linenum)] | |
349 | assert len(annotated) == len(alines) |
|
348 | assert len(annotated) == len(alines) | |
350 | # add a dummy end line to make insertion at the end easier |
|
349 | # add a dummy end line to make insertion at the end easier | |
351 | if annotated: |
|
350 | if annotated: | |
352 | dummyendline = (annotated[-1][0], annotated[-1][1] + 1) |
|
351 | dummyendline = (annotated[-1][0], annotated[-1][1] + 1) | |
353 | annotated.append(dummyendline) |
|
352 | annotated.append(dummyendline) | |
354 |
|
353 | |||
355 | # analyse diff blocks |
|
354 | # analyse diff blocks | |
356 | for chunk in self._alldiffchunks(a, b, alines, blines): |
|
355 | for chunk in self._alldiffchunks(a, b, alines, blines): | |
357 | newfixups = self._analysediffchunk(chunk, annotated) |
|
356 | newfixups = self._analysediffchunk(chunk, annotated) | |
358 | self.chunkstats[0] += bool(newfixups) # 1 or 0 |
|
357 | self.chunkstats[0] += bool(newfixups) # 1 or 0 | |
359 | self.chunkstats[1] += 1 |
|
358 | self.chunkstats[1] += 1 | |
360 | self.fixups += newfixups |
|
359 | self.fixups += newfixups | |
361 | if fm is not None: |
|
360 | if fm is not None: | |
362 | self._showchanges(fm, alines, blines, chunk, newfixups) |
|
361 | self._showchanges(fm, alines, blines, chunk, newfixups) | |
363 |
|
362 | |||
364 | def apply(self): |
|
363 | def apply(self): | |
365 | """apply self.fixups. update self.linelog, self.finalcontents. |
|
364 | """apply self.fixups. update self.linelog, self.finalcontents. | |
366 |
|
365 | |||
367 | call this only once, before getfinalcontent(), after diffwith(). |
|
366 | call this only once, before getfinalcontent(), after diffwith(). | |
368 | """ |
|
367 | """ | |
369 | # the following is unnecessary, as it's done by "diffwith": |
|
368 | # the following is unnecessary, as it's done by "diffwith": | |
370 | # self.linelog.annotate(self.linelog.maxrev) |
|
369 | # self.linelog.annotate(self.linelog.maxrev) | |
371 | for rev, a1, a2, b1, b2 in reversed(self.fixups): |
|
370 | for rev, a1, a2, b1, b2 in reversed(self.fixups): | |
372 | blines = self.targetlines[b1:b2] |
|
371 | blines = self.targetlines[b1:b2] | |
373 | if self.ui.debugflag: |
|
372 | if self.ui.debugflag: | |
374 | idx = (max(rev - 1, 0)) // 2 |
|
373 | idx = (max(rev - 1, 0)) // 2 | |
375 | self.ui.write( |
|
374 | self.ui.write( | |
376 | _(b'%s: chunk %d:%d -> %d lines\n') |
|
375 | _(b'%s: chunk %d:%d -> %d lines\n') | |
377 | % (short(self.fctxs[idx].node()), a1, a2, len(blines)) |
|
376 | % (short(self.fctxs[idx].node()), a1, a2, len(blines)) | |
378 | ) |
|
377 | ) | |
379 | self.linelog.replacelines(rev, a1, a2, b1, b2) |
|
378 | self.linelog.replacelines(rev, a1, a2, b1, b2) | |
380 | if self.opts.get(b'edit_lines', False): |
|
379 | if self.opts.get(b'edit_lines', False): | |
381 | self.finalcontents = self._checkoutlinelogwithedits() |
|
380 | self.finalcontents = self._checkoutlinelogwithedits() | |
382 | else: |
|
381 | else: | |
383 | self.finalcontents = self._checkoutlinelog() |
|
382 | self.finalcontents = self._checkoutlinelog() | |
384 |
|
383 | |||
385 | def getfinalcontent(self, fctx): |
|
384 | def getfinalcontent(self, fctx): | |
386 | """(fctx) -> str. get modified file content for a given filecontext""" |
|
385 | """(fctx) -> str. get modified file content for a given filecontext""" | |
387 | idx = self.fctxs.index(fctx) |
|
386 | idx = self.fctxs.index(fctx) | |
388 | return self.finalcontents[idx] |
|
387 | return self.finalcontents[idx] | |
389 |
|
388 | |||
390 | def _analysediffchunk(self, chunk, annotated): |
|
389 | def _analysediffchunk(self, chunk, annotated): | |
391 | """analyse a different chunk and return new fixups found |
|
390 | """analyse a different chunk and return new fixups found | |
392 |
|
391 | |||
393 | return [] if no lines from the chunk can be safely applied. |
|
392 | return [] if no lines from the chunk can be safely applied. | |
394 |
|
393 | |||
395 | the chunk (or lines) cannot be safely applied, if, for example: |
|
394 | the chunk (or lines) cannot be safely applied, if, for example: | |
396 | - the modified (deleted) lines belong to a public changeset |
|
395 | - the modified (deleted) lines belong to a public changeset | |
397 | (self.fctxs[0]) |
|
396 | (self.fctxs[0]) | |
398 | - the chunk is a pure insertion and the adjacent lines (at most 2 |
|
397 | - the chunk is a pure insertion and the adjacent lines (at most 2 | |
399 | lines) belong to different non-public changesets, or do not belong |
|
398 | lines) belong to different non-public changesets, or do not belong | |
400 | to any non-public changesets. |
|
399 | to any non-public changesets. | |
401 | - the chunk is modifying lines from different changesets. |
|
400 | - the chunk is modifying lines from different changesets. | |
402 | in this case, if the number of lines deleted equals to the number |
|
401 | in this case, if the number of lines deleted equals to the number | |
403 | of lines added, assume it's a simple 1:1 map (could be wrong). |
|
402 | of lines added, assume it's a simple 1:1 map (could be wrong). | |
404 | otherwise, give up. |
|
403 | otherwise, give up. | |
405 | - the chunk is modifying lines from a single non-public changeset, |
|
404 | - the chunk is modifying lines from a single non-public changeset, | |
406 | but other revisions touch the area as well. i.e. the lines are |
|
405 | but other revisions touch the area as well. i.e. the lines are | |
407 | not continuous as seen from the linelog. |
|
406 | not continuous as seen from the linelog. | |
408 | """ |
|
407 | """ | |
409 | a1, a2, b1, b2 = chunk |
|
408 | a1, a2, b1, b2 = chunk | |
410 | # find involved indexes from annotate result |
|
409 | # find involved indexes from annotate result | |
411 | involved = annotated[a1:a2] |
|
410 | involved = annotated[a1:a2] | |
412 | if not involved and annotated: # a1 == a2 and a is not empty |
|
411 | if not involved and annotated: # a1 == a2 and a is not empty | |
413 | # pure insertion, check nearby lines. ignore lines belong |
|
412 | # pure insertion, check nearby lines. ignore lines belong | |
414 | # to the public (first) changeset (i.e. annotated[i][0] == 1) |
|
413 | # to the public (first) changeset (i.e. annotated[i][0] == 1) | |
415 | nearbylinenums = {a2, max(0, a1 - 1)} |
|
414 | nearbylinenums = {a2, max(0, a1 - 1)} | |
416 | involved = [ |
|
415 | involved = [ | |
417 | annotated[i] for i in nearbylinenums if annotated[i][0] != 1 |
|
416 | annotated[i] for i in nearbylinenums if annotated[i][0] != 1 | |
418 | ] |
|
417 | ] | |
419 | involvedrevs = list({r for r, l in involved}) |
|
418 | involvedrevs = list({r for r, l in involved}) | |
420 | newfixups = [] |
|
419 | newfixups = [] | |
421 | if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True): |
|
420 | if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True): | |
422 | # chunk belongs to a single revision |
|
421 | # chunk belongs to a single revision | |
423 | rev = involvedrevs[0] |
|
422 | rev = involvedrevs[0] | |
424 | if rev > 1: |
|
423 | if rev > 1: | |
425 | fixuprev = rev + 1 |
|
424 | fixuprev = rev + 1 | |
426 | newfixups.append((fixuprev, a1, a2, b1, b2)) |
|
425 | newfixups.append((fixuprev, a1, a2, b1, b2)) | |
427 | elif a2 - a1 == b2 - b1 or b1 == b2: |
|
426 | elif a2 - a1 == b2 - b1 or b1 == b2: | |
428 | # 1:1 line mapping, or chunk was deleted |
|
427 | # 1:1 line mapping, or chunk was deleted | |
429 | for i in pycompat.xrange(a1, a2): |
|
428 | for i in pycompat.xrange(a1, a2): | |
430 | rev, linenum = annotated[i] |
|
429 | rev, linenum = annotated[i] | |
431 | if rev > 1: |
|
430 | if rev > 1: | |
432 | if b1 == b2: # deletion, simply remove that single line |
|
431 | if b1 == b2: # deletion, simply remove that single line | |
433 | nb1 = nb2 = 0 |
|
432 | nb1 = nb2 = 0 | |
434 | else: # 1:1 line mapping, change the corresponding rev |
|
433 | else: # 1:1 line mapping, change the corresponding rev | |
435 | nb1 = b1 + i - a1 |
|
434 | nb1 = b1 + i - a1 | |
436 | nb2 = nb1 + 1 |
|
435 | nb2 = nb1 + 1 | |
437 | fixuprev = rev + 1 |
|
436 | fixuprev = rev + 1 | |
438 | newfixups.append((fixuprev, i, i + 1, nb1, nb2)) |
|
437 | newfixups.append((fixuprev, i, i + 1, nb1, nb2)) | |
439 | return self._optimizefixups(newfixups) |
|
438 | return self._optimizefixups(newfixups) | |
440 |
|
439 | |||
441 | @staticmethod |
|
440 | @staticmethod | |
442 | def _alldiffchunks(a, b, alines, blines): |
|
441 | def _alldiffchunks(a, b, alines, blines): | |
443 | """like mdiff.allblocks, but only care about differences""" |
|
442 | """like mdiff.allblocks, but only care about differences""" | |
444 | blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines) |
|
443 | blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines) | |
445 | for chunk, btype in blocks: |
|
444 | for chunk, btype in blocks: | |
446 | if btype != b'!': |
|
445 | if btype != b'!': | |
447 | continue |
|
446 | continue | |
448 | yield chunk |
|
447 | yield chunk | |
449 |
|
448 | |||
450 | def _buildlinelog(self): |
|
449 | def _buildlinelog(self): | |
451 | """calculate the initial linelog based on self.content{,line}s. |
|
450 | """calculate the initial linelog based on self.content{,line}s. | |
452 | this is similar to running a partial "annotate". |
|
451 | this is similar to running a partial "annotate". | |
453 | """ |
|
452 | """ | |
454 | llog = linelog.linelog() |
|
453 | llog = linelog.linelog() | |
455 | a, alines = b'', [] |
|
454 | a, alines = b'', [] | |
456 | for i in pycompat.xrange(len(self.contents)): |
|
455 | for i in pycompat.xrange(len(self.contents)): | |
457 | b, blines = self.contents[i], self.contentlines[i] |
|
456 | b, blines = self.contents[i], self.contentlines[i] | |
458 | llrev = i * 2 + 1 |
|
457 | llrev = i * 2 + 1 | |
459 | chunks = self._alldiffchunks(a, b, alines, blines) |
|
458 | chunks = self._alldiffchunks(a, b, alines, blines) | |
460 | for a1, a2, b1, b2 in reversed(list(chunks)): |
|
459 | for a1, a2, b1, b2 in reversed(list(chunks)): | |
461 | llog.replacelines(llrev, a1, a2, b1, b2) |
|
460 | llog.replacelines(llrev, a1, a2, b1, b2) | |
462 | a, alines = b, blines |
|
461 | a, alines = b, blines | |
463 | return llog |
|
462 | return llog | |
464 |
|
463 | |||
465 | def _checkoutlinelog(self): |
|
464 | def _checkoutlinelog(self): | |
466 | """() -> [str]. check out file contents from linelog""" |
|
465 | """() -> [str]. check out file contents from linelog""" | |
467 | contents = [] |
|
466 | contents = [] | |
468 | for i in pycompat.xrange(len(self.contents)): |
|
467 | for i in pycompat.xrange(len(self.contents)): | |
469 | rev = (i + 1) * 2 |
|
468 | rev = (i + 1) * 2 | |
470 | self.linelog.annotate(rev) |
|
469 | self.linelog.annotate(rev) | |
471 | content = b''.join(map(self._getline, self.linelog.annotateresult)) |
|
470 | content = b''.join(map(self._getline, self.linelog.annotateresult)) | |
472 | contents.append(content) |
|
471 | contents.append(content) | |
473 | return contents |
|
472 | return contents | |
474 |
|
473 | |||
475 | def _checkoutlinelogwithedits(self): |
|
474 | def _checkoutlinelogwithedits(self): | |
476 | """() -> [str]. prompt all lines for edit""" |
|
475 | """() -> [str]. prompt all lines for edit""" | |
477 | alllines = self.linelog.getalllines() |
|
476 | alllines = self.linelog.getalllines() | |
478 | # header |
|
477 | # header | |
479 | editortext = ( |
|
478 | editortext = ( | |
480 | _( |
|
479 | _( | |
481 | b'HG: editing %s\nHG: "y" means the line to the right ' |
|
480 | b'HG: editing %s\nHG: "y" means the line to the right ' | |
482 | b'exists in the changeset to the top\nHG:\n' |
|
481 | b'exists in the changeset to the top\nHG:\n' | |
483 | ) |
|
482 | ) | |
484 | % self.fctxs[-1].path() |
|
483 | % self.fctxs[-1].path() | |
485 | ) |
|
484 | ) | |
486 | # [(idx, fctx)]. hide the dummy emptyfilecontext |
|
485 | # [(idx, fctx)]. hide the dummy emptyfilecontext | |
487 | visiblefctxs = [ |
|
486 | visiblefctxs = [ | |
488 | (i, f) |
|
487 | (i, f) | |
489 | for i, f in enumerate(self.fctxs) |
|
488 | for i, f in enumerate(self.fctxs) | |
490 | if not isinstance(f, emptyfilecontext) |
|
489 | if not isinstance(f, emptyfilecontext) | |
491 | ] |
|
490 | ] | |
492 | for i, (j, f) in enumerate(visiblefctxs): |
|
491 | for i, (j, f) in enumerate(visiblefctxs): | |
493 | editortext += _(b'HG: %s/%s %s %s\n') % ( |
|
492 | editortext += _(b'HG: %s/%s %s %s\n') % ( | |
494 | b'|' * i, |
|
493 | b'|' * i, | |
495 | b'-' * (len(visiblefctxs) - i + 1), |
|
494 | b'-' * (len(visiblefctxs) - i + 1), | |
496 | short(f.node()), |
|
495 | short(f.node()), | |
497 | f.description().split(b'\n', 1)[0], |
|
496 | f.description().split(b'\n', 1)[0], | |
498 | ) |
|
497 | ) | |
499 | editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs)) |
|
498 | editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs)) | |
500 | # figure out the lifetime of a line, this is relatively inefficient, |
|
499 | # figure out the lifetime of a line, this is relatively inefficient, | |
501 | # but probably fine |
|
500 | # but probably fine | |
502 | lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}} |
|
501 | lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}} | |
503 | for i, f in visiblefctxs: |
|
502 | for i, f in visiblefctxs: | |
504 | self.linelog.annotate((i + 1) * 2) |
|
503 | self.linelog.annotate((i + 1) * 2) | |
505 | for l in self.linelog.annotateresult: |
|
504 | for l in self.linelog.annotateresult: | |
506 | lineset[l].add(i) |
|
505 | lineset[l].add(i) | |
507 | # append lines |
|
506 | # append lines | |
508 | for l in alllines: |
|
507 | for l in alllines: | |
509 | editortext += b' %s : %s' % ( |
|
508 | editortext += b' %s : %s' % ( | |
510 | b''.join( |
|
509 | b''.join( | |
511 | [ |
|
510 | [ | |
512 | (b'y' if i in lineset[l] else b' ') |
|
511 | (b'y' if i in lineset[l] else b' ') | |
513 | for i, _f in visiblefctxs |
|
512 | for i, _f in visiblefctxs | |
514 | ] |
|
513 | ] | |
515 | ), |
|
514 | ), | |
516 | self._getline(l), |
|
515 | self._getline(l), | |
517 | ) |
|
516 | ) | |
518 | # run editor |
|
517 | # run editor | |
519 | editedtext = self.ui.edit(editortext, b'', action=b'absorb') |
|
518 | editedtext = self.ui.edit(editortext, b'', action=b'absorb') | |
520 | if not editedtext: |
|
519 | if not editedtext: | |
521 | raise error.InputError(_(b'empty editor text')) |
|
520 | raise error.InputError(_(b'empty editor text')) | |
522 | # parse edited result |
|
521 | # parse edited result | |
523 | contents = [b''] * len(self.fctxs) |
|
522 | contents = [b''] * len(self.fctxs) | |
524 | leftpadpos = 4 |
|
523 | leftpadpos = 4 | |
525 | colonpos = leftpadpos + len(visiblefctxs) + 1 |
|
524 | colonpos = leftpadpos + len(visiblefctxs) + 1 | |
526 | for l in mdiff.splitnewlines(editedtext): |
|
525 | for l in mdiff.splitnewlines(editedtext): | |
527 | if l.startswith(b'HG:'): |
|
526 | if l.startswith(b'HG:'): | |
528 | continue |
|
527 | continue | |
529 | if l[colonpos - 1 : colonpos + 2] != b' : ': |
|
528 | if l[colonpos - 1 : colonpos + 2] != b' : ': | |
530 | raise error.InputError(_(b'malformed line: %s') % l) |
|
529 | raise error.InputError(_(b'malformed line: %s') % l) | |
531 | linecontent = l[colonpos + 2 :] |
|
530 | linecontent = l[colonpos + 2 :] | |
532 | for i, ch in enumerate( |
|
531 | for i, ch in enumerate( | |
533 | pycompat.bytestr(l[leftpadpos : colonpos - 1]) |
|
532 | pycompat.bytestr(l[leftpadpos : colonpos - 1]) | |
534 | ): |
|
533 | ): | |
535 | if ch == b'y': |
|
534 | if ch == b'y': | |
536 | contents[visiblefctxs[i][0]] += linecontent |
|
535 | contents[visiblefctxs[i][0]] += linecontent | |
537 | # chunkstats is hard to calculate if anything changes, therefore |
|
536 | # chunkstats is hard to calculate if anything changes, therefore | |
538 | # set them to just a simple value (1, 1). |
|
537 | # set them to just a simple value (1, 1). | |
539 | if editedtext != editortext: |
|
538 | if editedtext != editortext: | |
540 | self.chunkstats = [1, 1] |
|
539 | self.chunkstats = [1, 1] | |
541 | return contents |
|
540 | return contents | |
542 |
|
541 | |||
543 | def _getline(self, lineinfo): |
|
542 | def _getline(self, lineinfo): | |
544 | """((rev, linenum)) -> str. convert rev+line number to line content""" |
|
543 | """((rev, linenum)) -> str. convert rev+line number to line content""" | |
545 | rev, linenum = lineinfo |
|
544 | rev, linenum = lineinfo | |
546 | if rev & 1: # odd: original line taken from fctxs |
|
545 | if rev & 1: # odd: original line taken from fctxs | |
547 | return self.contentlines[rev // 2][linenum] |
|
546 | return self.contentlines[rev // 2][linenum] | |
548 | else: # even: fixup line from targetfctx |
|
547 | else: # even: fixup line from targetfctx | |
549 | return self.targetlines[linenum] |
|
548 | return self.targetlines[linenum] | |
550 |
|
549 | |||
551 | def _iscontinuous(self, a1, a2, closedinterval=False): |
|
550 | def _iscontinuous(self, a1, a2, closedinterval=False): | |
552 | """(a1, a2 : int) -> bool |
|
551 | """(a1, a2 : int) -> bool | |
553 |
|
552 | |||
554 | check if these lines are continuous. i.e. no other insertions or |
|
553 | check if these lines are continuous. i.e. no other insertions or | |
555 | deletions (from other revisions) among these lines. |
|
554 | deletions (from other revisions) among these lines. | |
556 |
|
555 | |||
557 | closedinterval decides whether a2 should be included or not. i.e. is |
|
556 | closedinterval decides whether a2 should be included or not. i.e. is | |
558 | it [a1, a2), or [a1, a2] ? |
|
557 | it [a1, a2), or [a1, a2] ? | |
559 | """ |
|
558 | """ | |
560 | if a1 >= a2: |
|
559 | if a1 >= a2: | |
561 | return True |
|
560 | return True | |
562 | llog = self.linelog |
|
561 | llog = self.linelog | |
563 | offset1 = llog.getoffset(a1) |
|
562 | offset1 = llog.getoffset(a1) | |
564 | offset2 = llog.getoffset(a2) + int(closedinterval) |
|
563 | offset2 = llog.getoffset(a2) + int(closedinterval) | |
565 | linesinbetween = llog.getalllines(offset1, offset2) |
|
564 | linesinbetween = llog.getalllines(offset1, offset2) | |
566 | return len(linesinbetween) == a2 - a1 + int(closedinterval) |
|
565 | return len(linesinbetween) == a2 - a1 + int(closedinterval) | |
567 |
|
566 | |||
568 | def _optimizefixups(self, fixups): |
|
567 | def _optimizefixups(self, fixups): | |
569 | """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)]. |
|
568 | """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)]. | |
570 | merge adjacent fixups to make them less fragmented. |
|
569 | merge adjacent fixups to make them less fragmented. | |
571 | """ |
|
570 | """ | |
572 | result = [] |
|
571 | result = [] | |
573 | pcurrentchunk = [[-1, -1, -1, -1, -1]] |
|
572 | pcurrentchunk = [[-1, -1, -1, -1, -1]] | |
574 |
|
573 | |||
575 | def pushchunk(): |
|
574 | def pushchunk(): | |
576 | if pcurrentchunk[0][0] != -1: |
|
575 | if pcurrentchunk[0][0] != -1: | |
577 | result.append(tuple(pcurrentchunk[0])) |
|
576 | result.append(tuple(pcurrentchunk[0])) | |
578 |
|
577 | |||
579 | for i, chunk in enumerate(fixups): |
|
578 | for i, chunk in enumerate(fixups): | |
580 | rev, a1, a2, b1, b2 = chunk |
|
579 | rev, a1, a2, b1, b2 = chunk | |
581 | lastrev = pcurrentchunk[0][0] |
|
580 | lastrev = pcurrentchunk[0][0] | |
582 | lasta2 = pcurrentchunk[0][2] |
|
581 | lasta2 = pcurrentchunk[0][2] | |
583 | lastb2 = pcurrentchunk[0][4] |
|
582 | lastb2 = pcurrentchunk[0][4] | |
584 | if ( |
|
583 | if ( | |
585 | a1 == lasta2 |
|
584 | a1 == lasta2 | |
586 | and b1 == lastb2 |
|
585 | and b1 == lastb2 | |
587 | and rev == lastrev |
|
586 | and rev == lastrev | |
588 | and self._iscontinuous(max(a1 - 1, 0), a1) |
|
587 | and self._iscontinuous(max(a1 - 1, 0), a1) | |
589 | ): |
|
588 | ): | |
590 | # merge into currentchunk |
|
589 | # merge into currentchunk | |
591 | pcurrentchunk[0][2] = a2 |
|
590 | pcurrentchunk[0][2] = a2 | |
592 | pcurrentchunk[0][4] = b2 |
|
591 | pcurrentchunk[0][4] = b2 | |
593 | else: |
|
592 | else: | |
594 | pushchunk() |
|
593 | pushchunk() | |
595 | pcurrentchunk[0] = list(chunk) |
|
594 | pcurrentchunk[0] = list(chunk) | |
596 | pushchunk() |
|
595 | pushchunk() | |
597 | return result |
|
596 | return result | |
598 |
|
597 | |||
599 | def _showchanges(self, fm, alines, blines, chunk, fixups): |
|
598 | def _showchanges(self, fm, alines, blines, chunk, fixups): | |
600 | def trim(line): |
|
599 | def trim(line): | |
601 | if line.endswith(b'\n'): |
|
600 | if line.endswith(b'\n'): | |
602 | line = line[:-1] |
|
601 | line = line[:-1] | |
603 | return line |
|
602 | return line | |
604 |
|
603 | |||
605 | # this is not optimized for perf but _showchanges only gets executed |
|
604 | # this is not optimized for perf but _showchanges only gets executed | |
606 | # with an extra command-line flag. |
|
605 | # with an extra command-line flag. | |
607 | a1, a2, b1, b2 = chunk |
|
606 | a1, a2, b1, b2 = chunk | |
608 | aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1) |
|
607 | aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1) | |
609 | for idx, fa1, fa2, fb1, fb2 in fixups: |
|
608 | for idx, fa1, fa2, fb1, fb2 in fixups: | |
610 | for i in pycompat.xrange(fa1, fa2): |
|
609 | for i in pycompat.xrange(fa1, fa2): | |
611 | aidxs[i - a1] = (max(idx, 1) - 1) // 2 |
|
610 | aidxs[i - a1] = (max(idx, 1) - 1) // 2 | |
612 | for i in pycompat.xrange(fb1, fb2): |
|
611 | for i in pycompat.xrange(fb1, fb2): | |
613 | bidxs[i - b1] = (max(idx, 1) - 1) // 2 |
|
612 | bidxs[i - b1] = (max(idx, 1) - 1) // 2 | |
614 |
|
613 | |||
615 | fm.startitem() |
|
614 | fm.startitem() | |
616 | fm.write( |
|
615 | fm.write( | |
617 | b'hunk', |
|
616 | b'hunk', | |
618 | b' %s\n', |
|
617 | b' %s\n', | |
619 | b'@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1), |
|
618 | b'@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1), | |
620 | label=b'diff.hunk', |
|
619 | label=b'diff.hunk', | |
621 | ) |
|
620 | ) | |
622 | fm.data(path=self.path, linetype=b'hunk') |
|
621 | fm.data(path=self.path, linetype=b'hunk') | |
623 |
|
622 | |||
624 | def writeline(idx, diffchar, line, linetype, linelabel): |
|
623 | def writeline(idx, diffchar, line, linetype, linelabel): | |
625 | fm.startitem() |
|
624 | fm.startitem() | |
626 | node = b'' |
|
625 | node = b'' | |
627 | if idx: |
|
626 | if idx: | |
628 | ctx = self.fctxs[idx] |
|
627 | ctx = self.fctxs[idx] | |
629 | fm.context(fctx=ctx) |
|
628 | fm.context(fctx=ctx) | |
630 | node = ctx.hex() |
|
629 | node = ctx.hex() | |
631 | self.ctxaffected.add(ctx.changectx()) |
|
630 | self.ctxaffected.add(ctx.changectx()) | |
632 | fm.write(b'node', b'%-7.7s ', node, label=b'absorb.node') |
|
631 | fm.write(b'node', b'%-7.7s ', node, label=b'absorb.node') | |
633 | fm.write( |
|
632 | fm.write( | |
634 | b'diffchar ' + linetype, |
|
633 | b'diffchar ' + linetype, | |
635 | b'%s%s\n', |
|
634 | b'%s%s\n', | |
636 | diffchar, |
|
635 | diffchar, | |
637 | line, |
|
636 | line, | |
638 | label=linelabel, |
|
637 | label=linelabel, | |
639 | ) |
|
638 | ) | |
640 | fm.data(path=self.path, linetype=linetype) |
|
639 | fm.data(path=self.path, linetype=linetype) | |
641 |
|
640 | |||
642 | for i in pycompat.xrange(a1, a2): |
|
641 | for i in pycompat.xrange(a1, a2): | |
643 | writeline( |
|
642 | writeline( | |
644 | aidxs[i - a1], |
|
643 | aidxs[i - a1], | |
645 | b'-', |
|
644 | b'-', | |
646 | trim(alines[i]), |
|
645 | trim(alines[i]), | |
647 | b'deleted', |
|
646 | b'deleted', | |
648 | b'diff.deleted', |
|
647 | b'diff.deleted', | |
649 | ) |
|
648 | ) | |
650 | for i in pycompat.xrange(b1, b2): |
|
649 | for i in pycompat.xrange(b1, b2): | |
651 | writeline( |
|
650 | writeline( | |
652 | bidxs[i - b1], |
|
651 | bidxs[i - b1], | |
653 | b'+', |
|
652 | b'+', | |
654 | trim(blines[i]), |
|
653 | trim(blines[i]), | |
655 | b'inserted', |
|
654 | b'inserted', | |
656 | b'diff.inserted', |
|
655 | b'diff.inserted', | |
657 | ) |
|
656 | ) | |
658 |
|
657 | |||
659 |
|
658 | |||
660 | class fixupstate(object): |
|
659 | class fixupstate(object): | |
661 | """state needed to run absorb |
|
660 | """state needed to run absorb | |
662 |
|
661 | |||
663 | internally, it keeps paths and filefixupstates. |
|
662 | internally, it keeps paths and filefixupstates. | |
664 |
|
663 | |||
665 | a typical use is like filefixupstates: |
|
664 | a typical use is like filefixupstates: | |
666 |
|
665 | |||
667 | 1. call diffwith, to calculate fixups |
|
666 | 1. call diffwith, to calculate fixups | |
668 | 2. (optionally), present fixups to the user, or edit fixups |
|
667 | 2. (optionally), present fixups to the user, or edit fixups | |
669 | 3. call apply, to apply changes to memory |
|
668 | 3. call apply, to apply changes to memory | |
670 | 4. call commit, to commit changes to hg database |
|
669 | 4. call commit, to commit changes to hg database | |
671 | """ |
|
670 | """ | |
672 |
|
671 | |||
673 | def __init__(self, stack, ui=None, opts=None): |
|
672 | def __init__(self, stack, ui=None, opts=None): | |
674 | """([ctx], ui or None) -> None |
|
673 | """([ctx], ui or None) -> None | |
675 |
|
674 | |||
676 | stack: should be linear, and sorted by topo order - oldest first. |
|
675 | stack: should be linear, and sorted by topo order - oldest first. | |
677 | all commits in stack are considered mutable. |
|
676 | all commits in stack are considered mutable. | |
678 | """ |
|
677 | """ | |
679 | assert stack |
|
678 | assert stack | |
680 | self.ui = ui or nullui() |
|
679 | self.ui = ui or nullui() | |
681 | self.opts = opts or {} |
|
680 | self.opts = opts or {} | |
682 | self.stack = stack |
|
681 | self.stack = stack | |
683 | self.repo = stack[-1].repo().unfiltered() |
|
682 | self.repo = stack[-1].repo().unfiltered() | |
684 |
|
683 | |||
685 | # following fields will be filled later |
|
684 | # following fields will be filled later | |
686 | self.paths = [] # [str] |
|
685 | self.paths = [] # [str] | |
687 | self.status = None # ctx.status output |
|
686 | self.status = None # ctx.status output | |
688 | self.fctxmap = {} # {path: {ctx: fctx}} |
|
687 | self.fctxmap = {} # {path: {ctx: fctx}} | |
689 | self.fixupmap = {} # {path: filefixupstate} |
|
688 | self.fixupmap = {} # {path: filefixupstate} | |
690 | self.replacemap = {} # {oldnode: newnode or None} |
|
689 | self.replacemap = {} # {oldnode: newnode or None} | |
691 | self.finalnode = None # head after all fixups |
|
690 | self.finalnode = None # head after all fixups | |
692 | self.ctxaffected = set() # ctx that will be absorbed into |
|
691 | self.ctxaffected = set() # ctx that will be absorbed into | |
693 |
|
692 | |||
694 | def diffwith(self, targetctx, match=None, fm=None): |
|
693 | def diffwith(self, targetctx, match=None, fm=None): | |
695 | """diff and prepare fixups. update self.fixupmap, self.paths""" |
|
694 | """diff and prepare fixups. update self.fixupmap, self.paths""" | |
696 | # only care about modified files |
|
695 | # only care about modified files | |
697 | self.status = self.stack[-1].status(targetctx, match) |
|
696 | self.status = self.stack[-1].status(targetctx, match) | |
698 | self.paths = [] |
|
697 | self.paths = [] | |
699 | # but if --edit-lines is used, the user may want to edit files |
|
698 | # but if --edit-lines is used, the user may want to edit files | |
700 | # even if they are not modified |
|
699 | # even if they are not modified | |
701 | editopt = self.opts.get(b'edit_lines') |
|
700 | editopt = self.opts.get(b'edit_lines') | |
702 | if not self.status.modified and editopt and match: |
|
701 | if not self.status.modified and editopt and match: | |
703 | interestingpaths = match.files() |
|
702 | interestingpaths = match.files() | |
704 | else: |
|
703 | else: | |
705 | interestingpaths = self.status.modified |
|
704 | interestingpaths = self.status.modified | |
706 | # prepare the filefixupstate |
|
705 | # prepare the filefixupstate | |
707 | seenfctxs = set() |
|
706 | seenfctxs = set() | |
708 | # sorting is necessary to eliminate ambiguity for the "double move" |
|
707 | # sorting is necessary to eliminate ambiguity for the "double move" | |
709 | # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A". |
|
708 | # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A". | |
710 | for path in sorted(interestingpaths): |
|
709 | for path in sorted(interestingpaths): | |
711 | self.ui.debug(b'calculating fixups for %s\n' % path) |
|
710 | self.ui.debug(b'calculating fixups for %s\n' % path) | |
712 | targetfctx = targetctx[path] |
|
711 | targetfctx = targetctx[path] | |
713 | fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs) |
|
712 | fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs) | |
714 | # ignore symbolic links or binary, or unchanged files |
|
713 | # ignore symbolic links or binary, or unchanged files | |
715 | if any( |
|
714 | if any( | |
716 | f.islink() or stringutil.binary(f.data()) |
|
715 | f.islink() or stringutil.binary(f.data()) | |
717 | for f in [targetfctx] + fctxs |
|
716 | for f in [targetfctx] + fctxs | |
718 | if not isinstance(f, emptyfilecontext) |
|
717 | if not isinstance(f, emptyfilecontext) | |
719 | ): |
|
718 | ): | |
720 | continue |
|
719 | continue | |
721 | if targetfctx.data() == fctxs[-1].data() and not editopt: |
|
720 | if targetfctx.data() == fctxs[-1].data() and not editopt: | |
722 | continue |
|
721 | continue | |
723 | seenfctxs.update(fctxs[1:]) |
|
722 | seenfctxs.update(fctxs[1:]) | |
724 | self.fctxmap[path] = ctx2fctx |
|
723 | self.fctxmap[path] = ctx2fctx | |
725 | fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts) |
|
724 | fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts) | |
726 | if fm is not None: |
|
725 | if fm is not None: | |
727 | fm.startitem() |
|
726 | fm.startitem() | |
728 | fm.plain(b'showing changes for ') |
|
727 | fm.plain(b'showing changes for ') | |
729 | fm.write(b'path', b'%s\n', path, label=b'absorb.path') |
|
728 | fm.write(b'path', b'%s\n', path, label=b'absorb.path') | |
730 | fm.data(linetype=b'path') |
|
729 | fm.data(linetype=b'path') | |
731 | fstate.diffwith(targetfctx, fm) |
|
730 | fstate.diffwith(targetfctx, fm) | |
732 | self.fixupmap[path] = fstate |
|
731 | self.fixupmap[path] = fstate | |
733 | self.paths.append(path) |
|
732 | self.paths.append(path) | |
734 | self.ctxaffected.update(fstate.ctxaffected) |
|
733 | self.ctxaffected.update(fstate.ctxaffected) | |
735 |
|
734 | |||
736 | def apply(self): |
|
735 | def apply(self): | |
737 | """apply fixups to individual filefixupstates""" |
|
736 | """apply fixups to individual filefixupstates""" | |
738 | for path, state in pycompat.iteritems(self.fixupmap): |
|
737 | for path, state in pycompat.iteritems(self.fixupmap): | |
739 | if self.ui.debugflag: |
|
738 | if self.ui.debugflag: | |
740 | self.ui.write(_(b'applying fixups to %s\n') % path) |
|
739 | self.ui.write(_(b'applying fixups to %s\n') % path) | |
741 | state.apply() |
|
740 | state.apply() | |
742 |
|
741 | |||
743 | @property |
|
742 | @property | |
744 | def chunkstats(self): |
|
743 | def chunkstats(self): | |
745 | """-> {path: chunkstats}. collect chunkstats from filefixupstates""" |
|
744 | """-> {path: chunkstats}. collect chunkstats from filefixupstates""" | |
746 | return { |
|
745 | return { | |
747 | path: state.chunkstats |
|
746 | path: state.chunkstats | |
748 | for path, state in pycompat.iteritems(self.fixupmap) |
|
747 | for path, state in pycompat.iteritems(self.fixupmap) | |
749 | } |
|
748 | } | |
750 |
|
749 | |||
751 | def commit(self): |
|
750 | def commit(self): | |
752 | """commit changes. update self.finalnode, self.replacemap""" |
|
751 | """commit changes. update self.finalnode, self.replacemap""" | |
753 | with self.repo.transaction(b'absorb') as tr: |
|
752 | with self.repo.transaction(b'absorb') as tr: | |
754 | self._commitstack() |
|
753 | self._commitstack() | |
755 | self._movebookmarks(tr) |
|
754 | self._movebookmarks(tr) | |
756 | if self.repo[b'.'].node() in self.replacemap: |
|
755 | if self.repo[b'.'].node() in self.replacemap: | |
757 | self._moveworkingdirectoryparent() |
|
756 | self._moveworkingdirectoryparent() | |
758 | self._cleanupoldcommits() |
|
757 | self._cleanupoldcommits() | |
759 | return self.finalnode |
|
758 | return self.finalnode | |
760 |
|
759 | |||
761 | def printchunkstats(self): |
|
760 | def printchunkstats(self): | |
762 | """print things like '1 of 2 chunk(s) applied'""" |
|
761 | """print things like '1 of 2 chunk(s) applied'""" | |
763 | ui = self.ui |
|
762 | ui = self.ui | |
764 | chunkstats = self.chunkstats |
|
763 | chunkstats = self.chunkstats | |
765 | if ui.verbose: |
|
764 | if ui.verbose: | |
766 | # chunkstats for each file |
|
765 | # chunkstats for each file | |
767 | for path, stat in pycompat.iteritems(chunkstats): |
|
766 | for path, stat in pycompat.iteritems(chunkstats): | |
768 | if stat[0]: |
|
767 | if stat[0]: | |
769 | ui.write( |
|
768 | ui.write( | |
770 | _(b'%s: %d of %d chunk(s) applied\n') |
|
769 | _(b'%s: %d of %d chunk(s) applied\n') | |
771 | % (path, stat[0], stat[1]) |
|
770 | % (path, stat[0], stat[1]) | |
772 | ) |
|
771 | ) | |
773 | elif not ui.quiet: |
|
772 | elif not ui.quiet: | |
774 | # a summary for all files |
|
773 | # a summary for all files | |
775 | stats = chunkstats.values() |
|
774 | stats = chunkstats.values() | |
776 | applied, total = (sum(s[i] for s in stats) for i in (0, 1)) |
|
775 | applied, total = (sum(s[i] for s in stats) for i in (0, 1)) | |
777 | ui.write(_(b'%d of %d chunk(s) applied\n') % (applied, total)) |
|
776 | ui.write(_(b'%d of %d chunk(s) applied\n') % (applied, total)) | |
778 |
|
777 | |||
779 | def _commitstack(self): |
|
778 | def _commitstack(self): | |
780 | """make new commits. update self.finalnode, self.replacemap. |
|
779 | """make new commits. update self.finalnode, self.replacemap. | |
781 | it is splitted from "commit" to avoid too much indentation. |
|
780 | it is splitted from "commit" to avoid too much indentation. | |
782 | """ |
|
781 | """ | |
783 | # last node (20-char) committed by us |
|
782 | # last node (20-char) committed by us | |
784 | lastcommitted = None |
|
783 | lastcommitted = None | |
785 | # p1 which overrides the parent of the next commit, "None" means use |
|
784 | # p1 which overrides the parent of the next commit, "None" means use | |
786 | # the original parent unchanged |
|
785 | # the original parent unchanged | |
787 | nextp1 = None |
|
786 | nextp1 = None | |
788 | for ctx in self.stack: |
|
787 | for ctx in self.stack: | |
789 | memworkingcopy = self._getnewfilecontents(ctx) |
|
788 | memworkingcopy = self._getnewfilecontents(ctx) | |
790 | if not memworkingcopy and not lastcommitted: |
|
789 | if not memworkingcopy and not lastcommitted: | |
791 | # nothing changed, nothing commited |
|
790 | # nothing changed, nothing commited | |
792 | nextp1 = ctx |
|
791 | nextp1 = ctx | |
793 | continue |
|
792 | continue | |
794 | willbecomenoop = ctx.files() and self._willbecomenoop( |
|
793 | willbecomenoop = ctx.files() and self._willbecomenoop( | |
795 | memworkingcopy, ctx, nextp1 |
|
794 | memworkingcopy, ctx, nextp1 | |
796 | ) |
|
795 | ) | |
797 | if self.skip_empty_successor and willbecomenoop: |
|
796 | if self.skip_empty_successor and willbecomenoop: | |
798 | # changeset is no longer necessary |
|
797 | # changeset is no longer necessary | |
799 | self.replacemap[ctx.node()] = None |
|
798 | self.replacemap[ctx.node()] = None | |
800 | msg = _(b'became empty and was dropped') |
|
799 | msg = _(b'became empty and was dropped') | |
801 | else: |
|
800 | else: | |
802 | # changeset needs re-commit |
|
801 | # changeset needs re-commit | |
803 | nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1) |
|
802 | nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1) | |
804 | lastcommitted = self.repo[nodestr] |
|
803 | lastcommitted = self.repo[nodestr] | |
805 | nextp1 = lastcommitted |
|
804 | nextp1 = lastcommitted | |
806 | self.replacemap[ctx.node()] = lastcommitted.node() |
|
805 | self.replacemap[ctx.node()] = lastcommitted.node() | |
807 | if memworkingcopy: |
|
806 | if memworkingcopy: | |
808 | if willbecomenoop: |
|
807 | if willbecomenoop: | |
809 | msg = _(b'%d file(s) changed, became empty as %s') |
|
808 | msg = _(b'%d file(s) changed, became empty as %s') | |
810 | else: |
|
809 | else: | |
811 | msg = _(b'%d file(s) changed, became %s') |
|
810 | msg = _(b'%d file(s) changed, became %s') | |
812 | msg = msg % ( |
|
811 | msg = msg % ( | |
813 | len(memworkingcopy), |
|
812 | len(memworkingcopy), | |
814 | self._ctx2str(lastcommitted), |
|
813 | self._ctx2str(lastcommitted), | |
815 | ) |
|
814 | ) | |
816 | else: |
|
815 | else: | |
817 | msg = _(b'became %s') % self._ctx2str(lastcommitted) |
|
816 | msg = _(b'became %s') % self._ctx2str(lastcommitted) | |
818 | if self.ui.verbose and msg: |
|
817 | if self.ui.verbose and msg: | |
819 | self.ui.write(_(b'%s: %s\n') % (self._ctx2str(ctx), msg)) |
|
818 | self.ui.write(_(b'%s: %s\n') % (self._ctx2str(ctx), msg)) | |
820 | self.finalnode = lastcommitted and lastcommitted.node() |
|
819 | self.finalnode = lastcommitted and lastcommitted.node() | |
821 |
|
820 | |||
822 | def _ctx2str(self, ctx): |
|
821 | def _ctx2str(self, ctx): | |
823 | if self.ui.debugflag: |
|
822 | if self.ui.debugflag: | |
824 | return b'%d:%s' % (ctx.rev(), ctx.hex()) |
|
823 | return b'%d:%s' % (ctx.rev(), ctx.hex()) | |
825 | else: |
|
824 | else: | |
826 | return b'%d:%s' % (ctx.rev(), short(ctx.node())) |
|
825 | return b'%d:%s' % (ctx.rev(), short(ctx.node())) | |
827 |
|
826 | |||
828 | def _getnewfilecontents(self, ctx): |
|
827 | def _getnewfilecontents(self, ctx): | |
829 | """(ctx) -> {path: str} |
|
828 | """(ctx) -> {path: str} | |
830 |
|
829 | |||
831 | fetch file contents from filefixupstates. |
|
830 | fetch file contents from filefixupstates. | |
832 | return the working copy overrides - files different from ctx. |
|
831 | return the working copy overrides - files different from ctx. | |
833 | """ |
|
832 | """ | |
834 | result = {} |
|
833 | result = {} | |
835 | for path in self.paths: |
|
834 | for path in self.paths: | |
836 | ctx2fctx = self.fctxmap[path] # {ctx: fctx} |
|
835 | ctx2fctx = self.fctxmap[path] # {ctx: fctx} | |
837 | if ctx not in ctx2fctx: |
|
836 | if ctx not in ctx2fctx: | |
838 | continue |
|
837 | continue | |
839 | fctx = ctx2fctx[ctx] |
|
838 | fctx = ctx2fctx[ctx] | |
840 | content = fctx.data() |
|
839 | content = fctx.data() | |
841 | newcontent = self.fixupmap[path].getfinalcontent(fctx) |
|
840 | newcontent = self.fixupmap[path].getfinalcontent(fctx) | |
842 | if content != newcontent: |
|
841 | if content != newcontent: | |
843 | result[fctx.path()] = newcontent |
|
842 | result[fctx.path()] = newcontent | |
844 | return result |
|
843 | return result | |
845 |
|
844 | |||
846 | def _movebookmarks(self, tr): |
|
845 | def _movebookmarks(self, tr): | |
847 | repo = self.repo |
|
846 | repo = self.repo | |
848 | needupdate = [ |
|
847 | needupdate = [ | |
849 | (name, self.replacemap[hsh]) |
|
848 | (name, self.replacemap[hsh]) | |
850 | for name, hsh in pycompat.iteritems(repo._bookmarks) |
|
849 | for name, hsh in pycompat.iteritems(repo._bookmarks) | |
851 | if hsh in self.replacemap |
|
850 | if hsh in self.replacemap | |
852 | ] |
|
851 | ] | |
853 | changes = [] |
|
852 | changes = [] | |
854 | for name, hsh in needupdate: |
|
853 | for name, hsh in needupdate: | |
855 | if hsh: |
|
854 | if hsh: | |
856 | changes.append((name, hsh)) |
|
855 | changes.append((name, hsh)) | |
857 | if self.ui.verbose: |
|
856 | if self.ui.verbose: | |
858 | self.ui.write( |
|
857 | self.ui.write( | |
859 | _(b'moving bookmark %s to %s\n') % (name, hex(hsh)) |
|
858 | _(b'moving bookmark %s to %s\n') % (name, hex(hsh)) | |
860 | ) |
|
859 | ) | |
861 | else: |
|
860 | else: | |
862 | changes.append((name, None)) |
|
861 | changes.append((name, None)) | |
863 | if self.ui.verbose: |
|
862 | if self.ui.verbose: | |
864 | self.ui.write(_(b'deleting bookmark %s\n') % name) |
|
863 | self.ui.write(_(b'deleting bookmark %s\n') % name) | |
865 | repo._bookmarks.applychanges(repo, tr, changes) |
|
864 | repo._bookmarks.applychanges(repo, tr, changes) | |
866 |
|
865 | |||
867 | def _moveworkingdirectoryparent(self): |
|
866 | def _moveworkingdirectoryparent(self): | |
868 | if not self.finalnode: |
|
867 | if not self.finalnode: | |
869 | # Find the latest not-{obsoleted,stripped} parent. |
|
868 | # Find the latest not-{obsoleted,stripped} parent. | |
870 | revs = self.repo.revs(b'max(::. - %ln)', self.replacemap.keys()) |
|
869 | revs = self.repo.revs(b'max(::. - %ln)', self.replacemap.keys()) | |
871 | ctx = self.repo[revs.first()] |
|
870 | ctx = self.repo[revs.first()] | |
872 | self.finalnode = ctx.node() |
|
871 | self.finalnode = ctx.node() | |
873 | else: |
|
872 | else: | |
874 | ctx = self.repo[self.finalnode] |
|
873 | ctx = self.repo[self.finalnode] | |
875 |
|
874 | |||
876 | dirstate = self.repo.dirstate |
|
875 | dirstate = self.repo.dirstate | |
877 | # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to |
|
876 | # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to | |
878 | # be slow. in absorb's case, no need to invalidate fsmonitorstate. |
|
877 | # be slow. in absorb's case, no need to invalidate fsmonitorstate. | |
879 | noop = lambda: 0 |
|
878 | noop = lambda: 0 | |
880 | restore = noop |
|
879 | restore = noop | |
881 | if util.safehasattr(dirstate, '_fsmonitorstate'): |
|
880 | if util.safehasattr(dirstate, '_fsmonitorstate'): | |
882 | bak = dirstate._fsmonitorstate.invalidate |
|
881 | bak = dirstate._fsmonitorstate.invalidate | |
883 |
|
882 | |||
884 | def restore(): |
|
883 | def restore(): | |
885 | dirstate._fsmonitorstate.invalidate = bak |
|
884 | dirstate._fsmonitorstate.invalidate = bak | |
886 |
|
885 | |||
887 | dirstate._fsmonitorstate.invalidate = noop |
|
886 | dirstate._fsmonitorstate.invalidate = noop | |
888 | try: |
|
887 | try: | |
889 | with dirstate.parentchange(): |
|
888 | with dirstate.parentchange(): | |
890 | dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths) |
|
889 | dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths) | |
891 | finally: |
|
890 | finally: | |
892 | restore() |
|
891 | restore() | |
893 |
|
892 | |||
894 | @staticmethod |
|
893 | @staticmethod | |
895 | def _willbecomenoop(memworkingcopy, ctx, pctx=None): |
|
894 | def _willbecomenoop(memworkingcopy, ctx, pctx=None): | |
896 | """({path: content}, ctx, ctx) -> bool. test if a commit will be noop |
|
895 | """({path: content}, ctx, ctx) -> bool. test if a commit will be noop | |
897 |
|
896 | |||
898 | if it will become an empty commit (does not change anything, after the |
|
897 | if it will become an empty commit (does not change anything, after the | |
899 | memworkingcopy overrides), return True. otherwise return False. |
|
898 | memworkingcopy overrides), return True. otherwise return False. | |
900 | """ |
|
899 | """ | |
901 | if not pctx: |
|
900 | if not pctx: | |
902 | parents = ctx.parents() |
|
901 | parents = ctx.parents() | |
903 | if len(parents) != 1: |
|
902 | if len(parents) != 1: | |
904 | return False |
|
903 | return False | |
905 | pctx = parents[0] |
|
904 | pctx = parents[0] | |
906 | if ctx.branch() != pctx.branch(): |
|
905 | if ctx.branch() != pctx.branch(): | |
907 | return False |
|
906 | return False | |
908 | if ctx.extra().get(b'close'): |
|
907 | if ctx.extra().get(b'close'): | |
909 | return False |
|
908 | return False | |
910 | # ctx changes more files (not a subset of memworkingcopy) |
|
909 | # ctx changes more files (not a subset of memworkingcopy) | |
911 | if not set(ctx.files()).issubset(set(memworkingcopy)): |
|
910 | if not set(ctx.files()).issubset(set(memworkingcopy)): | |
912 | return False |
|
911 | return False | |
913 | for path, content in pycompat.iteritems(memworkingcopy): |
|
912 | for path, content in pycompat.iteritems(memworkingcopy): | |
914 | if path not in pctx or path not in ctx: |
|
913 | if path not in pctx or path not in ctx: | |
915 | return False |
|
914 | return False | |
916 | fctx = ctx[path] |
|
915 | fctx = ctx[path] | |
917 | pfctx = pctx[path] |
|
916 | pfctx = pctx[path] | |
918 | if pfctx.flags() != fctx.flags(): |
|
917 | if pfctx.flags() != fctx.flags(): | |
919 | return False |
|
918 | return False | |
920 | if pfctx.data() != content: |
|
919 | if pfctx.data() != content: | |
921 | return False |
|
920 | return False | |
922 | return True |
|
921 | return True | |
923 |
|
922 | |||
924 | def _commitsingle(self, memworkingcopy, ctx, p1=None): |
|
923 | def _commitsingle(self, memworkingcopy, ctx, p1=None): | |
925 | """(ctx, {path: content}, node) -> node. make a single commit |
|
924 | """(ctx, {path: content}, node) -> node. make a single commit | |
926 |
|
925 | |||
927 | the commit is a clone from ctx, with a (optionally) different p1, and |
|
926 | the commit is a clone from ctx, with a (optionally) different p1, and | |
928 | different file contents replaced by memworkingcopy. |
|
927 | different file contents replaced by memworkingcopy. | |
929 | """ |
|
928 | """ | |
930 | parents = p1 and (p1, nullid) |
|
929 | parents = p1 and (p1, self.repo.nullid) | |
931 | extra = ctx.extra() |
|
930 | extra = ctx.extra() | |
932 | if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'): |
|
931 | if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'): | |
933 | extra[b'absorb_source'] = ctx.hex() |
|
932 | extra[b'absorb_source'] = ctx.hex() | |
934 |
|
933 | |||
935 | desc = rewriteutil.update_hash_refs( |
|
934 | desc = rewriteutil.update_hash_refs( | |
936 | ctx.repo(), |
|
935 | ctx.repo(), | |
937 | ctx.description(), |
|
936 | ctx.description(), | |
938 | { |
|
937 | { | |
939 | oldnode: [newnode] |
|
938 | oldnode: [newnode] | |
940 | for oldnode, newnode in self.replacemap.items() |
|
939 | for oldnode, newnode in self.replacemap.items() | |
941 | }, |
|
940 | }, | |
942 | ) |
|
941 | ) | |
943 | mctx = overlaycontext( |
|
942 | mctx = overlaycontext( | |
944 | memworkingcopy, ctx, parents, extra=extra, desc=desc |
|
943 | memworkingcopy, ctx, parents, extra=extra, desc=desc | |
945 | ) |
|
944 | ) | |
946 | return mctx.commit() |
|
945 | return mctx.commit() | |
947 |
|
946 | |||
948 | @util.propertycache |
|
947 | @util.propertycache | |
949 | def _useobsolete(self): |
|
948 | def _useobsolete(self): | |
950 | """() -> bool""" |
|
949 | """() -> bool""" | |
951 | return obsolete.isenabled(self.repo, obsolete.createmarkersopt) |
|
950 | return obsolete.isenabled(self.repo, obsolete.createmarkersopt) | |
952 |
|
951 | |||
953 | def _cleanupoldcommits(self): |
|
952 | def _cleanupoldcommits(self): | |
954 | replacements = { |
|
953 | replacements = { | |
955 | k: ([v] if v is not None else []) |
|
954 | k: ([v] if v is not None else []) | |
956 | for k, v in pycompat.iteritems(self.replacemap) |
|
955 | for k, v in pycompat.iteritems(self.replacemap) | |
957 | } |
|
956 | } | |
958 | if replacements: |
|
957 | if replacements: | |
959 | scmutil.cleanupnodes( |
|
958 | scmutil.cleanupnodes( | |
960 | self.repo, replacements, operation=b'absorb', fixphase=True |
|
959 | self.repo, replacements, operation=b'absorb', fixphase=True | |
961 | ) |
|
960 | ) | |
962 |
|
961 | |||
963 | @util.propertycache |
|
962 | @util.propertycache | |
964 | def skip_empty_successor(self): |
|
963 | def skip_empty_successor(self): | |
965 | return rewriteutil.skip_empty_successor(self.ui, b'absorb') |
|
964 | return rewriteutil.skip_empty_successor(self.ui, b'absorb') | |
966 |
|
965 | |||
967 |
|
966 | |||
968 | def _parsechunk(hunk): |
|
967 | def _parsechunk(hunk): | |
969 | """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))""" |
|
968 | """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))""" | |
970 | if type(hunk) not in (crecord.uihunk, patch.recordhunk): |
|
969 | if type(hunk) not in (crecord.uihunk, patch.recordhunk): | |
971 | return None, None |
|
970 | return None, None | |
972 | path = hunk.header.filename() |
|
971 | path = hunk.header.filename() | |
973 | a1 = hunk.fromline + len(hunk.before) - 1 |
|
972 | a1 = hunk.fromline + len(hunk.before) - 1 | |
974 | # remove before and after context |
|
973 | # remove before and after context | |
975 | hunk.before = hunk.after = [] |
|
974 | hunk.before = hunk.after = [] | |
976 | buf = util.stringio() |
|
975 | buf = util.stringio() | |
977 | hunk.write(buf) |
|
976 | hunk.write(buf) | |
978 | patchlines = mdiff.splitnewlines(buf.getvalue()) |
|
977 | patchlines = mdiff.splitnewlines(buf.getvalue()) | |
979 | # hunk.prettystr() will update hunk.removed |
|
978 | # hunk.prettystr() will update hunk.removed | |
980 | a2 = a1 + hunk.removed |
|
979 | a2 = a1 + hunk.removed | |
981 | blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')] |
|
980 | blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')] | |
982 | return path, (a1, a2, blines) |
|
981 | return path, (a1, a2, blines) | |
983 |
|
982 | |||
984 |
|
983 | |||
985 | def overlaydiffcontext(ctx, chunks): |
|
984 | def overlaydiffcontext(ctx, chunks): | |
986 | """(ctx, [crecord.uihunk]) -> memctx |
|
985 | """(ctx, [crecord.uihunk]) -> memctx | |
987 |
|
986 | |||
988 | return a memctx with some [1] patches (chunks) applied to ctx. |
|
987 | return a memctx with some [1] patches (chunks) applied to ctx. | |
989 | [1]: modifications are handled. renames, mode changes, etc. are ignored. |
|
988 | [1]: modifications are handled. renames, mode changes, etc. are ignored. | |
990 | """ |
|
989 | """ | |
991 | # sadly the applying-patch logic is hardly reusable, and messy: |
|
990 | # sadly the applying-patch logic is hardly reusable, and messy: | |
992 | # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it |
|
991 | # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it | |
993 | # needs a file stream of a patch and will re-parse it, while we have |
|
992 | # needs a file stream of a patch and will re-parse it, while we have | |
994 | # structured hunk objects at hand. |
|
993 | # structured hunk objects at hand. | |
995 | # 2. a lot of different implementations about "chunk" (patch.hunk, |
|
994 | # 2. a lot of different implementations about "chunk" (patch.hunk, | |
996 | # patch.recordhunk, crecord.uihunk) |
|
995 | # patch.recordhunk, crecord.uihunk) | |
997 | # as we only care about applying changes to modified files, no mode |
|
996 | # as we only care about applying changes to modified files, no mode | |
998 | # change, no binary diff, and no renames, it's probably okay to |
|
997 | # change, no binary diff, and no renames, it's probably okay to | |
999 | # re-invent the logic using much simpler code here. |
|
998 | # re-invent the logic using much simpler code here. | |
1000 | memworkingcopy = {} # {path: content} |
|
999 | memworkingcopy = {} # {path: content} | |
1001 | patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]} |
|
1000 | patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]} | |
1002 | for path, info in map(_parsechunk, chunks): |
|
1001 | for path, info in map(_parsechunk, chunks): | |
1003 | if not path or not info: |
|
1002 | if not path or not info: | |
1004 | continue |
|
1003 | continue | |
1005 | patchmap[path].append(info) |
|
1004 | patchmap[path].append(info) | |
1006 | for path, patches in pycompat.iteritems(patchmap): |
|
1005 | for path, patches in pycompat.iteritems(patchmap): | |
1007 | if path not in ctx or not patches: |
|
1006 | if path not in ctx or not patches: | |
1008 | continue |
|
1007 | continue | |
1009 | patches.sort(reverse=True) |
|
1008 | patches.sort(reverse=True) | |
1010 | lines = mdiff.splitnewlines(ctx[path].data()) |
|
1009 | lines = mdiff.splitnewlines(ctx[path].data()) | |
1011 | for a1, a2, blines in patches: |
|
1010 | for a1, a2, blines in patches: | |
1012 | lines[a1:a2] = blines |
|
1011 | lines[a1:a2] = blines | |
1013 | memworkingcopy[path] = b''.join(lines) |
|
1012 | memworkingcopy[path] = b''.join(lines) | |
1014 | return overlaycontext(memworkingcopy, ctx) |
|
1013 | return overlaycontext(memworkingcopy, ctx) | |
1015 |
|
1014 | |||
1016 |
|
1015 | |||
1017 | def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None): |
|
1016 | def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None): | |
1018 | """pick fixup chunks from targetctx, apply them to stack. |
|
1017 | """pick fixup chunks from targetctx, apply them to stack. | |
1019 |
|
1018 | |||
1020 | if targetctx is None, the working copy context will be used. |
|
1019 | if targetctx is None, the working copy context will be used. | |
1021 | if stack is None, the current draft stack will be used. |
|
1020 | if stack is None, the current draft stack will be used. | |
1022 | return fixupstate. |
|
1021 | return fixupstate. | |
1023 | """ |
|
1022 | """ | |
1024 | if stack is None: |
|
1023 | if stack is None: | |
1025 | limit = ui.configint(b'absorb', b'max-stack-size') |
|
1024 | limit = ui.configint(b'absorb', b'max-stack-size') | |
1026 | headctx = repo[b'.'] |
|
1025 | headctx = repo[b'.'] | |
1027 | if len(headctx.parents()) > 1: |
|
1026 | if len(headctx.parents()) > 1: | |
1028 | raise error.InputError(_(b'cannot absorb into a merge')) |
|
1027 | raise error.InputError(_(b'cannot absorb into a merge')) | |
1029 | stack = getdraftstack(headctx, limit) |
|
1028 | stack = getdraftstack(headctx, limit) | |
1030 | if limit and len(stack) >= limit: |
|
1029 | if limit and len(stack) >= limit: | |
1031 | ui.warn( |
|
1030 | ui.warn( | |
1032 | _( |
|
1031 | _( | |
1033 | b'absorb: only the recent %d changesets will ' |
|
1032 | b'absorb: only the recent %d changesets will ' | |
1034 | b'be analysed\n' |
|
1033 | b'be analysed\n' | |
1035 | ) |
|
1034 | ) | |
1036 | % limit |
|
1035 | % limit | |
1037 | ) |
|
1036 | ) | |
1038 | if not stack: |
|
1037 | if not stack: | |
1039 | raise error.InputError(_(b'no mutable changeset to change')) |
|
1038 | raise error.InputError(_(b'no mutable changeset to change')) | |
1040 | if targetctx is None: # default to working copy |
|
1039 | if targetctx is None: # default to working copy | |
1041 | targetctx = repo[None] |
|
1040 | targetctx = repo[None] | |
1042 | if pats is None: |
|
1041 | if pats is None: | |
1043 | pats = () |
|
1042 | pats = () | |
1044 | if opts is None: |
|
1043 | if opts is None: | |
1045 | opts = {} |
|
1044 | opts = {} | |
1046 | state = fixupstate(stack, ui=ui, opts=opts) |
|
1045 | state = fixupstate(stack, ui=ui, opts=opts) | |
1047 | matcher = scmutil.match(targetctx, pats, opts) |
|
1046 | matcher = scmutil.match(targetctx, pats, opts) | |
1048 | if opts.get(b'interactive'): |
|
1047 | if opts.get(b'interactive'): | |
1049 | diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher) |
|
1048 | diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher) | |
1050 | origchunks = patch.parsepatch(diff) |
|
1049 | origchunks = patch.parsepatch(diff) | |
1051 | chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0] |
|
1050 | chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0] | |
1052 | targetctx = overlaydiffcontext(stack[-1], chunks) |
|
1051 | targetctx = overlaydiffcontext(stack[-1], chunks) | |
1053 | fm = None |
|
1052 | fm = None | |
1054 | if opts.get(b'print_changes') or not opts.get(b'apply_changes'): |
|
1053 | if opts.get(b'print_changes') or not opts.get(b'apply_changes'): | |
1055 | fm = ui.formatter(b'absorb', opts) |
|
1054 | fm = ui.formatter(b'absorb', opts) | |
1056 | state.diffwith(targetctx, matcher, fm) |
|
1055 | state.diffwith(targetctx, matcher, fm) | |
1057 | if fm is not None: |
|
1056 | if fm is not None: | |
1058 | fm.startitem() |
|
1057 | fm.startitem() | |
1059 | fm.write( |
|
1058 | fm.write( | |
1060 | b"count", b"\n%d changesets affected\n", len(state.ctxaffected) |
|
1059 | b"count", b"\n%d changesets affected\n", len(state.ctxaffected) | |
1061 | ) |
|
1060 | ) | |
1062 | fm.data(linetype=b'summary') |
|
1061 | fm.data(linetype=b'summary') | |
1063 | for ctx in reversed(stack): |
|
1062 | for ctx in reversed(stack): | |
1064 | if ctx not in state.ctxaffected: |
|
1063 | if ctx not in state.ctxaffected: | |
1065 | continue |
|
1064 | continue | |
1066 | fm.startitem() |
|
1065 | fm.startitem() | |
1067 | fm.context(ctx=ctx) |
|
1066 | fm.context(ctx=ctx) | |
1068 | fm.data(linetype=b'changeset') |
|
1067 | fm.data(linetype=b'changeset') | |
1069 | fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node') |
|
1068 | fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node') | |
1070 | descfirstline = ctx.description().splitlines()[0] |
|
1069 | descfirstline = ctx.description().splitlines()[0] | |
1071 | fm.write( |
|
1070 | fm.write( | |
1072 | b'descfirstline', |
|
1071 | b'descfirstline', | |
1073 | b'%s\n', |
|
1072 | b'%s\n', | |
1074 | descfirstline, |
|
1073 | descfirstline, | |
1075 | label=b'absorb.description', |
|
1074 | label=b'absorb.description', | |
1076 | ) |
|
1075 | ) | |
1077 | fm.end() |
|
1076 | fm.end() | |
1078 | if not opts.get(b'dry_run'): |
|
1077 | if not opts.get(b'dry_run'): | |
1079 | if ( |
|
1078 | if ( | |
1080 | not opts.get(b'apply_changes') |
|
1079 | not opts.get(b'apply_changes') | |
1081 | and state.ctxaffected |
|
1080 | and state.ctxaffected | |
1082 | and ui.promptchoice( |
|
1081 | and ui.promptchoice( | |
1083 | b"apply changes (y/N)? $$ &Yes $$ &No", default=1 |
|
1082 | b"apply changes (y/N)? $$ &Yes $$ &No", default=1 | |
1084 | ) |
|
1083 | ) | |
1085 | ): |
|
1084 | ): | |
1086 | raise error.CanceledError(_(b'absorb cancelled\n')) |
|
1085 | raise error.CanceledError(_(b'absorb cancelled\n')) | |
1087 |
|
1086 | |||
1088 | state.apply() |
|
1087 | state.apply() | |
1089 | if state.commit(): |
|
1088 | if state.commit(): | |
1090 | state.printchunkstats() |
|
1089 | state.printchunkstats() | |
1091 | elif not ui.quiet: |
|
1090 | elif not ui.quiet: | |
1092 | ui.write(_(b'nothing applied\n')) |
|
1091 | ui.write(_(b'nothing applied\n')) | |
1093 | return state |
|
1092 | return state | |
1094 |
|
1093 | |||
1095 |
|
1094 | |||
1096 | @command( |
|
1095 | @command( | |
1097 | b'absorb', |
|
1096 | b'absorb', | |
1098 | [ |
|
1097 | [ | |
1099 | ( |
|
1098 | ( | |
1100 | b'a', |
|
1099 | b'a', | |
1101 | b'apply-changes', |
|
1100 | b'apply-changes', | |
1102 | None, |
|
1101 | None, | |
1103 | _(b'apply changes without prompting for confirmation'), |
|
1102 | _(b'apply changes without prompting for confirmation'), | |
1104 | ), |
|
1103 | ), | |
1105 | ( |
|
1104 | ( | |
1106 | b'p', |
|
1105 | b'p', | |
1107 | b'print-changes', |
|
1106 | b'print-changes', | |
1108 | None, |
|
1107 | None, | |
1109 | _(b'always print which changesets are modified by which changes'), |
|
1108 | _(b'always print which changesets are modified by which changes'), | |
1110 | ), |
|
1109 | ), | |
1111 | ( |
|
1110 | ( | |
1112 | b'i', |
|
1111 | b'i', | |
1113 | b'interactive', |
|
1112 | b'interactive', | |
1114 | None, |
|
1113 | None, | |
1115 | _(b'interactively select which chunks to apply'), |
|
1114 | _(b'interactively select which chunks to apply'), | |
1116 | ), |
|
1115 | ), | |
1117 | ( |
|
1116 | ( | |
1118 | b'e', |
|
1117 | b'e', | |
1119 | b'edit-lines', |
|
1118 | b'edit-lines', | |
1120 | None, |
|
1119 | None, | |
1121 | _( |
|
1120 | _( | |
1122 | b'edit what lines belong to which changesets before commit ' |
|
1121 | b'edit what lines belong to which changesets before commit ' | |
1123 | b'(EXPERIMENTAL)' |
|
1122 | b'(EXPERIMENTAL)' | |
1124 | ), |
|
1123 | ), | |
1125 | ), |
|
1124 | ), | |
1126 | ] |
|
1125 | ] | |
1127 | + commands.dryrunopts |
|
1126 | + commands.dryrunopts | |
1128 | + commands.templateopts |
|
1127 | + commands.templateopts | |
1129 | + commands.walkopts, |
|
1128 | + commands.walkopts, | |
1130 | _(b'hg absorb [OPTION] [FILE]...'), |
|
1129 | _(b'hg absorb [OPTION] [FILE]...'), | |
1131 | helpcategory=command.CATEGORY_COMMITTING, |
|
1130 | helpcategory=command.CATEGORY_COMMITTING, | |
1132 | helpbasic=True, |
|
1131 | helpbasic=True, | |
1133 | ) |
|
1132 | ) | |
1134 | def absorbcmd(ui, repo, *pats, **opts): |
|
1133 | def absorbcmd(ui, repo, *pats, **opts): | |
1135 | """incorporate corrections into the stack of draft changesets |
|
1134 | """incorporate corrections into the stack of draft changesets | |
1136 |
|
1135 | |||
1137 | absorb analyzes each change in your working directory and attempts to |
|
1136 | absorb analyzes each change in your working directory and attempts to | |
1138 | amend the changed lines into the changesets in your stack that first |
|
1137 | amend the changed lines into the changesets in your stack that first | |
1139 | introduced those lines. |
|
1138 | introduced those lines. | |
1140 |
|
1139 | |||
1141 | If absorb cannot find an unambiguous changeset to amend for a change, |
|
1140 | If absorb cannot find an unambiguous changeset to amend for a change, | |
1142 | that change will be left in the working directory, untouched. They can be |
|
1141 | that change will be left in the working directory, untouched. They can be | |
1143 | observed by :hg:`status` or :hg:`diff` afterwards. In other words, |
|
1142 | observed by :hg:`status` or :hg:`diff` afterwards. In other words, | |
1144 | absorb does not write to the working directory. |
|
1143 | absorb does not write to the working directory. | |
1145 |
|
1144 | |||
1146 | Changesets outside the revset `::. and not public() and not merge()` will |
|
1145 | Changesets outside the revset `::. and not public() and not merge()` will | |
1147 | not be changed. |
|
1146 | not be changed. | |
1148 |
|
1147 | |||
1149 | Changesets that become empty after applying the changes will be deleted. |
|
1148 | Changesets that become empty after applying the changes will be deleted. | |
1150 |
|
1149 | |||
1151 | By default, absorb will show what it plans to do and prompt for |
|
1150 | By default, absorb will show what it plans to do and prompt for | |
1152 | confirmation. If you are confident that the changes will be absorbed |
|
1151 | confirmation. If you are confident that the changes will be absorbed | |
1153 | to the correct place, run :hg:`absorb -a` to apply the changes |
|
1152 | to the correct place, run :hg:`absorb -a` to apply the changes | |
1154 | immediately. |
|
1153 | immediately. | |
1155 |
|
1154 | |||
1156 | Returns 0 on success, 1 if all chunks were ignored and nothing amended. |
|
1155 | Returns 0 on success, 1 if all chunks were ignored and nothing amended. | |
1157 | """ |
|
1156 | """ | |
1158 | opts = pycompat.byteskwargs(opts) |
|
1157 | opts = pycompat.byteskwargs(opts) | |
1159 |
|
1158 | |||
1160 | with repo.wlock(), repo.lock(): |
|
1159 | with repo.wlock(), repo.lock(): | |
1161 | if not opts[b'dry_run']: |
|
1160 | if not opts[b'dry_run']: | |
1162 | cmdutil.checkunfinished(repo) |
|
1161 | cmdutil.checkunfinished(repo) | |
1163 |
|
1162 | |||
1164 | state = absorb(ui, repo, pats=pats, opts=opts) |
|
1163 | state = absorb(ui, repo, pats=pats, opts=opts) | |
1165 | if sum(s[0] for s in state.chunkstats.values()) == 0: |
|
1164 | if sum(s[0] for s in state.chunkstats.values()) == 0: | |
1166 | return 1 |
|
1165 | return 1 |
@@ -1,76 +1,74 b'' | |||||
1 | # amend.py - provide the amend command |
|
1 | # amend.py - provide the amend command | |
2 | # |
|
2 | # | |
3 | # Copyright 2017 Facebook, Inc. |
|
3 | # Copyright 2017 Facebook, Inc. | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 | """provide the amend command (EXPERIMENTAL) |
|
7 | """provide the amend command (EXPERIMENTAL) | |
8 |
|
8 | |||
9 | This extension provides an ``amend`` command that is similar to |
|
9 | This extension provides an ``amend`` command that is similar to | |
10 | ``commit --amend`` but does not prompt an editor. |
|
10 | ``commit --amend`` but does not prompt an editor. | |
11 | """ |
|
11 | """ | |
12 |
|
12 | |||
13 | from __future__ import absolute_import |
|
13 | from __future__ import absolute_import | |
14 |
|
14 | |||
15 | from mercurial.i18n import _ |
|
15 | from mercurial.i18n import _ | |
16 | from mercurial import ( |
|
16 | from mercurial import ( | |
17 | cmdutil, |
|
17 | cmdutil, | |
18 | commands, |
|
18 | commands, | |
19 | pycompat, |
|
|||
20 | registrar, |
|
19 | registrar, | |
21 | ) |
|
20 | ) | |
22 |
|
21 | |||
23 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
22 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for | |
24 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
23 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should | |
25 | # be specifying the version(s) of Mercurial they are tested with, or |
|
24 | # be specifying the version(s) of Mercurial they are tested with, or | |
26 | # leave the attribute unspecified. |
|
25 | # leave the attribute unspecified. | |
27 | testedwith = b'ships-with-hg-core' |
|
26 | testedwith = b'ships-with-hg-core' | |
28 |
|
27 | |||
29 | cmdtable = {} |
|
28 | cmdtable = {} | |
30 | command = registrar.command(cmdtable) |
|
29 | command = registrar.command(cmdtable) | |
31 |
|
30 | |||
32 |
|
31 | |||
33 | @command( |
|
32 | @command( | |
34 | b'amend', |
|
33 | b'amend', | |
35 | [ |
|
34 | [ | |
36 | ( |
|
35 | ( | |
37 | b'A', |
|
36 | b'A', | |
38 | b'addremove', |
|
37 | b'addremove', | |
39 | None, |
|
38 | None, | |
40 | _(b'mark new/missing files as added/removed before committing'), |
|
39 | _(b'mark new/missing files as added/removed before committing'), | |
41 | ), |
|
40 | ), | |
42 | (b'e', b'edit', None, _(b'invoke editor on commit messages')), |
|
41 | (b'e', b'edit', None, _(b'invoke editor on commit messages')), | |
43 | (b'i', b'interactive', None, _(b'use interactive mode')), |
|
42 | (b'i', b'interactive', None, _(b'use interactive mode')), | |
44 | ( |
|
43 | ( | |
45 | b'', |
|
44 | b'', | |
46 | b'close-branch', |
|
45 | b'close-branch', | |
47 | None, |
|
46 | None, | |
48 | _(b'mark a branch as closed, hiding it from the branch list'), |
|
47 | _(b'mark a branch as closed, hiding it from the branch list'), | |
49 | ), |
|
48 | ), | |
50 | (b's', b'secret', None, _(b'use the secret phase for committing')), |
|
49 | (b's', b'secret', None, _(b'use the secret phase for committing')), | |
51 | (b'n', b'note', b'', _(b'store a note on the amend')), |
|
50 | (b'n', b'note', b'', _(b'store a note on the amend')), | |
52 | ] |
|
51 | ] | |
53 | + cmdutil.walkopts |
|
52 | + cmdutil.walkopts | |
54 | + cmdutil.commitopts |
|
53 | + cmdutil.commitopts | |
55 | + cmdutil.commitopts2 |
|
54 | + cmdutil.commitopts2 | |
56 | + cmdutil.commitopts3, |
|
55 | + cmdutil.commitopts3, | |
57 | _(b'[OPTION]... [FILE]...'), |
|
56 | _(b'[OPTION]... [FILE]...'), | |
58 | helpcategory=command.CATEGORY_COMMITTING, |
|
57 | helpcategory=command.CATEGORY_COMMITTING, | |
59 | inferrepo=True, |
|
58 | inferrepo=True, | |
60 | ) |
|
59 | ) | |
61 | def amend(ui, repo, *pats, **opts): |
|
60 | def amend(ui, repo, *pats, **opts): | |
62 | """amend the working copy parent with all or specified outstanding changes |
|
61 | """amend the working copy parent with all or specified outstanding changes | |
63 |
|
62 | |||
64 | Similar to :hg:`commit --amend`, but reuse the commit message without |
|
63 | Similar to :hg:`commit --amend`, but reuse the commit message without | |
65 | invoking editor, unless ``--edit`` was set. |
|
64 | invoking editor, unless ``--edit`` was set. | |
66 |
|
65 | |||
67 | See :hg:`help commit` for more details. |
|
66 | See :hg:`help commit` for more details. | |
68 | """ |
|
67 | """ | |
69 | opts = pycompat.byteskwargs(opts) |
|
68 | cmdutil.check_note_size(opts) | |
70 | cmdutil.checknotesize(ui, opts) |
|
|||
71 |
|
69 | |||
72 | with repo.wlock(), repo.lock(): |
|
70 | with repo.wlock(), repo.lock(): | |
73 |
if not opts.get( |
|
71 | if not opts.get('logfile'): | |
74 |
opts[ |
|
72 | opts['message'] = opts.get('message') or repo[b'.'].description() | |
75 |
opts[ |
|
73 | opts['amend'] = True | |
76 |
return commands._docommit(ui, repo, *pats, ** |
|
74 | return commands._docommit(ui, repo, *pats, **opts) |
@@ -1,335 +1,339 b'' | |||||
1 | # bzr.py - bzr support for the convert extension |
|
1 | # bzr.py - bzr support for the convert extension | |
2 | # |
|
2 | # | |
3 | # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others |
|
3 | # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | # This module is for handling 'bzr', that was formerly known as Bazaar-NG; |
|
8 | # This module is for handling Breezy imports or `brz`, but it's also compatible | |
9 | # it cannot access 'bar' repositories, but they were never used very much |
|
9 | # with Bazaar or `bzr`, that was formerly known as Bazaar-NG; | |
|
10 | # it cannot access `bar` repositories, but they were never used very much. | |||
10 | from __future__ import absolute_import |
|
11 | from __future__ import absolute_import | |
11 |
|
12 | |||
12 | import os |
|
13 | import os | |
13 |
|
14 | |||
14 | from mercurial.i18n import _ |
|
15 | from mercurial.i18n import _ | |
15 | from mercurial import ( |
|
16 | from mercurial import ( | |
16 | demandimport, |
|
17 | demandimport, | |
17 | error, |
|
18 | error, | |
18 | pycompat, |
|
19 | pycompat, | |
|
20 | util, | |||
19 | ) |
|
21 | ) | |
20 | from . import common |
|
22 | from . import common | |
21 |
|
23 | |||
|
24 | ||||
22 | # these do not work with demandimport, blacklist |
|
25 | # these do not work with demandimport, blacklist | |
23 | demandimport.IGNORES.update( |
|
26 | demandimport.IGNORES.update( | |
24 | [ |
|
27 | [ | |
25 |
b'b |
|
28 | b'breezy.transactions', | |
26 |
b'b |
|
29 | b'breezy.urlutils', | |
27 | b'ElementPath', |
|
30 | b'ElementPath', | |
28 | ] |
|
31 | ] | |
29 | ) |
|
32 | ) | |
30 |
|
33 | |||
31 | try: |
|
34 | try: | |
32 | # bazaar imports |
|
35 | # bazaar imports | |
33 |
import b |
|
36 | import breezy.bzr.bzrdir | |
34 |
import b |
|
37 | import breezy.errors | |
35 |
import b |
|
38 | import breezy.revision | |
36 |
import b |
|
39 | import breezy.revisionspec | |
37 |
|
40 | |||
38 |
bzrdir = b |
|
41 | bzrdir = breezy.bzr.bzrdir | |
39 |
errors = b |
|
42 | errors = breezy.errors | |
40 |
revision = b |
|
43 | revision = breezy.revision | |
41 |
revisionspec = b |
|
44 | revisionspec = breezy.revisionspec | |
42 | revisionspec.RevisionSpec |
|
45 | revisionspec.RevisionSpec | |
43 | except ImportError: |
|
46 | except ImportError: | |
44 | pass |
|
47 | pass | |
45 |
|
48 | |||
46 |
supportedkinds = ( |
|
49 | supportedkinds = ('file', 'symlink') | |
47 |
|
50 | |||
48 |
|
51 | |||
49 | class bzr_source(common.converter_source): |
|
52 | class bzr_source(common.converter_source): | |
50 | """Reads Bazaar repositories by using the Bazaar Python libraries""" |
|
53 | """Reads Bazaar repositories by using the Bazaar Python libraries""" | |
51 |
|
54 | |||
52 | def __init__(self, ui, repotype, path, revs=None): |
|
55 | def __init__(self, ui, repotype, path, revs=None): | |
53 | super(bzr_source, self).__init__(ui, repotype, path, revs=revs) |
|
56 | super(bzr_source, self).__init__(ui, repotype, path, revs=revs) | |
54 |
|
57 | |||
55 | if not os.path.exists(os.path.join(path, b'.bzr')): |
|
58 | if not os.path.exists(os.path.join(path, b'.bzr')): | |
56 | raise common.NoRepo( |
|
59 | raise common.NoRepo( | |
57 | _(b'%s does not look like a Bazaar repository') % path |
|
60 | _(b'%s does not look like a Bazaar repository') % path | |
58 | ) |
|
61 | ) | |
59 |
|
62 | |||
60 | try: |
|
63 | try: | |
61 |
# access b |
|
64 | # access breezy stuff | |
62 | bzrdir |
|
65 | bzrdir | |
63 | except NameError: |
|
66 | except NameError: | |
64 | raise common.NoRepo(_(b'Bazaar modules could not be loaded')) |
|
67 | raise common.NoRepo(_(b'Bazaar modules could not be loaded')) | |
65 |
|
68 | |||
66 |
path = |
|
69 | path = util.abspath(path) | |
67 | self._checkrepotype(path) |
|
70 | self._checkrepotype(path) | |
68 | try: |
|
71 | try: | |
69 |
|
|
72 | bzr_dir = bzrdir.BzrDir.open(path.decode()) | |
|
73 | self.sourcerepo = bzr_dir.open_repository() | |||
70 | except errors.NoRepositoryPresent: |
|
74 | except errors.NoRepositoryPresent: | |
71 | raise common.NoRepo( |
|
75 | raise common.NoRepo( | |
72 | _(b'%s does not look like a Bazaar repository') % path |
|
76 | _(b'%s does not look like a Bazaar repository') % path | |
73 | ) |
|
77 | ) | |
74 | self._parentids = {} |
|
78 | self._parentids = {} | |
75 | self._saverev = ui.configbool(b'convert', b'bzr.saverev') |
|
79 | self._saverev = ui.configbool(b'convert', b'bzr.saverev') | |
76 |
|
80 | |||
77 | def _checkrepotype(self, path): |
|
81 | def _checkrepotype(self, path): | |
78 | # Lightweight checkouts detection is informational but probably |
|
82 | # Lightweight checkouts detection is informational but probably | |
79 | # fragile at API level. It should not terminate the conversion. |
|
83 | # fragile at API level. It should not terminate the conversion. | |
80 | try: |
|
84 | try: | |
81 | dir = bzrdir.BzrDir.open_containing(path)[0] |
|
85 | dir = bzrdir.BzrDir.open_containing(path.decode())[0] | |
82 | try: |
|
86 | try: | |
83 | tree = dir.open_workingtree(recommend_upgrade=False) |
|
87 | tree = dir.open_workingtree(recommend_upgrade=False) | |
84 | branch = tree.branch |
|
88 | branch = tree.branch | |
85 | except (errors.NoWorkingTree, errors.NotLocalUrl): |
|
89 | except (errors.NoWorkingTree, errors.NotLocalUrl): | |
86 | tree = None |
|
90 | tree = None | |
87 | branch = dir.open_branch() |
|
91 | branch = dir.open_branch() | |
88 | if ( |
|
92 | if ( | |
89 | tree is not None |
|
93 | tree is not None | |
90 |
and tree. |
|
94 | and tree.controldir.root_transport.base | |
91 |
!= branch. |
|
95 | != branch.controldir.root_transport.base | |
92 | ): |
|
96 | ): | |
93 | self.ui.warn( |
|
97 | self.ui.warn( | |
94 | _( |
|
98 | _( | |
95 | b'warning: lightweight checkouts may cause ' |
|
99 | b'warning: lightweight checkouts may cause ' | |
96 | b'conversion failures, try with a regular ' |
|
100 | b'conversion failures, try with a regular ' | |
97 | b'branch instead.\n' |
|
101 | b'branch instead.\n' | |
98 | ) |
|
102 | ) | |
99 | ) |
|
103 | ) | |
100 | except Exception: |
|
104 | except Exception: | |
101 | self.ui.note(_(b'bzr source type could not be determined\n')) |
|
105 | self.ui.note(_(b'bzr source type could not be determined\n')) | |
102 |
|
106 | |||
103 | def before(self): |
|
107 | def before(self): | |
104 | """Before the conversion begins, acquire a read lock |
|
108 | """Before the conversion begins, acquire a read lock | |
105 | for all the operations that might need it. Fortunately |
|
109 | for all the operations that might need it. Fortunately | |
106 | read locks don't block other reads or writes to the |
|
110 | read locks don't block other reads or writes to the | |
107 | repository, so this shouldn't have any impact on the usage of |
|
111 | repository, so this shouldn't have any impact on the usage of | |
108 | the source repository. |
|
112 | the source repository. | |
109 |
|
113 | |||
110 | The alternative would be locking on every operation that |
|
114 | The alternative would be locking on every operation that | |
111 | needs locks (there are currently two: getting the file and |
|
115 | needs locks (there are currently two: getting the file and | |
112 | getting the parent map) and releasing immediately after, |
|
116 | getting the parent map) and releasing immediately after, | |
113 | but this approach can take even 40% longer.""" |
|
117 | but this approach can take even 40% longer.""" | |
114 | self.sourcerepo.lock_read() |
|
118 | self.sourcerepo.lock_read() | |
115 |
|
119 | |||
116 | def after(self): |
|
120 | def after(self): | |
117 | self.sourcerepo.unlock() |
|
121 | self.sourcerepo.unlock() | |
118 |
|
122 | |||
119 | def _bzrbranches(self): |
|
123 | def _bzrbranches(self): | |
120 | return self.sourcerepo.find_branches(using=True) |
|
124 | return self.sourcerepo.find_branches(using=True) | |
121 |
|
125 | |||
122 | def getheads(self): |
|
126 | def getheads(self): | |
123 | if not self.revs: |
|
127 | if not self.revs: | |
124 | # Set using=True to avoid nested repositories (see issue3254) |
|
128 | # Set using=True to avoid nested repositories (see issue3254) | |
125 | heads = sorted([b.last_revision() for b in self._bzrbranches()]) |
|
129 | heads = sorted([b.last_revision() for b in self._bzrbranches()]) | |
126 | else: |
|
130 | else: | |
127 | revid = None |
|
131 | revid = None | |
128 | for branch in self._bzrbranches(): |
|
132 | for branch in self._bzrbranches(): | |
129 | try: |
|
133 | try: | |
130 |
r = |
|
134 | revspec = self.revs[0].decode() | |
|
135 | r = revisionspec.RevisionSpec.from_string(revspec) | |||
131 | info = r.in_history(branch) |
|
136 | info = r.in_history(branch) | |
132 | except errors.BzrError: |
|
137 | except errors.BzrError: | |
133 | pass |
|
138 | pass | |
134 | revid = info.rev_id |
|
139 | revid = info.rev_id | |
135 | if revid is None: |
|
140 | if revid is None: | |
136 | raise error.Abort( |
|
141 | raise error.Abort( | |
137 | _(b'%s is not a valid revision') % self.revs[0] |
|
142 | _(b'%s is not a valid revision') % self.revs[0] | |
138 | ) |
|
143 | ) | |
139 | heads = [revid] |
|
144 | heads = [revid] | |
140 | # Empty repositories return 'null:', which cannot be retrieved |
|
145 | # Empty repositories return 'null:', which cannot be retrieved | |
141 | heads = [h for h in heads if h != b'null:'] |
|
146 | heads = [h for h in heads if h != b'null:'] | |
142 | return heads |
|
147 | return heads | |
143 |
|
148 | |||
144 | def getfile(self, name, rev): |
|
149 | def getfile(self, name, rev): | |
|
150 | name = name.decode() | |||
145 | revtree = self.sourcerepo.revision_tree(rev) |
|
151 | revtree = self.sourcerepo.revision_tree(rev) | |
146 | fileid = revtree.path2id(name.decode(self.encoding or b'utf-8')) |
|
152 | ||
147 | kind = None |
|
153 | try: | |
148 | if fileid is not None: |
|
154 | kind = revtree.kind(name) | |
149 | kind = revtree.kind(fileid) |
|
155 | except breezy.errors.NoSuchFile: | |
|
156 | return None, None | |||
150 | if kind not in supportedkinds: |
|
157 | if kind not in supportedkinds: | |
151 | # the file is not available anymore - was deleted |
|
158 | # the file is not available anymore - was deleted | |
152 | return None, None |
|
159 | return None, None | |
153 | mode = self._modecache[(name, rev)] |
|
160 | mode = self._modecache[(name.encode(), rev)] | |
154 |
if kind == |
|
161 | if kind == 'symlink': | |
155 |
target = revtree.get_symlink_target( |
|
162 | target = revtree.get_symlink_target(name) | |
156 | if target is None: |
|
163 | if target is None: | |
157 | raise error.Abort( |
|
164 | raise error.Abort( | |
158 | _(b'%s.%s symlink has no target') % (name, rev) |
|
165 | _(b'%s.%s symlink has no target') % (name, rev) | |
159 | ) |
|
166 | ) | |
160 | return target, mode |
|
167 | return target.encode(), mode | |
161 | else: |
|
168 | else: | |
162 |
sio = revtree.get_file( |
|
169 | sio = revtree.get_file(name) | |
163 | return sio.read(), mode |
|
170 | return sio.read(), mode | |
164 |
|
171 | |||
165 | def getchanges(self, version, full): |
|
172 | def getchanges(self, version, full): | |
166 | if full: |
|
173 | if full: | |
167 | raise error.Abort(_(b"convert from cvs does not support --full")) |
|
174 | raise error.Abort(_(b"convert from cvs does not support --full")) | |
168 | self._modecache = {} |
|
175 | self._modecache = {} | |
169 | self._revtree = self.sourcerepo.revision_tree(version) |
|
176 | self._revtree = self.sourcerepo.revision_tree(version) | |
170 | # get the parentids from the cache |
|
177 | # get the parentids from the cache | |
171 | parentids = self._parentids.pop(version) |
|
178 | parentids = self._parentids.pop(version) | |
172 | # only diff against first parent id |
|
179 | # only diff against first parent id | |
173 | prevtree = self.sourcerepo.revision_tree(parentids[0]) |
|
180 | prevtree = self.sourcerepo.revision_tree(parentids[0]) | |
174 | files, changes = self._gettreechanges(self._revtree, prevtree) |
|
181 | files, changes = self._gettreechanges(self._revtree, prevtree) | |
175 | return files, changes, set() |
|
182 | return files, changes, set() | |
176 |
|
183 | |||
177 | def getcommit(self, version): |
|
184 | def getcommit(self, version): | |
178 | rev = self.sourcerepo.get_revision(version) |
|
185 | rev = self.sourcerepo.get_revision(version) | |
179 | # populate parent id cache |
|
186 | # populate parent id cache | |
180 | if not rev.parent_ids: |
|
187 | if not rev.parent_ids: | |
181 | parents = [] |
|
188 | parents = [] | |
182 | self._parentids[version] = (revision.NULL_REVISION,) |
|
189 | self._parentids[version] = (revision.NULL_REVISION,) | |
183 | else: |
|
190 | else: | |
184 | parents = self._filterghosts(rev.parent_ids) |
|
191 | parents = self._filterghosts(rev.parent_ids) | |
185 | self._parentids[version] = parents |
|
192 | self._parentids[version] = parents | |
186 |
|
193 | |||
187 |
branch = |
|
194 | branch = rev.properties.get('branch-nick', 'default') | |
188 |
if branch == |
|
195 | if branch == 'trunk': | |
189 |
branch = |
|
196 | branch = 'default' | |
190 | return common.commit( |
|
197 | return common.commit( | |
191 | parents=parents, |
|
198 | parents=parents, | |
192 | date=b'%d %d' % (rev.timestamp, -rev.timezone), |
|
199 | date=b'%d %d' % (rev.timestamp, -rev.timezone), | |
193 | author=self.recode(rev.committer), |
|
200 | author=self.recode(rev.committer), | |
194 | desc=self.recode(rev.message), |
|
201 | desc=self.recode(rev.message), | |
195 | branch=branch, |
|
202 | branch=branch.encode('utf8'), | |
196 | rev=version, |
|
203 | rev=version, | |
197 | saverev=self._saverev, |
|
204 | saverev=self._saverev, | |
198 | ) |
|
205 | ) | |
199 |
|
206 | |||
200 | def gettags(self): |
|
207 | def gettags(self): | |
201 | bytetags = {} |
|
208 | bytetags = {} | |
202 | for branch in self._bzrbranches(): |
|
209 | for branch in self._bzrbranches(): | |
203 | if not branch.supports_tags(): |
|
210 | if not branch.supports_tags(): | |
204 | return {} |
|
211 | return {} | |
205 | tagdict = branch.tags.get_tag_dict() |
|
212 | tagdict = branch.tags.get_tag_dict() | |
206 | for name, rev in pycompat.iteritems(tagdict): |
|
213 | for name, rev in pycompat.iteritems(tagdict): | |
207 | bytetags[self.recode(name)] = rev |
|
214 | bytetags[self.recode(name)] = rev | |
208 | return bytetags |
|
215 | return bytetags | |
209 |
|
216 | |||
210 | def getchangedfiles(self, rev, i): |
|
217 | def getchangedfiles(self, rev, i): | |
211 | self._modecache = {} |
|
218 | self._modecache = {} | |
212 | curtree = self.sourcerepo.revision_tree(rev) |
|
219 | curtree = self.sourcerepo.revision_tree(rev) | |
213 | if i is not None: |
|
220 | if i is not None: | |
214 | parentid = self._parentids[rev][i] |
|
221 | parentid = self._parentids[rev][i] | |
215 | else: |
|
222 | else: | |
216 | # no parent id, get the empty revision |
|
223 | # no parent id, get the empty revision | |
217 | parentid = revision.NULL_REVISION |
|
224 | parentid = revision.NULL_REVISION | |
218 |
|
225 | |||
219 | prevtree = self.sourcerepo.revision_tree(parentid) |
|
226 | prevtree = self.sourcerepo.revision_tree(parentid) | |
220 | changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]] |
|
227 | changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]] | |
221 | return changes |
|
228 | return changes | |
222 |
|
229 | |||
223 | def _gettreechanges(self, current, origin): |
|
230 | def _gettreechanges(self, current, origin): | |
224 | revid = current._revision_id |
|
231 | revid = current._revision_id | |
225 | changes = [] |
|
232 | changes = [] | |
226 | renames = {} |
|
233 | renames = {} | |
227 | seen = set() |
|
234 | seen = set() | |
228 |
|
235 | |||
229 | # Fall back to the deprecated attribute for legacy installations. |
|
236 | # Fall back to the deprecated attribute for legacy installations. | |
230 | try: |
|
237 | try: | |
231 | inventory = origin.root_inventory |
|
238 | inventory = origin.root_inventory | |
232 | except AttributeError: |
|
239 | except AttributeError: | |
233 | inventory = origin.inventory |
|
240 | inventory = origin.inventory | |
234 |
|
241 | |||
235 | # Process the entries by reverse lexicographic name order to |
|
242 | # Process the entries by reverse lexicographic name order to | |
236 | # handle nested renames correctly, most specific first. |
|
243 | # handle nested renames correctly, most specific first. | |
|
244 | ||||
|
245 | def key(c): | |||
|
246 | return c.path[0] or c.path[1] or "" | |||
|
247 | ||||
237 | curchanges = sorted( |
|
248 | curchanges = sorted( | |
238 | current.iter_changes(origin), |
|
249 | current.iter_changes(origin), | |
239 | key=lambda c: c[1][0] or c[1][1], |
|
250 | key=key, | |
240 | reverse=True, |
|
251 | reverse=True, | |
241 | ) |
|
252 | ) | |
242 | for ( |
|
253 | for change in curchanges: | |
243 | fileid, |
|
254 | paths = change.path | |
244 | paths, |
|
255 | kind = change.kind | |
245 |
change |
|
256 | executable = change.executable | |
246 | versioned, |
|
|||
247 | parent, |
|
|||
248 | name, |
|
|||
249 | kind, |
|
|||
250 | executable, |
|
|||
251 | ) in curchanges: |
|
|||
252 |
|
||||
253 | if paths[0] == u'' or paths[1] == u'': |
|
257 | if paths[0] == u'' or paths[1] == u'': | |
254 | # ignore changes to tree root |
|
258 | # ignore changes to tree root | |
255 | continue |
|
259 | continue | |
256 |
|
260 | |||
257 | # bazaar tracks directories, mercurial does not, so |
|
261 | # bazaar tracks directories, mercurial does not, so | |
258 | # we have to rename the directory contents |
|
262 | # we have to rename the directory contents | |
259 |
if kind[1] == |
|
263 | if kind[1] == 'directory': | |
260 |
if kind[0] not in (None, |
|
264 | if kind[0] not in (None, 'directory'): | |
261 | # Replacing 'something' with a directory, record it |
|
265 | # Replacing 'something' with a directory, record it | |
262 | # so it can be removed. |
|
266 | # so it can be removed. | |
263 | changes.append((self.recode(paths[0]), revid)) |
|
267 | changes.append((self.recode(paths[0]), revid)) | |
264 |
|
268 | |||
265 |
if kind[0] == |
|
269 | if kind[0] == 'directory' and None not in paths: | |
266 | renaming = paths[0] != paths[1] |
|
270 | renaming = paths[0] != paths[1] | |
267 | # neither an add nor an delete - a move |
|
271 | # neither an add nor an delete - a move | |
268 | # rename all directory contents manually |
|
272 | # rename all directory contents manually | |
269 | subdir = inventory.path2id(paths[0]) |
|
273 | subdir = inventory.path2id(paths[0]) | |
270 | # get all child-entries of the directory |
|
274 | # get all child-entries of the directory | |
271 | for name, entry in inventory.iter_entries(subdir): |
|
275 | for name, entry in inventory.iter_entries(subdir): | |
272 | # hg does not track directory renames |
|
276 | # hg does not track directory renames | |
273 |
if entry.kind == |
|
277 | if entry.kind == 'directory': | |
274 | continue |
|
278 | continue | |
275 |
frompath = self.recode(paths[0] + |
|
279 | frompath = self.recode(paths[0] + '/' + name) | |
276 | if frompath in seen: |
|
280 | if frompath in seen: | |
277 | # Already handled by a more specific change entry |
|
281 | # Already handled by a more specific change entry | |
278 | # This is important when you have: |
|
282 | # This is important when you have: | |
279 | # a => b |
|
283 | # a => b | |
280 | # a/c => a/c |
|
284 | # a/c => a/c | |
281 | # Here a/c must not be renamed into b/c |
|
285 | # Here a/c must not be renamed into b/c | |
282 | continue |
|
286 | continue | |
283 | seen.add(frompath) |
|
287 | seen.add(frompath) | |
284 | if not renaming: |
|
288 | if not renaming: | |
285 | continue |
|
289 | continue | |
286 |
topath = self.recode(paths[1] + |
|
290 | topath = self.recode(paths[1] + '/' + name) | |
287 | # register the files as changed |
|
291 | # register the files as changed | |
288 | changes.append((frompath, revid)) |
|
292 | changes.append((frompath, revid)) | |
289 | changes.append((topath, revid)) |
|
293 | changes.append((topath, revid)) | |
290 | # add to mode cache |
|
294 | # add to mode cache | |
291 | mode = ( |
|
295 | mode = ( | |
292 | (entry.executable and b'x') |
|
296 | (entry.executable and b'x') | |
293 |
or (entry.kind == |
|
297 | or (entry.kind == 'symlink' and b's') | |
294 | or b'' |
|
298 | or b'' | |
295 | ) |
|
299 | ) | |
296 | self._modecache[(topath, revid)] = mode |
|
300 | self._modecache[(topath, revid)] = mode | |
297 | # register the change as move |
|
301 | # register the change as move | |
298 | renames[topath] = frompath |
|
302 | renames[topath] = frompath | |
299 |
|
303 | |||
300 | # no further changes, go to the next change |
|
304 | # no further changes, go to the next change | |
301 | continue |
|
305 | continue | |
302 |
|
306 | |||
303 | # we got unicode paths, need to convert them |
|
307 | # we got unicode paths, need to convert them | |
304 | path, topath = paths |
|
308 | path, topath = paths | |
305 | if path is not None: |
|
309 | if path is not None: | |
306 | path = self.recode(path) |
|
310 | path = self.recode(path) | |
307 | if topath is not None: |
|
311 | if topath is not None: | |
308 | topath = self.recode(topath) |
|
312 | topath = self.recode(topath) | |
309 | seen.add(path or topath) |
|
313 | seen.add(path or topath) | |
310 |
|
314 | |||
311 | if topath is None: |
|
315 | if topath is None: | |
312 | # file deleted |
|
316 | # file deleted | |
313 | changes.append((path, revid)) |
|
317 | changes.append((path, revid)) | |
314 | continue |
|
318 | continue | |
315 |
|
319 | |||
316 | # renamed |
|
320 | # renamed | |
317 | if path and path != topath: |
|
321 | if path and path != topath: | |
318 | renames[topath] = path |
|
322 | renames[topath] = path | |
319 | changes.append((path, revid)) |
|
323 | changes.append((path, revid)) | |
320 |
|
324 | |||
321 | # populate the mode cache |
|
325 | # populate the mode cache | |
322 | kind, executable = [e[1] for e in (kind, executable)] |
|
326 | kind, executable = [e[1] for e in (kind, executable)] | |
323 |
mode = (executable and b'x') or (kind == |
|
327 | mode = (executable and b'x') or (kind == 'symlink' and b'l') or b'' | |
324 | self._modecache[(topath, revid)] = mode |
|
328 | self._modecache[(topath, revid)] = mode | |
325 | changes.append((topath, revid)) |
|
329 | changes.append((topath, revid)) | |
326 |
|
330 | |||
327 | return changes, renames |
|
331 | return changes, renames | |
328 |
|
332 | |||
329 | def _filterghosts(self, ids): |
|
333 | def _filterghosts(self, ids): | |
330 | """Filters out ghost revisions which hg does not support, see |
|
334 | """Filters out ghost revisions which hg does not support, see | |
331 | <http://bazaar-vcs.org/GhostRevision> |
|
335 | <http://bazaar-vcs.org/GhostRevision> | |
332 | """ |
|
336 | """ | |
333 | parentmap = self.sourcerepo.get_parent_map(ids) |
|
337 | parentmap = self.sourcerepo.get_parent_map(ids) | |
334 | parents = tuple([parent for parent in ids if parent in parentmap]) |
|
338 | parents = tuple([parent for parent in ids if parent in parentmap]) | |
335 | return parents |
|
339 | return parents |
@@ -1,531 +1,532 b'' | |||||
1 | # git.py - git support for the convert extension |
|
1 | # git.py - git support for the convert extension | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others |
|
3 | # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 | from __future__ import absolute_import |
|
7 | from __future__ import absolute_import | |
8 |
|
8 | |||
9 | import os |
|
9 | import os | |
10 |
|
10 | |||
11 | from mercurial.i18n import _ |
|
11 | from mercurial.i18n import _ | |
12 |
from mercurial.node import |
|
12 | from mercurial.node import sha1nodeconstants | |
13 | from mercurial import ( |
|
13 | from mercurial import ( | |
14 | config, |
|
14 | config, | |
15 | error, |
|
15 | error, | |
16 | pycompat, |
|
16 | pycompat, | |
|
17 | util, | |||
17 | ) |
|
18 | ) | |
18 |
|
19 | |||
19 | from . import common |
|
20 | from . import common | |
20 |
|
21 | |||
21 |
|
22 | |||
22 | class submodule(object): |
|
23 | class submodule(object): | |
23 | def __init__(self, path, node, url): |
|
24 | def __init__(self, path, node, url): | |
24 | self.path = path |
|
25 | self.path = path | |
25 | self.node = node |
|
26 | self.node = node | |
26 | self.url = url |
|
27 | self.url = url | |
27 |
|
28 | |||
28 | def hgsub(self): |
|
29 | def hgsub(self): | |
29 | return b"%s = [git]%s" % (self.path, self.url) |
|
30 | return b"%s = [git]%s" % (self.path, self.url) | |
30 |
|
31 | |||
31 | def hgsubstate(self): |
|
32 | def hgsubstate(self): | |
32 | return b"%s %s" % (self.node, self.path) |
|
33 | return b"%s %s" % (self.node, self.path) | |
33 |
|
34 | |||
34 |
|
35 | |||
35 | # Keys in extra fields that should not be copied if the user requests. |
|
36 | # Keys in extra fields that should not be copied if the user requests. | |
36 | bannedextrakeys = { |
|
37 | bannedextrakeys = { | |
37 | # Git commit object built-ins. |
|
38 | # Git commit object built-ins. | |
38 | b'tree', |
|
39 | b'tree', | |
39 | b'parent', |
|
40 | b'parent', | |
40 | b'author', |
|
41 | b'author', | |
41 | b'committer', |
|
42 | b'committer', | |
42 | # Mercurial built-ins. |
|
43 | # Mercurial built-ins. | |
43 | b'branch', |
|
44 | b'branch', | |
44 | b'close', |
|
45 | b'close', | |
45 | } |
|
46 | } | |
46 |
|
47 | |||
47 |
|
48 | |||
48 | class convert_git(common.converter_source, common.commandline): |
|
49 | class convert_git(common.converter_source, common.commandline): | |
49 | # Windows does not support GIT_DIR= construct while other systems |
|
50 | # Windows does not support GIT_DIR= construct while other systems | |
50 | # cannot remove environment variable. Just assume none have |
|
51 | # cannot remove environment variable. Just assume none have | |
51 | # both issues. |
|
52 | # both issues. | |
52 |
|
53 | |||
53 | def _gitcmd(self, cmd, *args, **kwargs): |
|
54 | def _gitcmd(self, cmd, *args, **kwargs): | |
54 | return cmd(b'--git-dir=%s' % self.path, *args, **kwargs) |
|
55 | return cmd(b'--git-dir=%s' % self.path, *args, **kwargs) | |
55 |
|
56 | |||
56 | def gitrun0(self, *args, **kwargs): |
|
57 | def gitrun0(self, *args, **kwargs): | |
57 | return self._gitcmd(self.run0, *args, **kwargs) |
|
58 | return self._gitcmd(self.run0, *args, **kwargs) | |
58 |
|
59 | |||
59 | def gitrun(self, *args, **kwargs): |
|
60 | def gitrun(self, *args, **kwargs): | |
60 | return self._gitcmd(self.run, *args, **kwargs) |
|
61 | return self._gitcmd(self.run, *args, **kwargs) | |
61 |
|
62 | |||
62 | def gitrunlines0(self, *args, **kwargs): |
|
63 | def gitrunlines0(self, *args, **kwargs): | |
63 | return self._gitcmd(self.runlines0, *args, **kwargs) |
|
64 | return self._gitcmd(self.runlines0, *args, **kwargs) | |
64 |
|
65 | |||
65 | def gitrunlines(self, *args, **kwargs): |
|
66 | def gitrunlines(self, *args, **kwargs): | |
66 | return self._gitcmd(self.runlines, *args, **kwargs) |
|
67 | return self._gitcmd(self.runlines, *args, **kwargs) | |
67 |
|
68 | |||
68 | def gitpipe(self, *args, **kwargs): |
|
69 | def gitpipe(self, *args, **kwargs): | |
69 | return self._gitcmd(self._run3, *args, **kwargs) |
|
70 | return self._gitcmd(self._run3, *args, **kwargs) | |
70 |
|
71 | |||
71 | def __init__(self, ui, repotype, path, revs=None): |
|
72 | def __init__(self, ui, repotype, path, revs=None): | |
72 | super(convert_git, self).__init__(ui, repotype, path, revs=revs) |
|
73 | super(convert_git, self).__init__(ui, repotype, path, revs=revs) | |
73 | common.commandline.__init__(self, ui, b'git') |
|
74 | common.commandline.__init__(self, ui, b'git') | |
74 |
|
75 | |||
75 | # Pass an absolute path to git to prevent from ever being interpreted |
|
76 | # Pass an absolute path to git to prevent from ever being interpreted | |
76 | # as a URL |
|
77 | # as a URL | |
77 |
path = |
|
78 | path = util.abspath(path) | |
78 |
|
79 | |||
79 | if os.path.isdir(path + b"/.git"): |
|
80 | if os.path.isdir(path + b"/.git"): | |
80 | path += b"/.git" |
|
81 | path += b"/.git" | |
81 | if not os.path.exists(path + b"/objects"): |
|
82 | if not os.path.exists(path + b"/objects"): | |
82 | raise common.NoRepo( |
|
83 | raise common.NoRepo( | |
83 | _(b"%s does not look like a Git repository") % path |
|
84 | _(b"%s does not look like a Git repository") % path | |
84 | ) |
|
85 | ) | |
85 |
|
86 | |||
86 | # The default value (50) is based on the default for 'git diff'. |
|
87 | # The default value (50) is based on the default for 'git diff'. | |
87 | similarity = ui.configint(b'convert', b'git.similarity') |
|
88 | similarity = ui.configint(b'convert', b'git.similarity') | |
88 | if similarity < 0 or similarity > 100: |
|
89 | if similarity < 0 or similarity > 100: | |
89 | raise error.Abort(_(b'similarity must be between 0 and 100')) |
|
90 | raise error.Abort(_(b'similarity must be between 0 and 100')) | |
90 | if similarity > 0: |
|
91 | if similarity > 0: | |
91 | self.simopt = [b'-C%d%%' % similarity] |
|
92 | self.simopt = [b'-C%d%%' % similarity] | |
92 | findcopiesharder = ui.configbool( |
|
93 | findcopiesharder = ui.configbool( | |
93 | b'convert', b'git.findcopiesharder' |
|
94 | b'convert', b'git.findcopiesharder' | |
94 | ) |
|
95 | ) | |
95 | if findcopiesharder: |
|
96 | if findcopiesharder: | |
96 | self.simopt.append(b'--find-copies-harder') |
|
97 | self.simopt.append(b'--find-copies-harder') | |
97 |
|
98 | |||
98 | renamelimit = ui.configint(b'convert', b'git.renamelimit') |
|
99 | renamelimit = ui.configint(b'convert', b'git.renamelimit') | |
99 | self.simopt.append(b'-l%d' % renamelimit) |
|
100 | self.simopt.append(b'-l%d' % renamelimit) | |
100 | else: |
|
101 | else: | |
101 | self.simopt = [] |
|
102 | self.simopt = [] | |
102 |
|
103 | |||
103 | common.checktool(b'git', b'git') |
|
104 | common.checktool(b'git', b'git') | |
104 |
|
105 | |||
105 | self.path = path |
|
106 | self.path = path | |
106 | self.submodules = [] |
|
107 | self.submodules = [] | |
107 |
|
108 | |||
108 | self.catfilepipe = self.gitpipe(b'cat-file', b'--batch') |
|
109 | self.catfilepipe = self.gitpipe(b'cat-file', b'--batch') | |
109 |
|
110 | |||
110 | self.copyextrakeys = self.ui.configlist(b'convert', b'git.extrakeys') |
|
111 | self.copyextrakeys = self.ui.configlist(b'convert', b'git.extrakeys') | |
111 | banned = set(self.copyextrakeys) & bannedextrakeys |
|
112 | banned = set(self.copyextrakeys) & bannedextrakeys | |
112 | if banned: |
|
113 | if banned: | |
113 | raise error.Abort( |
|
114 | raise error.Abort( | |
114 | _(b'copying of extra key is forbidden: %s') |
|
115 | _(b'copying of extra key is forbidden: %s') | |
115 | % _(b', ').join(sorted(banned)) |
|
116 | % _(b', ').join(sorted(banned)) | |
116 | ) |
|
117 | ) | |
117 |
|
118 | |||
118 | committeractions = self.ui.configlist( |
|
119 | committeractions = self.ui.configlist( | |
119 | b'convert', b'git.committeractions' |
|
120 | b'convert', b'git.committeractions' | |
120 | ) |
|
121 | ) | |
121 |
|
122 | |||
122 | messagedifferent = None |
|
123 | messagedifferent = None | |
123 | messagealways = None |
|
124 | messagealways = None | |
124 | for a in committeractions: |
|
125 | for a in committeractions: | |
125 | if a.startswith((b'messagedifferent', b'messagealways')): |
|
126 | if a.startswith((b'messagedifferent', b'messagealways')): | |
126 | k = a |
|
127 | k = a | |
127 | v = None |
|
128 | v = None | |
128 | if b'=' in a: |
|
129 | if b'=' in a: | |
129 | k, v = a.split(b'=', 1) |
|
130 | k, v = a.split(b'=', 1) | |
130 |
|
131 | |||
131 | if k == b'messagedifferent': |
|
132 | if k == b'messagedifferent': | |
132 | messagedifferent = v or b'committer:' |
|
133 | messagedifferent = v or b'committer:' | |
133 | elif k == b'messagealways': |
|
134 | elif k == b'messagealways': | |
134 | messagealways = v or b'committer:' |
|
135 | messagealways = v or b'committer:' | |
135 |
|
136 | |||
136 | if messagedifferent and messagealways: |
|
137 | if messagedifferent and messagealways: | |
137 | raise error.Abort( |
|
138 | raise error.Abort( | |
138 | _( |
|
139 | _( | |
139 | b'committeractions cannot define both ' |
|
140 | b'committeractions cannot define both ' | |
140 | b'messagedifferent and messagealways' |
|
141 | b'messagedifferent and messagealways' | |
141 | ) |
|
142 | ) | |
142 | ) |
|
143 | ) | |
143 |
|
144 | |||
144 | dropcommitter = b'dropcommitter' in committeractions |
|
145 | dropcommitter = b'dropcommitter' in committeractions | |
145 | replaceauthor = b'replaceauthor' in committeractions |
|
146 | replaceauthor = b'replaceauthor' in committeractions | |
146 |
|
147 | |||
147 | if dropcommitter and replaceauthor: |
|
148 | if dropcommitter and replaceauthor: | |
148 | raise error.Abort( |
|
149 | raise error.Abort( | |
149 | _( |
|
150 | _( | |
150 | b'committeractions cannot define both ' |
|
151 | b'committeractions cannot define both ' | |
151 | b'dropcommitter and replaceauthor' |
|
152 | b'dropcommitter and replaceauthor' | |
152 | ) |
|
153 | ) | |
153 | ) |
|
154 | ) | |
154 |
|
155 | |||
155 | if dropcommitter and messagealways: |
|
156 | if dropcommitter and messagealways: | |
156 | raise error.Abort( |
|
157 | raise error.Abort( | |
157 | _( |
|
158 | _( | |
158 | b'committeractions cannot define both ' |
|
159 | b'committeractions cannot define both ' | |
159 | b'dropcommitter and messagealways' |
|
160 | b'dropcommitter and messagealways' | |
160 | ) |
|
161 | ) | |
161 | ) |
|
162 | ) | |
162 |
|
163 | |||
163 | if not messagedifferent and not messagealways: |
|
164 | if not messagedifferent and not messagealways: | |
164 | messagedifferent = b'committer:' |
|
165 | messagedifferent = b'committer:' | |
165 |
|
166 | |||
166 | self.committeractions = { |
|
167 | self.committeractions = { | |
167 | b'dropcommitter': dropcommitter, |
|
168 | b'dropcommitter': dropcommitter, | |
168 | b'replaceauthor': replaceauthor, |
|
169 | b'replaceauthor': replaceauthor, | |
169 | b'messagedifferent': messagedifferent, |
|
170 | b'messagedifferent': messagedifferent, | |
170 | b'messagealways': messagealways, |
|
171 | b'messagealways': messagealways, | |
171 | } |
|
172 | } | |
172 |
|
173 | |||
173 | def after(self): |
|
174 | def after(self): | |
174 | for f in self.catfilepipe: |
|
175 | for f in self.catfilepipe: | |
175 | f.close() |
|
176 | f.close() | |
176 |
|
177 | |||
177 | def getheads(self): |
|
178 | def getheads(self): | |
178 | if not self.revs: |
|
179 | if not self.revs: | |
179 | output, status = self.gitrun( |
|
180 | output, status = self.gitrun( | |
180 | b'rev-parse', b'--branches', b'--remotes' |
|
181 | b'rev-parse', b'--branches', b'--remotes' | |
181 | ) |
|
182 | ) | |
182 | heads = output.splitlines() |
|
183 | heads = output.splitlines() | |
183 | if status: |
|
184 | if status: | |
184 | raise error.Abort(_(b'cannot retrieve git heads')) |
|
185 | raise error.Abort(_(b'cannot retrieve git heads')) | |
185 | else: |
|
186 | else: | |
186 | heads = [] |
|
187 | heads = [] | |
187 | for rev in self.revs: |
|
188 | for rev in self.revs: | |
188 | rawhead, ret = self.gitrun(b'rev-parse', b'--verify', rev) |
|
189 | rawhead, ret = self.gitrun(b'rev-parse', b'--verify', rev) | |
189 | heads.append(rawhead[:-1]) |
|
190 | heads.append(rawhead[:-1]) | |
190 | if ret: |
|
191 | if ret: | |
191 | raise error.Abort(_(b'cannot retrieve git head "%s"') % rev) |
|
192 | raise error.Abort(_(b'cannot retrieve git head "%s"') % rev) | |
192 | return heads |
|
193 | return heads | |
193 |
|
194 | |||
194 | def catfile(self, rev, ftype): |
|
195 | def catfile(self, rev, ftype): | |
195 | if rev == nullhex: |
|
196 | if rev == sha1nodeconstants.nullhex: | |
196 | raise IOError |
|
197 | raise IOError | |
197 | self.catfilepipe[0].write(rev + b'\n') |
|
198 | self.catfilepipe[0].write(rev + b'\n') | |
198 | self.catfilepipe[0].flush() |
|
199 | self.catfilepipe[0].flush() | |
199 | info = self.catfilepipe[1].readline().split() |
|
200 | info = self.catfilepipe[1].readline().split() | |
200 | if info[1] != ftype: |
|
201 | if info[1] != ftype: | |
201 | raise error.Abort( |
|
202 | raise error.Abort( | |
202 | _(b'cannot read %r object at %s') |
|
203 | _(b'cannot read %r object at %s') | |
203 | % (pycompat.bytestr(ftype), rev) |
|
204 | % (pycompat.bytestr(ftype), rev) | |
204 | ) |
|
205 | ) | |
205 | size = int(info[2]) |
|
206 | size = int(info[2]) | |
206 | data = self.catfilepipe[1].read(size) |
|
207 | data = self.catfilepipe[1].read(size) | |
207 | if len(data) < size: |
|
208 | if len(data) < size: | |
208 | raise error.Abort( |
|
209 | raise error.Abort( | |
209 | _(b'cannot read %r object at %s: unexpected size') |
|
210 | _(b'cannot read %r object at %s: unexpected size') | |
210 | % (ftype, rev) |
|
211 | % (ftype, rev) | |
211 | ) |
|
212 | ) | |
212 | # read the trailing newline |
|
213 | # read the trailing newline | |
213 | self.catfilepipe[1].read(1) |
|
214 | self.catfilepipe[1].read(1) | |
214 | return data |
|
215 | return data | |
215 |
|
216 | |||
216 | def getfile(self, name, rev): |
|
217 | def getfile(self, name, rev): | |
217 | if rev == nullhex: |
|
218 | if rev == sha1nodeconstants.nullhex: | |
218 | return None, None |
|
219 | return None, None | |
219 | if name == b'.hgsub': |
|
220 | if name == b'.hgsub': | |
220 | data = b'\n'.join([m.hgsub() for m in self.submoditer()]) |
|
221 | data = b'\n'.join([m.hgsub() for m in self.submoditer()]) | |
221 | mode = b'' |
|
222 | mode = b'' | |
222 | elif name == b'.hgsubstate': |
|
223 | elif name == b'.hgsubstate': | |
223 | data = b'\n'.join([m.hgsubstate() for m in self.submoditer()]) |
|
224 | data = b'\n'.join([m.hgsubstate() for m in self.submoditer()]) | |
224 | mode = b'' |
|
225 | mode = b'' | |
225 | else: |
|
226 | else: | |
226 | data = self.catfile(rev, b"blob") |
|
227 | data = self.catfile(rev, b"blob") | |
227 | mode = self.modecache[(name, rev)] |
|
228 | mode = self.modecache[(name, rev)] | |
228 | return data, mode |
|
229 | return data, mode | |
229 |
|
230 | |||
230 | def submoditer(self): |
|
231 | def submoditer(self): | |
231 | null = nullhex |
|
232 | null = sha1nodeconstants.nullhex | |
232 | for m in sorted(self.submodules, key=lambda p: p.path): |
|
233 | for m in sorted(self.submodules, key=lambda p: p.path): | |
233 | if m.node != null: |
|
234 | if m.node != null: | |
234 | yield m |
|
235 | yield m | |
235 |
|
236 | |||
236 | def parsegitmodules(self, content): |
|
237 | def parsegitmodules(self, content): | |
237 | """Parse the formatted .gitmodules file, example file format: |
|
238 | """Parse the formatted .gitmodules file, example file format: | |
238 | [submodule "sub"]\n |
|
239 | [submodule "sub"]\n | |
239 | \tpath = sub\n |
|
240 | \tpath = sub\n | |
240 | \turl = git://giturl\n |
|
241 | \turl = git://giturl\n | |
241 | """ |
|
242 | """ | |
242 | self.submodules = [] |
|
243 | self.submodules = [] | |
243 | c = config.config() |
|
244 | c = config.config() | |
244 | # Each item in .gitmodules starts with whitespace that cant be parsed |
|
245 | # Each item in .gitmodules starts with whitespace that cant be parsed | |
245 | c.parse( |
|
246 | c.parse( | |
246 | b'.gitmodules', |
|
247 | b'.gitmodules', | |
247 | b'\n'.join(line.strip() for line in content.split(b'\n')), |
|
248 | b'\n'.join(line.strip() for line in content.split(b'\n')), | |
248 | ) |
|
249 | ) | |
249 | for sec in c.sections(): |
|
250 | for sec in c.sections(): | |
250 | # turn the config object into a real dict |
|
251 | # turn the config object into a real dict | |
251 | s = dict(c.items(sec)) |
|
252 | s = dict(c.items(sec)) | |
252 | if b'url' in s and b'path' in s: |
|
253 | if b'url' in s and b'path' in s: | |
253 | self.submodules.append(submodule(s[b'path'], b'', s[b'url'])) |
|
254 | self.submodules.append(submodule(s[b'path'], b'', s[b'url'])) | |
254 |
|
255 | |||
255 | def retrievegitmodules(self, version): |
|
256 | def retrievegitmodules(self, version): | |
256 | modules, ret = self.gitrun( |
|
257 | modules, ret = self.gitrun( | |
257 | b'show', b'%s:%s' % (version, b'.gitmodules') |
|
258 | b'show', b'%s:%s' % (version, b'.gitmodules') | |
258 | ) |
|
259 | ) | |
259 | if ret: |
|
260 | if ret: | |
260 | # This can happen if a file is in the repo that has permissions |
|
261 | # This can happen if a file is in the repo that has permissions | |
261 | # 160000, but there is no .gitmodules file. |
|
262 | # 160000, but there is no .gitmodules file. | |
262 | self.ui.warn( |
|
263 | self.ui.warn( | |
263 | _(b"warning: cannot read submodules config file in %s\n") |
|
264 | _(b"warning: cannot read submodules config file in %s\n") | |
264 | % version |
|
265 | % version | |
265 | ) |
|
266 | ) | |
266 | return |
|
267 | return | |
267 |
|
268 | |||
268 | try: |
|
269 | try: | |
269 | self.parsegitmodules(modules) |
|
270 | self.parsegitmodules(modules) | |
270 | except error.ParseError: |
|
271 | except error.ParseError: | |
271 | self.ui.warn( |
|
272 | self.ui.warn( | |
272 | _(b"warning: unable to parse .gitmodules in %s\n") % version |
|
273 | _(b"warning: unable to parse .gitmodules in %s\n") % version | |
273 | ) |
|
274 | ) | |
274 | return |
|
275 | return | |
275 |
|
276 | |||
276 | for m in self.submodules: |
|
277 | for m in self.submodules: | |
277 | node, ret = self.gitrun(b'rev-parse', b'%s:%s' % (version, m.path)) |
|
278 | node, ret = self.gitrun(b'rev-parse', b'%s:%s' % (version, m.path)) | |
278 | if ret: |
|
279 | if ret: | |
279 | continue |
|
280 | continue | |
280 | m.node = node.strip() |
|
281 | m.node = node.strip() | |
281 |
|
282 | |||
282 | def getchanges(self, version, full): |
|
283 | def getchanges(self, version, full): | |
283 | if full: |
|
284 | if full: | |
284 | raise error.Abort(_(b"convert from git does not support --full")) |
|
285 | raise error.Abort(_(b"convert from git does not support --full")) | |
285 | self.modecache = {} |
|
286 | self.modecache = {} | |
286 | cmd = ( |
|
287 | cmd = ( | |
287 | [b'diff-tree', b'-z', b'--root', b'-m', b'-r'] |
|
288 | [b'diff-tree', b'-z', b'--root', b'-m', b'-r'] | |
288 | + self.simopt |
|
289 | + self.simopt | |
289 | + [version] |
|
290 | + [version] | |
290 | ) |
|
291 | ) | |
291 | output, status = self.gitrun(*cmd) |
|
292 | output, status = self.gitrun(*cmd) | |
292 | if status: |
|
293 | if status: | |
293 | raise error.Abort(_(b'cannot read changes in %s') % version) |
|
294 | raise error.Abort(_(b'cannot read changes in %s') % version) | |
294 | changes = [] |
|
295 | changes = [] | |
295 | copies = {} |
|
296 | copies = {} | |
296 | seen = set() |
|
297 | seen = set() | |
297 | entry = None |
|
298 | entry = None | |
298 | subexists = [False] |
|
299 | subexists = [False] | |
299 | subdeleted = [False] |
|
300 | subdeleted = [False] | |
300 | difftree = output.split(b'\x00') |
|
301 | difftree = output.split(b'\x00') | |
301 | lcount = len(difftree) |
|
302 | lcount = len(difftree) | |
302 | i = 0 |
|
303 | i = 0 | |
303 |
|
304 | |||
304 | skipsubmodules = self.ui.configbool(b'convert', b'git.skipsubmodules') |
|
305 | skipsubmodules = self.ui.configbool(b'convert', b'git.skipsubmodules') | |
305 |
|
306 | |||
306 | def add(entry, f, isdest): |
|
307 | def add(entry, f, isdest): | |
307 | seen.add(f) |
|
308 | seen.add(f) | |
308 | h = entry[3] |
|
309 | h = entry[3] | |
309 | p = entry[1] == b"100755" |
|
310 | p = entry[1] == b"100755" | |
310 | s = entry[1] == b"120000" |
|
311 | s = entry[1] == b"120000" | |
311 | renamesource = not isdest and entry[4][0] == b'R' |
|
312 | renamesource = not isdest and entry[4][0] == b'R' | |
312 |
|
313 | |||
313 | if f == b'.gitmodules': |
|
314 | if f == b'.gitmodules': | |
314 | if skipsubmodules: |
|
315 | if skipsubmodules: | |
315 | return |
|
316 | return | |
316 |
|
317 | |||
317 | subexists[0] = True |
|
318 | subexists[0] = True | |
318 | if entry[4] == b'D' or renamesource: |
|
319 | if entry[4] == b'D' or renamesource: | |
319 | subdeleted[0] = True |
|
320 | subdeleted[0] = True | |
320 | changes.append((b'.hgsub', nullhex)) |
|
321 | changes.append((b'.hgsub', sha1nodeconstants.nullhex)) | |
321 | else: |
|
322 | else: | |
322 | changes.append((b'.hgsub', b'')) |
|
323 | changes.append((b'.hgsub', b'')) | |
323 | elif entry[1] == b'160000' or entry[0] == b':160000': |
|
324 | elif entry[1] == b'160000' or entry[0] == b':160000': | |
324 | if not skipsubmodules: |
|
325 | if not skipsubmodules: | |
325 | subexists[0] = True |
|
326 | subexists[0] = True | |
326 | else: |
|
327 | else: | |
327 | if renamesource: |
|
328 | if renamesource: | |
328 | h = nullhex |
|
329 | h = sha1nodeconstants.nullhex | |
329 | self.modecache[(f, h)] = (p and b"x") or (s and b"l") or b"" |
|
330 | self.modecache[(f, h)] = (p and b"x") or (s and b"l") or b"" | |
330 | changes.append((f, h)) |
|
331 | changes.append((f, h)) | |
331 |
|
332 | |||
332 | while i < lcount: |
|
333 | while i < lcount: | |
333 | l = difftree[i] |
|
334 | l = difftree[i] | |
334 | i += 1 |
|
335 | i += 1 | |
335 | if not entry: |
|
336 | if not entry: | |
336 | if not l.startswith(b':'): |
|
337 | if not l.startswith(b':'): | |
337 | continue |
|
338 | continue | |
338 | entry = tuple(pycompat.bytestr(p) for p in l.split()) |
|
339 | entry = tuple(pycompat.bytestr(p) for p in l.split()) | |
339 | continue |
|
340 | continue | |
340 | f = l |
|
341 | f = l | |
341 | if entry[4][0] == b'C': |
|
342 | if entry[4][0] == b'C': | |
342 | copysrc = f |
|
343 | copysrc = f | |
343 | copydest = difftree[i] |
|
344 | copydest = difftree[i] | |
344 | i += 1 |
|
345 | i += 1 | |
345 | f = copydest |
|
346 | f = copydest | |
346 | copies[copydest] = copysrc |
|
347 | copies[copydest] = copysrc | |
347 | if f not in seen: |
|
348 | if f not in seen: | |
348 | add(entry, f, False) |
|
349 | add(entry, f, False) | |
349 | # A file can be copied multiple times, or modified and copied |
|
350 | # A file can be copied multiple times, or modified and copied | |
350 | # simultaneously. So f can be repeated even if fdest isn't. |
|
351 | # simultaneously. So f can be repeated even if fdest isn't. | |
351 | if entry[4][0] == b'R': |
|
352 | if entry[4][0] == b'R': | |
352 | # rename: next line is the destination |
|
353 | # rename: next line is the destination | |
353 | fdest = difftree[i] |
|
354 | fdest = difftree[i] | |
354 | i += 1 |
|
355 | i += 1 | |
355 | if fdest not in seen: |
|
356 | if fdest not in seen: | |
356 | add(entry, fdest, True) |
|
357 | add(entry, fdest, True) | |
357 | # .gitmodules isn't imported at all, so it being copied to |
|
358 | # .gitmodules isn't imported at all, so it being copied to | |
358 | # and fro doesn't really make sense |
|
359 | # and fro doesn't really make sense | |
359 | if f != b'.gitmodules' and fdest != b'.gitmodules': |
|
360 | if f != b'.gitmodules' and fdest != b'.gitmodules': | |
360 | copies[fdest] = f |
|
361 | copies[fdest] = f | |
361 | entry = None |
|
362 | entry = None | |
362 |
|
363 | |||
363 | if subexists[0]: |
|
364 | if subexists[0]: | |
364 | if subdeleted[0]: |
|
365 | if subdeleted[0]: | |
365 | changes.append((b'.hgsubstate', nullhex)) |
|
366 | changes.append((b'.hgsubstate', sha1nodeconstants.nullhex)) | |
366 | else: |
|
367 | else: | |
367 | self.retrievegitmodules(version) |
|
368 | self.retrievegitmodules(version) | |
368 | changes.append((b'.hgsubstate', b'')) |
|
369 | changes.append((b'.hgsubstate', b'')) | |
369 | return (changes, copies, set()) |
|
370 | return (changes, copies, set()) | |
370 |
|
371 | |||
371 | def getcommit(self, version): |
|
372 | def getcommit(self, version): | |
372 | c = self.catfile(version, b"commit") # read the commit hash |
|
373 | c = self.catfile(version, b"commit") # read the commit hash | |
373 | end = c.find(b"\n\n") |
|
374 | end = c.find(b"\n\n") | |
374 | message = c[end + 2 :] |
|
375 | message = c[end + 2 :] | |
375 | message = self.recode(message) |
|
376 | message = self.recode(message) | |
376 | l = c[:end].splitlines() |
|
377 | l = c[:end].splitlines() | |
377 | parents = [] |
|
378 | parents = [] | |
378 | author = committer = None |
|
379 | author = committer = None | |
379 | extra = {} |
|
380 | extra = {} | |
380 | for e in l[1:]: |
|
381 | for e in l[1:]: | |
381 | n, v = e.split(b" ", 1) |
|
382 | n, v = e.split(b" ", 1) | |
382 | if n == b"author": |
|
383 | if n == b"author": | |
383 | p = v.split() |
|
384 | p = v.split() | |
384 | tm, tz = p[-2:] |
|
385 | tm, tz = p[-2:] | |
385 | author = b" ".join(p[:-2]) |
|
386 | author = b" ".join(p[:-2]) | |
386 | if author[0] == b"<": |
|
387 | if author[0] == b"<": | |
387 | author = author[1:-1] |
|
388 | author = author[1:-1] | |
388 | author = self.recode(author) |
|
389 | author = self.recode(author) | |
389 | if n == b"committer": |
|
390 | if n == b"committer": | |
390 | p = v.split() |
|
391 | p = v.split() | |
391 | tm, tz = p[-2:] |
|
392 | tm, tz = p[-2:] | |
392 | committer = b" ".join(p[:-2]) |
|
393 | committer = b" ".join(p[:-2]) | |
393 | if committer[0] == b"<": |
|
394 | if committer[0] == b"<": | |
394 | committer = committer[1:-1] |
|
395 | committer = committer[1:-1] | |
395 | committer = self.recode(committer) |
|
396 | committer = self.recode(committer) | |
396 | if n == b"parent": |
|
397 | if n == b"parent": | |
397 | parents.append(v) |
|
398 | parents.append(v) | |
398 | if n in self.copyextrakeys: |
|
399 | if n in self.copyextrakeys: | |
399 | extra[n] = v |
|
400 | extra[n] = v | |
400 |
|
401 | |||
401 | if self.committeractions[b'dropcommitter']: |
|
402 | if self.committeractions[b'dropcommitter']: | |
402 | committer = None |
|
403 | committer = None | |
403 | elif self.committeractions[b'replaceauthor']: |
|
404 | elif self.committeractions[b'replaceauthor']: | |
404 | author = committer |
|
405 | author = committer | |
405 |
|
406 | |||
406 | if committer: |
|
407 | if committer: | |
407 | messagealways = self.committeractions[b'messagealways'] |
|
408 | messagealways = self.committeractions[b'messagealways'] | |
408 | messagedifferent = self.committeractions[b'messagedifferent'] |
|
409 | messagedifferent = self.committeractions[b'messagedifferent'] | |
409 | if messagealways: |
|
410 | if messagealways: | |
410 | message += b'\n%s %s\n' % (messagealways, committer) |
|
411 | message += b'\n%s %s\n' % (messagealways, committer) | |
411 | elif messagedifferent and author != committer: |
|
412 | elif messagedifferent and author != committer: | |
412 | message += b'\n%s %s\n' % (messagedifferent, committer) |
|
413 | message += b'\n%s %s\n' % (messagedifferent, committer) | |
413 |
|
414 | |||
414 | tzs, tzh, tzm = tz[-5:-4] + b"1", tz[-4:-2], tz[-2:] |
|
415 | tzs, tzh, tzm = tz[-5:-4] + b"1", tz[-4:-2], tz[-2:] | |
415 | tz = -int(tzs) * (int(tzh) * 3600 + int(tzm)) |
|
416 | tz = -int(tzs) * (int(tzh) * 3600 + int(tzm)) | |
416 | date = tm + b" " + (b"%d" % tz) |
|
417 | date = tm + b" " + (b"%d" % tz) | |
417 | saverev = self.ui.configbool(b'convert', b'git.saverev') |
|
418 | saverev = self.ui.configbool(b'convert', b'git.saverev') | |
418 |
|
419 | |||
419 | c = common.commit( |
|
420 | c = common.commit( | |
420 | parents=parents, |
|
421 | parents=parents, | |
421 | date=date, |
|
422 | date=date, | |
422 | author=author, |
|
423 | author=author, | |
423 | desc=message, |
|
424 | desc=message, | |
424 | rev=version, |
|
425 | rev=version, | |
425 | extra=extra, |
|
426 | extra=extra, | |
426 | saverev=saverev, |
|
427 | saverev=saverev, | |
427 | ) |
|
428 | ) | |
428 | return c |
|
429 | return c | |
429 |
|
430 | |||
430 | def numcommits(self): |
|
431 | def numcommits(self): | |
431 | output, ret = self.gitrunlines(b'rev-list', b'--all') |
|
432 | output, ret = self.gitrunlines(b'rev-list', b'--all') | |
432 | if ret: |
|
433 | if ret: | |
433 | raise error.Abort( |
|
434 | raise error.Abort( | |
434 | _(b'cannot retrieve number of commits in %s') % self.path |
|
435 | _(b'cannot retrieve number of commits in %s') % self.path | |
435 | ) |
|
436 | ) | |
436 | return len(output) |
|
437 | return len(output) | |
437 |
|
438 | |||
438 | def gettags(self): |
|
439 | def gettags(self): | |
439 | tags = {} |
|
440 | tags = {} | |
440 | alltags = {} |
|
441 | alltags = {} | |
441 | output, status = self.gitrunlines(b'ls-remote', b'--tags', self.path) |
|
442 | output, status = self.gitrunlines(b'ls-remote', b'--tags', self.path) | |
442 |
|
443 | |||
443 | if status: |
|
444 | if status: | |
444 | raise error.Abort(_(b'cannot read tags from %s') % self.path) |
|
445 | raise error.Abort(_(b'cannot read tags from %s') % self.path) | |
445 | prefix = b'refs/tags/' |
|
446 | prefix = b'refs/tags/' | |
446 |
|
447 | |||
447 | # Build complete list of tags, both annotated and bare ones |
|
448 | # Build complete list of tags, both annotated and bare ones | |
448 | for line in output: |
|
449 | for line in output: | |
449 | line = line.strip() |
|
450 | line = line.strip() | |
450 | if line.startswith(b"error:") or line.startswith(b"fatal:"): |
|
451 | if line.startswith(b"error:") or line.startswith(b"fatal:"): | |
451 | raise error.Abort(_(b'cannot read tags from %s') % self.path) |
|
452 | raise error.Abort(_(b'cannot read tags from %s') % self.path) | |
452 | node, tag = line.split(None, 1) |
|
453 | node, tag = line.split(None, 1) | |
453 | if not tag.startswith(prefix): |
|
454 | if not tag.startswith(prefix): | |
454 | continue |
|
455 | continue | |
455 | alltags[tag[len(prefix) :]] = node |
|
456 | alltags[tag[len(prefix) :]] = node | |
456 |
|
457 | |||
457 | # Filter out tag objects for annotated tag refs |
|
458 | # Filter out tag objects for annotated tag refs | |
458 | for tag in alltags: |
|
459 | for tag in alltags: | |
459 | if tag.endswith(b'^{}'): |
|
460 | if tag.endswith(b'^{}'): | |
460 | tags[tag[:-3]] = alltags[tag] |
|
461 | tags[tag[:-3]] = alltags[tag] | |
461 | else: |
|
462 | else: | |
462 | if tag + b'^{}' in alltags: |
|
463 | if tag + b'^{}' in alltags: | |
463 | continue |
|
464 | continue | |
464 | else: |
|
465 | else: | |
465 | tags[tag] = alltags[tag] |
|
466 | tags[tag] = alltags[tag] | |
466 |
|
467 | |||
467 | return tags |
|
468 | return tags | |
468 |
|
469 | |||
469 | def getchangedfiles(self, version, i): |
|
470 | def getchangedfiles(self, version, i): | |
470 | changes = [] |
|
471 | changes = [] | |
471 | if i is None: |
|
472 | if i is None: | |
472 | output, status = self.gitrunlines( |
|
473 | output, status = self.gitrunlines( | |
473 | b'diff-tree', b'--root', b'-m', b'-r', version |
|
474 | b'diff-tree', b'--root', b'-m', b'-r', version | |
474 | ) |
|
475 | ) | |
475 | if status: |
|
476 | if status: | |
476 | raise error.Abort(_(b'cannot read changes in %s') % version) |
|
477 | raise error.Abort(_(b'cannot read changes in %s') % version) | |
477 | for l in output: |
|
478 | for l in output: | |
478 | if b"\t" not in l: |
|
479 | if b"\t" not in l: | |
479 | continue |
|
480 | continue | |
480 | m, f = l[:-1].split(b"\t") |
|
481 | m, f = l[:-1].split(b"\t") | |
481 | changes.append(f) |
|
482 | changes.append(f) | |
482 | else: |
|
483 | else: | |
483 | output, status = self.gitrunlines( |
|
484 | output, status = self.gitrunlines( | |
484 | b'diff-tree', |
|
485 | b'diff-tree', | |
485 | b'--name-only', |
|
486 | b'--name-only', | |
486 | b'--root', |
|
487 | b'--root', | |
487 | b'-r', |
|
488 | b'-r', | |
488 | version, |
|
489 | version, | |
489 | b'%s^%d' % (version, i + 1), |
|
490 | b'%s^%d' % (version, i + 1), | |
490 | b'--', |
|
491 | b'--', | |
491 | ) |
|
492 | ) | |
492 | if status: |
|
493 | if status: | |
493 | raise error.Abort(_(b'cannot read changes in %s') % version) |
|
494 | raise error.Abort(_(b'cannot read changes in %s') % version) | |
494 | changes = [f.rstrip(b'\n') for f in output] |
|
495 | changes = [f.rstrip(b'\n') for f in output] | |
495 |
|
496 | |||
496 | return changes |
|
497 | return changes | |
497 |
|
498 | |||
498 | def getbookmarks(self): |
|
499 | def getbookmarks(self): | |
499 | bookmarks = {} |
|
500 | bookmarks = {} | |
500 |
|
501 | |||
501 | # Handle local and remote branches |
|
502 | # Handle local and remote branches | |
502 | remoteprefix = self.ui.config(b'convert', b'git.remoteprefix') |
|
503 | remoteprefix = self.ui.config(b'convert', b'git.remoteprefix') | |
503 | reftypes = [ |
|
504 | reftypes = [ | |
504 | # (git prefix, hg prefix) |
|
505 | # (git prefix, hg prefix) | |
505 | (b'refs/remotes/origin/', remoteprefix + b'/'), |
|
506 | (b'refs/remotes/origin/', remoteprefix + b'/'), | |
506 | (b'refs/heads/', b''), |
|
507 | (b'refs/heads/', b''), | |
507 | ] |
|
508 | ] | |
508 |
|
509 | |||
509 | exclude = { |
|
510 | exclude = { | |
510 | b'refs/remotes/origin/HEAD', |
|
511 | b'refs/remotes/origin/HEAD', | |
511 | } |
|
512 | } | |
512 |
|
513 | |||
513 | try: |
|
514 | try: | |
514 | output, status = self.gitrunlines(b'show-ref') |
|
515 | output, status = self.gitrunlines(b'show-ref') | |
515 | for line in output: |
|
516 | for line in output: | |
516 | line = line.strip() |
|
517 | line = line.strip() | |
517 | rev, name = line.split(None, 1) |
|
518 | rev, name = line.split(None, 1) | |
518 | # Process each type of branch |
|
519 | # Process each type of branch | |
519 | for gitprefix, hgprefix in reftypes: |
|
520 | for gitprefix, hgprefix in reftypes: | |
520 | if not name.startswith(gitprefix) or name in exclude: |
|
521 | if not name.startswith(gitprefix) or name in exclude: | |
521 | continue |
|
522 | continue | |
522 | name = b'%s%s' % (hgprefix, name[len(gitprefix) :]) |
|
523 | name = b'%s%s' % (hgprefix, name[len(gitprefix) :]) | |
523 | bookmarks[name] = rev |
|
524 | bookmarks[name] = rev | |
524 | except Exception: |
|
525 | except Exception: | |
525 | pass |
|
526 | pass | |
526 |
|
527 | |||
527 | return bookmarks |
|
528 | return bookmarks | |
528 |
|
529 | |||
529 | def checkrevformat(self, revstr, mapname=b'splicemap'): |
|
530 | def checkrevformat(self, revstr, mapname=b'splicemap'): | |
530 | """git revision string is a 40 byte hex""" |
|
531 | """git revision string is a 40 byte hex""" | |
531 | self.checkhexformat(revstr, mapname) |
|
532 | self.checkhexformat(revstr, mapname) |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: file copied from mercurial/dirstate.py to mercurial/dirstatemap.py |
|
NO CONTENT: file copied from mercurial/dirstate.py to mercurial/dirstatemap.py | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: modified file |
|
NO CONTENT: modified file | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed | ||
The requested commit or file is too big and content was truncated. Show full diff |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed | ||
The requested commit or file is too big and content was truncated. Show full diff |
General Comments 0
You need to be logged in to leave comments.
Login now