--- /dev/null
+++ b/contrib/automation/linux-requirements-py3.5.txt
@@ -0,0 +1,194 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+#    pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py3.5.txt contrib/automation/linux-requirements.txt.in
+#
+astroid==2.4.2 \
+    --hash=sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703 \
+    --hash=sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386
+    # via pylint
+docutils==0.17.1 \
+    --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 \
+    --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61
+    # via -r contrib/automation/linux-requirements.txt.in
+fuzzywuzzy==0.18.0 \
+    --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \
+    --hash=sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993
+    # via -r contrib/automation/linux-requirements.txt.in
+idna==3.1 \
+    --hash=sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16 \
+    --hash=sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1
+    # via yarl
+isort==4.3.21 \
+    --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \
+    --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd
+    # via
+    #   -r contrib/automation/linux-requirements.txt.in
+    #   pylint
+lazy-object-proxy==1.4.3 \
+    --hash=sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d \
+    --hash=sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449 \
+    --hash=sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08 \
+    --hash=sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a \
+    --hash=sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50 \
+    --hash=sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd \
+    --hash=sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239 \
+    --hash=sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb \
+    --hash=sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea \
+    --hash=sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e \
+    --hash=sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156 \
+    --hash=sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142 \
+    --hash=sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442 \
+    --hash=sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62 \
+    --hash=sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db \
+    --hash=sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531 \
+    --hash=sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383 \
+    --hash=sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a \
+    --hash=sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357 \
+    --hash=sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4 \
+    --hash=sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0
+    # via astroid
+mccabe==0.6.1 \
+    --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \
+    --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f
+    # via pylint
+multidict==5.0.2 \
+    --hash=sha256:060d68ae3e674c913ec41a464916f12c4d7ff17a3a9ebbf37ba7f2c681c2b33e \
+    --hash=sha256:06f39f0ddc308dab4e5fa282d145f90cd38d7ed75390fc83335636909a9ec191 \
+    --hash=sha256:17847fede1aafdb7e74e01bb34ab47a1a1ea726e8184c623c45d7e428d2d5d34 \
+    --hash=sha256:1cd102057b09223b919f9447c669cf2efabeefb42a42ae6233f25ffd7ee31a79 \
+    --hash=sha256:20cc9b2dd31761990abff7d0e63cd14dbfca4ebb52a77afc917b603473951a38 \
+    --hash=sha256:2576e30bbec004e863d87216bc34abe24962cc2e964613241a1c01c7681092ab \
+    --hash=sha256:2ab9cad4c5ef5c41e1123ed1f89f555aabefb9391d4e01fd6182de970b7267ed \
+    --hash=sha256:359ea00e1b53ceef282232308da9d9a3f60d645868a97f64df19485c7f9ef628 \
+    --hash=sha256:3e61cc244fd30bd9fdfae13bdd0c5ec65da51a86575ff1191255cae677045ffe \
+    --hash=sha256:43c7a87d8c31913311a1ab24b138254a0ee89142983b327a2c2eab7a7d10fea9 \
+    --hash=sha256:4a3f19da871befa53b48dd81ee48542f519beffa13090dc135fffc18d8fe36db \
+    --hash=sha256:4df708ef412fd9b59b7e6c77857e64c1f6b4c0116b751cb399384ec9a28baa66 \
+    --hash=sha256:59182e975b8c197d0146a003d0f0d5dc5487ce4899502061d8df585b0f51fba2 \
+    --hash=sha256:6128d2c0956fd60e39ec7d1c8f79426f0c915d36458df59ddd1f0cff0340305f \
+    --hash=sha256:6168839491a533fa75f3f5d48acbb829475e6c7d9fa5c6e245153b5f79b986a3 \
+    --hash=sha256:62abab8088704121297d39c8f47156cb8fab1da731f513e59ba73946b22cf3d0 \
+    --hash=sha256:653b2bbb0bbf282c37279dd04f429947ac92713049e1efc615f68d4e64b1dbc2 \
+    --hash=sha256:6566749cd78cb37cbf8e8171b5cd2cbfc03c99f0891de12255cf17a11c07b1a3 \
+    --hash=sha256:76cbdb22f48de64811f9ce1dd4dee09665f84f32d6a26de249a50c1e90e244e0 \
+    --hash=sha256:8efcf070d60fd497db771429b1c769a3783e3a0dd96c78c027e676990176adc5 \
+    --hash=sha256:8fa4549f341a057feec4c3139056ba73e17ed03a506469f447797a51f85081b5 \
+    --hash=sha256:9380b3f2b00b23a4106ba9dd022df3e6e2e84e1788acdbdd27603b621b3288df \
+    --hash=sha256:9ed9b280f7778ad6f71826b38a73c2fdca4077817c64bc1102fdada58e75c03c \
+    --hash=sha256:a7b8b5bd16376c8ac2977748bd978a200326af5145d8d0e7f799e2b355d425b6 \
+    --hash=sha256:af271c2540d1cd2a137bef8d95a8052230aa1cda26dd3b2c73d858d89993d518 \
+    --hash=sha256:b561e76c9e21402d9a446cdae13398f9942388b9bff529f32dfa46220af54d00 \
+    --hash=sha256:b82400ef848bbac6b9035a105ac6acaa1fb3eea0d164e35bbb21619b88e49fed \
+    --hash=sha256:b98af08d7bb37d3456a22f689819ea793e8d6961b9629322d7728c4039071641 \
+    --hash=sha256:c58e53e1c73109fdf4b759db9f2939325f510a8a5215135330fe6755921e4886 \
+    --hash=sha256:cbabfc12b401d074298bfda099c58dfa5348415ae2e4ec841290627cb7cb6b2e \
+    --hash=sha256:d4a6fb98e9e9be3f7d70fd3e852369c00a027bd5ed0f3e8ade3821bcad257408 \
+    --hash=sha256:d99da85d6890267292065e654a329e1d2f483a5d2485e347383800e616a8c0b1 \
+    --hash=sha256:e58db0e0d60029915f7fc95a8683fa815e204f2e1990f1fb46a7778d57ca8c35 \
+    --hash=sha256:e5bf89fe57f702a046c7ec718fe330ed50efd4bcf74722940db2eb0919cddb1c \
+    --hash=sha256:f612e8ef8408391a4a3366e3508bab8ef97b063b4918a317cb6e6de4415f01af \
+    --hash=sha256:f65a2442c113afde52fb09f9a6276bbc31da71add99dc76c3adf6083234e07c6 \
+    --hash=sha256:fa0503947a99a1be94f799fac89d67a5e20c333e78ddae16e8534b151cdc588a
+    # via yarl
+pyflakes==2.3.1 \
+    --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \
+    --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db
+    # via -r contrib/automation/linux-requirements.txt.in
+pygments==2.9.0 \
+    --hash=sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f \
+    --hash=sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e
+    # via -r contrib/automation/linux-requirements.txt.in
+pylint==2.6.2 \
+    --hash=sha256:718b74786ea7ed07aa0c58bf572154d4679f960d26e9641cc1de204a30b87fc9 \
+    --hash=sha256:e71c2e9614a4f06e36498f310027942b0f4f2fde20aebb01655b31edc63b9eaf
+    # via -r contrib/automation/linux-requirements.txt.in
+python-levenshtein==0.12.2 \
+    --hash=sha256:dc2395fbd148a1ab31090dd113c366695934b9e85fe5a4b2a032745efd0346f6
+    # via -r contrib/automation/linux-requirements.txt.in
+pyyaml==5.3.1 \
+    --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \
+    --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \
+    --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \
+    --hash=sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e \
+    --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \
+    --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \
+    --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \
+    --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \
+    --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \
+    --hash=sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a \
+    --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \
+    --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \
+    --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a
+    # via vcrpy
+six==1.16.0 \
+    --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
+    --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
+    # via
+    #   astroid
+    #   vcrpy
+toml==0.10.2 \
+    --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \
+    --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f
+    # via pylint
+typed-ast==1.4.3 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \
+    --hash=sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace \
+    --hash=sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff \
+    --hash=sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266 \
+    --hash=sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528 \
+    --hash=sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6 \
+    --hash=sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808 \
+    --hash=sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4 \
+    --hash=sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363 \
+    --hash=sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341 \
+    --hash=sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04 \
+    --hash=sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41 \
+    --hash=sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e \
+    --hash=sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3 \
+    --hash=sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899 \
+    --hash=sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805 \
+    --hash=sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c \
+    --hash=sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c \
+    --hash=sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39 \
+    --hash=sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a \
+    --hash=sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3 \
+    --hash=sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7 \
+    --hash=sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f \
+    --hash=sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075 \
+    --hash=sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0 \
+    --hash=sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40 \
+    --hash=sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428 \
+    --hash=sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927 \
+    --hash=sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3 \
+    --hash=sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f \
+    --hash=sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65
+    # via
+    #   -r contrib/automation/linux-requirements.txt.in
+    #   astroid
+vcrpy==4.1.1 \
+    --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \
+    --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599
+    # via -r contrib/automation/linux-requirements.txt.in
+wrapt==1.12.1 \
+    --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7
+    # via
+    #   astroid
+    #   vcrpy
+yarl==1.3.0 \
+    --hash=sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9 \
+    --hash=sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f \
+    --hash=sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb \
+    --hash=sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320 \
+    --hash=sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842 \
+    --hash=sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0 \
+    --hash=sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829 \
+    --hash=sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310 \
+    --hash=sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4 \
+    --hash=sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8 \
+    --hash=sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1
+    # via vcrpy
+
+# WARNING: The following packages were not pinned, but pip requires them to be
+# pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag.
+# setuptools
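
The file above is the pip-compile lock for the Python 3.5 CI environment: every package is pinned and hash-locked, and installing from a requirements file that carries hashes puts pip into hash-checking mode. In that mode pip requires every requirement, including build prerequisites, to be pinned with hashes, which is why the unpinned setuptools produces the trailing warning; regenerating with the pip-compile command in the header (optionally with --allow-unsafe) keeps the file consistent. The pins here are presumably older releases than in linux-requirements-py3.txt because they must still support Python 3.5.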
|
[21 more new files in this commit (19 mode 100644, 2 executable mode 100755) were truncated by the diff viewer and are not shown.]
--- a/contrib/automation/hgautomation/aws.py
+++ b/contrib/automation/hgautomation/aws.py
@@ -1,1325 +1,1335 @@
 # aws.py - Automation code for Amazon Web Services
 #
 # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.

 # no-check-code because Python 3 native.

 import contextlib
 import copy
 import hashlib
 import json
 import os
 import pathlib
 import subprocess
 import time

 import boto3
 import botocore.exceptions

 from .linux import BOOTSTRAP_DEBIAN
 from .ssh import (
     exec_command as ssh_exec_command,
     wait_for_ssh,
 )
 from .winrm import (
     run_powershell,
     wait_for_winrm,
 )


 SOURCE_ROOT = pathlib.Path(
     os.path.abspath(__file__)
 ).parent.parent.parent.parent

 INSTALL_WINDOWS_DEPENDENCIES = (
     SOURCE_ROOT / 'contrib' / 'install-windows-dependencies.ps1'
 )


 INSTANCE_TYPES_WITH_STORAGE = {
     'c5d',
     'd2',
     'h1',
     'i3',
     'm5ad',
     'm5d',
     'r5d',
     'r5ad',
     'x1',
     'z1d',
 }


 AMAZON_ACCOUNT_ID = '801119661308'
 DEBIAN_ACCOUNT_ID = '379101102735'
 DEBIAN_ACCOUNT_ID_2 = '136693071363'
 UBUNTU_ACCOUNT_ID = '099720109477'


 WINDOWS_BASE_IMAGE_NAME = 'Windows_Server-2019-English-Full-Base-*'


 KEY_PAIRS = {
     'automation',
 }


 SECURITY_GROUPS = {
     'linux-dev-1': {
         'description': 'Mercurial Linux instances that perform build/test automation',
         'ingress': [
             {
                 'FromPort': 22,
                 'ToPort': 22,
                 'IpProtocol': 'tcp',
                 'IpRanges': [
                     {
                         'CidrIp': '0.0.0.0/0',
                         'Description': 'SSH from entire Internet',
                     },
                 ],
             },
         ],
     },
     'windows-dev-1': {
         'description': 'Mercurial Windows instances that perform build automation',
         'ingress': [
             {
                 'FromPort': 22,
                 'ToPort': 22,
                 'IpProtocol': 'tcp',
                 'IpRanges': [
                     {
                         'CidrIp': '0.0.0.0/0',
                         'Description': 'SSH from entire Internet',
                     },
                 ],
             },
             {
                 'FromPort': 3389,
                 'ToPort': 3389,
                 'IpProtocol': 'tcp',
                 'IpRanges': [
                     {
                         'CidrIp': '0.0.0.0/0',
                         'Description': 'RDP from entire Internet',
                     },
                 ],
             },
             {
                 'FromPort': 5985,
                 'ToPort': 5986,
                 'IpProtocol': 'tcp',
                 'IpRanges': [
                     {
                         'CidrIp': '0.0.0.0/0',
                         'Description': 'PowerShell Remoting (Windows Remote Management)',
                     },
                 ],
             },
         ],
     },
 }


 IAM_ROLES = {
     'ephemeral-ec2-role-1': {
         'description': 'Mercurial temporary EC2 instances',
         'policy_arns': [
             'arn:aws:iam::aws:policy/service-role/AmazonEC2RoleforSSM',
         ],
     },
 }


 ASSUME_ROLE_POLICY_DOCUMENT = '''
 {
   "Version": "2012-10-17",
   "Statement": [
     {
       "Effect": "Allow",
       "Principal": {
         "Service": "ec2.amazonaws.com"
       },
       "Action": "sts:AssumeRole"
     }
   ]
 }
 '''.strip()


 IAM_INSTANCE_PROFILES = {
     'ephemeral-ec2-1': {
         'roles': [
             'ephemeral-ec2-role-1',
         ],
     }
 }


 # User Data for Windows EC2 instance. Mainly used to set the password
 # and configure WinRM.
 # Inspired by the User Data script used by Packer
 # (from https://www.packer.io/intro/getting-started/build-image.html).
 WINDOWS_USER_DATA = r'''
 <powershell>

 # TODO enable this once we figure out what is failing.
 #$ErrorActionPreference = "stop"

 # Set administrator password
 net user Administrator "%s"
 wmic useraccount where "name='Administrator'" set PasswordExpires=FALSE

 # First, make sure WinRM can't be connected to
 netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new enable=yes action=block

 # Delete any existing WinRM listeners
 winrm delete winrm/config/listener?Address=*+Transport=HTTP 2>$Null
 winrm delete winrm/config/listener?Address=*+Transport=HTTPS 2>$Null

 # Create a new WinRM listener and configure
 winrm create winrm/config/listener?Address=*+Transport=HTTP
 winrm set winrm/config/winrs '@{MaxMemoryPerShellMB="0"}'
 winrm set winrm/config '@{MaxTimeoutms="7200000"}'
 winrm set winrm/config/service '@{AllowUnencrypted="true"}'
 winrm set winrm/config/service '@{MaxConcurrentOperationsPerUser="12000"}'
 winrm set winrm/config/service/auth '@{Basic="true"}'
 winrm set winrm/config/client/auth '@{Basic="true"}'

 # Configure UAC to allow privilege elevation in remote shells
 $Key = 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System'
 $Setting = 'LocalAccountTokenFilterPolicy'
 Set-ItemProperty -Path $Key -Name $Setting -Value 1 -Force

 # Avoid long usernames in the temp directory path because the '~' causes extra quoting in ssh output
 [System.Environment]::SetEnvironmentVariable('TMP', 'C:\Temp', [System.EnvironmentVariableTarget]::User)
 [System.Environment]::SetEnvironmentVariable('TEMP', 'C:\Temp', [System.EnvironmentVariableTarget]::User)

 # Configure and restart the WinRM Service; Enable the required firewall exception
 Stop-Service -Name WinRM
 Set-Service -Name WinRM -StartupType Automatic
 netsh advfirewall firewall set rule name="Windows Remote Management (HTTP-In)" new action=allow localip=any remoteip=any
 Start-Service -Name WinRM

 # Disable firewall on private network interfaces so prompts don't appear.
 Set-NetFirewallProfile -Name private -Enabled false
 </powershell>
 '''.lstrip()


 WINDOWS_BOOTSTRAP_POWERSHELL = '''
 Write-Output "installing PowerShell dependencies"
 Install-PackageProvider -Name NuGet -MinimumVersion 2.8.5.201 -Force
 Set-PSRepository -Name PSGallery -InstallationPolicy Trusted
 Install-Module -Name OpenSSHUtils -RequiredVersion 0.0.2.0

 Write-Output "installing OpenSSH server"
 Add-WindowsCapability -Online -Name OpenSSH.Server~~~~0.0.1.0
 # Various tools will attempt to use older versions of .NET. So we enable
 # the feature that provides them so it doesn't have to be auto-enabled
 # later.
 Write-Output "enabling .NET Framework feature"
 Install-WindowsFeature -Name Net-Framework-Core
 '''


 class AWSConnection:
     """Manages the state of a connection with AWS."""

     def __init__(self, automation, region: str, ensure_ec2_state: bool = True):
         self.automation = automation
         self.local_state_path = automation.state_path

         self.prefix = 'hg-'

         self.session = boto3.session.Session(region_name=region)
         self.ec2client = self.session.client('ec2')
         self.ec2resource = self.session.resource('ec2')
         self.iamclient = self.session.client('iam')
         self.iamresource = self.session.resource('iam')
         self.security_groups = {}

         if ensure_ec2_state:
             ensure_key_pairs(automation.state_path, self.ec2resource)
             self.security_groups = ensure_security_groups(self.ec2resource)
             ensure_iam_state(self.iamclient, self.iamresource)

     def key_pair_path_private(self, name):
         """Path to a key pair private key file."""
         return self.local_state_path / 'keys' / ('keypair-%s' % name)

     def key_pair_path_public(self, name):
         return self.local_state_path / 'keys' / ('keypair-%s.pub' % name)


 def rsa_key_fingerprint(p: pathlib.Path):
     """Compute the fingerprint of an RSA private key."""

     # TODO use rsa package.
     res = subprocess.run(
         [
             'openssl',
             'pkcs8',
             '-in',
             str(p),
             '-nocrypt',
             '-topk8',
             '-outform',
             'DER',
         ],
         capture_output=True,
         check=True,
     )

     sha1 = hashlib.sha1(res.stdout).hexdigest()
     return ':'.join(a + b for a, b in zip(sha1[::2], sha1[1::2]))
280 | 280 | |
|
281 | 281 | |
|
282 | 282 | def ensure_key_pairs(state_path: pathlib.Path, ec2resource, prefix='hg-'): |
|
283 | 283 | remote_existing = {} |
|
284 | 284 | |
|
285 | 285 | for kpi in ec2resource.key_pairs.all(): |
|
286 | 286 | if kpi.name.startswith(prefix): |
|
287 | 287 | remote_existing[kpi.name[len(prefix) :]] = kpi.key_fingerprint |
|
288 | 288 | |
|
289 | 289 | # Validate that we have these keys locally. |
|
290 | 290 | key_path = state_path / 'keys' |
|
291 | 291 | key_path.mkdir(exist_ok=True, mode=0o700) |
|
292 | 292 | |
|
293 | 293 | def remove_remote(name): |
|
294 | 294 | print('deleting key pair %s' % name) |
|
295 | 295 | key = ec2resource.KeyPair(name) |
|
296 | 296 | key.delete() |
|
297 | 297 | |
|
298 | 298 | def remove_local(name): |
|
299 | 299 | pub_full = key_path / ('keypair-%s.pub' % name) |
|
300 | 300 | priv_full = key_path / ('keypair-%s' % name) |
|
301 | 301 | |
|
302 | 302 | print('removing %s' % pub_full) |
|
303 | 303 | pub_full.unlink() |
|
304 | 304 | print('removing %s' % priv_full) |
|
305 | 305 | priv_full.unlink() |
|
306 | 306 | |
|
307 | 307 | local_existing = {} |
|
308 | 308 | |
|
309 | 309 | for f in sorted(os.listdir(key_path)): |
|
310 | 310 | if not f.startswith('keypair-') or not f.endswith('.pub'): |
|
311 | 311 | continue |
|
312 | 312 | |
|
313 | 313 | name = f[len('keypair-') : -len('.pub')] |
|
314 | 314 | |
|
315 | 315 | pub_full = key_path / f |
|
316 | 316 | priv_full = key_path / ('keypair-%s' % name) |
|
317 | 317 | |
|
318 | 318 | with open(pub_full, 'r', encoding='ascii') as fh: |
|
319 | 319 | data = fh.read() |
|
320 | 320 | |
|
321 | 321 | if not data.startswith('ssh-rsa '): |
|
322 | 322 | print( |
|
323 | 323 | 'unexpected format for key pair file: %s; removing' % pub_full |
|
324 | 324 | ) |
|
325 | 325 | pub_full.unlink() |
|
326 | 326 | priv_full.unlink() |
|
327 | 327 | continue |
|
328 | 328 | |
|
329 | 329 | local_existing[name] = rsa_key_fingerprint(priv_full) |
|
330 | 330 | |
|
331 | 331 | for name in sorted(set(remote_existing) | set(local_existing)): |
|
332 | 332 | if name not in local_existing: |
|
333 | 333 | actual = '%s%s' % (prefix, name) |
|
334 | 334 | print('remote key %s does not exist locally' % name) |
|
335 | 335 | remove_remote(actual) |
|
336 | 336 | del remote_existing[name] |
|
337 | 337 | |
|
338 | 338 | elif name not in remote_existing: |
|
339 | 339 | print('local key %s does not exist remotely' % name) |
|
340 | 340 | remove_local(name) |
|
341 | 341 | del local_existing[name] |
|
342 | 342 | |
|
343 | 343 | elif remote_existing[name] != local_existing[name]: |
|
344 | 344 | print( |
|
345 | 345 | 'key fingerprint mismatch for %s; ' |
|
346 | 346 | 'removing from local and remote' % name |
|
347 | 347 | ) |
|
348 | 348 | remove_local(name) |
|
349 | 349 | remove_remote('%s%s' % (prefix, name)) |
|
350 | 350 | del local_existing[name] |
|
351 | 351 | del remote_existing[name] |
|
352 | 352 | |
|
353 | 353 | missing = KEY_PAIRS - set(remote_existing) |
|
354 | 354 | |
|
355 | 355 | for name in sorted(missing): |
|
356 | 356 | actual = '%s%s' % (prefix, name) |
|
357 | 357 | print('creating key pair %s' % actual) |
|
358 | 358 | |
|
359 | 359 | priv_full = key_path / ('keypair-%s' % name) |
|
360 | 360 | pub_full = key_path / ('keypair-%s.pub' % name) |
|
361 | 361 | |
|
362 | 362 | kp = ec2resource.create_key_pair(KeyName=actual) |
|
363 | 363 | |
|
364 | 364 | with priv_full.open('w', encoding='ascii') as fh: |
|
365 | 365 | fh.write(kp.key_material) |
|
366 | 366 | fh.write('\n') |
|
367 | 367 | |
|
368 | 368 | priv_full.chmod(0o0600) |
|
369 | 369 | |
|
370 | 370 | # SSH public key can be extracted via `ssh-keygen`. |
|
371 | 371 | with pub_full.open('w', encoding='ascii') as fh: |
|
372 | 372 | subprocess.run( |
|
373 | 373 | ['ssh-keygen', '-y', '-f', str(priv_full)], |
|
374 | 374 | stdout=fh, |
|
375 | 375 | check=True, |
|
376 | 376 | ) |
|
377 | 377 | |
|
378 | 378 | pub_full.chmod(0o0600) |
|
379 | 379 | |
|
380 | 380 | |
|
381 | 381 | def delete_instance_profile(profile): |
|
382 | 382 | for role in profile.roles: |
|
383 | 383 | print( |
|
384 | 384 | 'removing role %s from instance profile %s' |
|
385 | 385 | % (role.name, profile.name) |
|
386 | 386 | ) |
|
387 | 387 | profile.remove_role(RoleName=role.name) |
|
388 | 388 | |
|
389 | 389 | print('deleting instance profile %s' % profile.name) |
|
390 | 390 | profile.delete() |
|
391 | 391 | |
|
392 | 392 | |
|
393 | 393 | def ensure_iam_state(iamclient, iamresource, prefix='hg-'): |
|
394 | 394 | """Ensure IAM state is in sync with our canonical definition.""" |
|
395 | 395 | |
|
396 | 396 | remote_profiles = {} |
|
397 | 397 | |
|
398 | 398 | for profile in iamresource.instance_profiles.all(): |
|
399 | 399 | if profile.name.startswith(prefix): |
|
400 | 400 | remote_profiles[profile.name[len(prefix) :]] = profile |
|
401 | 401 | |
|
402 | 402 | for name in sorted(set(remote_profiles) - set(IAM_INSTANCE_PROFILES)): |
|
403 | 403 | delete_instance_profile(remote_profiles[name]) |
|
404 | 404 | del remote_profiles[name] |
|
405 | 405 | |
|
406 | 406 | remote_roles = {} |
|
407 | 407 | |
|
408 | 408 | for role in iamresource.roles.all(): |
|
409 | 409 | if role.name.startswith(prefix): |
|
410 | 410 | remote_roles[role.name[len(prefix) :]] = role |
|
411 | 411 | |
|
412 | 412 | for name in sorted(set(remote_roles) - set(IAM_ROLES)): |
|
413 | 413 | role = remote_roles[name] |
|
414 | 414 | |
|
415 | 415 | print('removing role %s' % role.name) |
|
416 | 416 | role.delete() |
|
417 | 417 | del remote_roles[name] |
|
418 | 418 | |
|
419 | 419 | # We've purged remote state that doesn't belong. Create missing |
|
420 | 420 | # instance profiles and roles. |
|
421 | 421 | for name in sorted(set(IAM_INSTANCE_PROFILES) - set(remote_profiles)): |
|
422 | 422 | actual = '%s%s' % (prefix, name) |
|
423 | 423 | print('creating IAM instance profile %s' % actual) |
|
424 | 424 | |
|
425 | 425 | profile = iamresource.create_instance_profile( |
|
426 | 426 | InstanceProfileName=actual |
|
427 | 427 | ) |
|
428 | 428 | remote_profiles[name] = profile |
|
429 | 429 | |
|
430 | 430 | waiter = iamclient.get_waiter('instance_profile_exists') |
|
431 | 431 | waiter.wait(InstanceProfileName=actual) |
|
432 | 432 | print('IAM instance profile %s is available' % actual) |
|
433 | 433 | |
|
434 | 434 | for name in sorted(set(IAM_ROLES) - set(remote_roles)): |
|
435 | 435 | entry = IAM_ROLES[name] |
|
436 | 436 | |
|
437 | 437 | actual = '%s%s' % (prefix, name) |
|
438 | 438 | print('creating IAM role %s' % actual) |
|
439 | 439 | |
|
440 | 440 | role = iamresource.create_role( |
|
441 | 441 | RoleName=actual, |
|
442 | 442 | Description=entry['description'], |
|
443 | 443 | AssumeRolePolicyDocument=ASSUME_ROLE_POLICY_DOCUMENT, |
|
444 | 444 | ) |
|
445 | 445 | |
|
446 | 446 | waiter = iamclient.get_waiter('role_exists') |
|
447 | 447 | waiter.wait(RoleName=actual) |
|
448 | 448 | print('IAM role %s is available' % actual) |
|
449 | 449 | |
|
450 | 450 | remote_roles[name] = role |
|
451 | 451 | |
|
452 | 452 | for arn in entry['policy_arns']: |
|
453 | 453 | print('attaching policy %s to %s' % (arn, role.name)) |
|
454 | 454 | role.attach_policy(PolicyArn=arn) |
|
455 | 455 | |
|
456 | 456 | # Now reconcile state of profiles. |
|
457 | 457 | for name, meta in sorted(IAM_INSTANCE_PROFILES.items()): |
|
458 | 458 | profile = remote_profiles[name] |
|
459 | 459 | wanted = {'%s%s' % (prefix, role) for role in meta['roles']} |
|
460 | 460 | have = {role.name for role in profile.roles} |
|
461 | 461 | |
|
462 | 462 | for role in sorted(have - wanted): |
|
463 | 463 | print('removing role %s from %s' % (role, profile.name)) |
|
464 | 464 | profile.remove_role(RoleName=role) |
|
465 | 465 | |
|
466 | 466 | for role in sorted(wanted - have): |
|
467 | 467 | print('adding role %s to %s' % (role, profile.name)) |
|
468 | 468 | profile.add_role(RoleName=role) |
|
469 | 469 | |
|
470 | 470 | |
|
471 | 471 | def find_image(ec2resource, owner_id, name, reverse_sort_field=None): |
|
472 | 472 | """Find an AMI by its owner ID and name.""" |
|
473 | 473 | |
|
474 | 474 | images = ec2resource.images.filter( |
|
475 | 475 | Filters=[ |
|
476 | 476 | { |
|
477 | 477 | 'Name': 'owner-id', |
|
478 | 478 | 'Values': [owner_id], |
|
479 | 479 | }, |
|
480 | 480 | { |
|
481 | 481 | 'Name': 'state', |
|
482 | 482 | 'Values': ['available'], |
|
483 | 483 | }, |
|
484 | 484 | { |
|
485 | 485 | 'Name': 'image-type', |
|
486 | 486 | 'Values': ['machine'], |
|
487 | 487 | }, |
|
488 | 488 | { |
|
489 | 489 | 'Name': 'name', |
|
490 | 490 | 'Values': [name], |
|
491 | 491 | }, |
|
492 | 492 | ] |
|
493 | 493 | ) |
|
494 | 494 | |
|
495 | 495 | if reverse_sort_field: |
|
496 | 496 | images = sorted( |
|
497 | 497 | images, |
|
498 | 498 | key=lambda image: getattr(image, reverse_sort_field), |
|
499 | 499 | reverse=True, |
|
500 | 500 | ) |
|
501 | 501 | |
|
502 | 502 | for image in images: |
|
503 | 503 | return image |
|
504 | 504 | |
|
505 | 505 | raise Exception('unable to find image for %s' % name) |
|
506 | 506 | |
|
507 | 507 | |
|
508 | 508 | def ensure_security_groups(ec2resource, prefix='hg-'): |
|
509 | 509 | """Ensure all necessary Mercurial security groups are present. |
|
510 | 510 | |
|
511 | 511 | All security groups are prefixed with ``hg-`` by default. Any security |
|
512 | 512 | groups having this prefix but aren't in our list are deleted. |
|
513 | 513 | """ |
|
514 | 514 | existing = {} |
|
515 | 515 | |
|
516 | 516 | for group in ec2resource.security_groups.all(): |
|
517 | 517 | if group.group_name.startswith(prefix): |
|
518 | 518 | existing[group.group_name[len(prefix) :]] = group |
|
519 | 519 | |
|
520 | 520 | purge = set(existing) - set(SECURITY_GROUPS) |
|
521 | 521 | |
|
522 | 522 | for name in sorted(purge): |
|
523 | 523 | group = existing[name] |
|
524 | 524 | print('removing legacy security group: %s' % group.group_name) |
|
525 | 525 | group.delete() |
|
526 | 526 | |
|
527 | 527 | security_groups = {} |
|
528 | 528 | |
|
529 | 529 | for name, group in sorted(SECURITY_GROUPS.items()): |
|
530 | 530 | if name in existing: |
|
531 | 531 | security_groups[name] = existing[name] |
|
532 | 532 | continue |
|
533 | 533 | |
|
534 | 534 | actual = '%s%s' % (prefix, name) |
|
535 | 535 | print('adding security group %s' % actual) |
|
536 | 536 | |
|
537 | 537 | group_res = ec2resource.create_security_group( |
|
538 | 538 | Description=group['description'], |
|
539 | 539 | GroupName=actual, |
|
540 | 540 | ) |
|
541 | 541 | |
|
542 | 542 | group_res.authorize_ingress( |
|
543 | 543 | IpPermissions=group['ingress'], |
|
544 | 544 | ) |
|
545 | 545 | |
|
546 | 546 | security_groups[name] = group_res |
|
547 | 547 | |
|
548 | 548 | return security_groups |
|
549 | 549 | |
|
550 | 550 | |
|
551 | 551 | def terminate_ec2_instances(ec2resource, prefix='hg-'): |
|
552 | 552 | """Terminate all EC2 instances managed by us.""" |
|
553 | 553 | waiting = [] |
|
554 | 554 | |
|
555 | 555 | for instance in ec2resource.instances.all(): |
|
556 | 556 | if instance.state['Name'] == 'terminated': |
|
557 | 557 | continue |
|
558 | 558 | |
|
559 | 559 | for tag in instance.tags or []: |
|
560 | 560 | if tag['Key'] == 'Name' and tag['Value'].startswith(prefix): |
|
561 | 561 | print('terminating %s' % instance.id) |
|
562 | 562 | instance.terminate() |
|
563 | 563 | waiting.append(instance) |
|
564 | 564 | |
|
565 | 565 | for instance in waiting: |
|
566 | 566 | instance.wait_until_terminated() |
|
567 | 567 | |
|
568 | 568 | |
|
569 | 569 | def remove_resources(c, prefix='hg-'): |
|
570 | 570 | """Purge all of our resources in this EC2 region.""" |
|
571 | 571 | ec2resource = c.ec2resource |
|
572 | 572 | iamresource = c.iamresource |
|
573 | 573 | |
|
574 | 574 | terminate_ec2_instances(ec2resource, prefix=prefix) |
|
575 | 575 | |
|
576 | 576 | for image in ec2resource.images.filter(Owners=['self']): |
|
577 | 577 | if image.name.startswith(prefix): |
|
578 | 578 | remove_ami(ec2resource, image) |
|
579 | 579 | |
|
580 | 580 | for group in ec2resource.security_groups.all(): |
|
581 | 581 | if group.group_name.startswith(prefix): |
|
582 | 582 | print('removing security group %s' % group.group_name) |
|
583 | 583 | group.delete() |
|
584 | 584 | |
|
585 | 585 | for profile in iamresource.instance_profiles.all(): |
|
586 | 586 | if profile.name.startswith(prefix): |
|
587 | 587 | delete_instance_profile(profile) |
|
588 | 588 | |
|
589 | 589 | for role in iamresource.roles.all(): |
|
590 | 590 | if role.name.startswith(prefix): |
|
591 | 591 | for p in role.attached_policies.all(): |
|
592 | 592 | print('detaching policy %s from %s' % (p.arn, role.name)) |
|
593 | 593 | role.detach_policy(PolicyArn=p.arn) |
|
594 | 594 | |
|
595 | 595 | print('removing role %s' % role.name) |
|
596 | 596 | role.delete() |
|
597 | 597 | |
|
598 | 598 | |
|
599 | 599 | def wait_for_ip_addresses(instances): |
|
600 | 600 | """Wait for the public IP addresses of an iterable of instances.""" |
|
601 | 601 | for instance in instances: |
|
602 | 602 | while True: |
|
603 | 603 | if not instance.public_ip_address: |
|
604 | 604 | time.sleep(2) |
|
605 | 605 | instance.reload() |
|
606 | 606 | continue |
|
607 | 607 | |
|
608 | 608 | print( |
|
609 | 609 | 'public IP address for %s: %s' |
|
610 | 610 | % (instance.id, instance.public_ip_address) |
|
611 | 611 | ) |
|
612 | 612 | break |
|
613 | 613 | |
|
614 | 614 | |
|
615 | 615 | def remove_ami(ec2resource, image): |
|
616 | 616 | """Remove an AMI and its underlying snapshots.""" |
|
617 | 617 | snapshots = [] |
|
618 | 618 | |
|
619 | 619 | for device in image.block_device_mappings: |
|
620 | 620 | if 'Ebs' in device: |
|
621 | 621 | snapshots.append(ec2resource.Snapshot(device['Ebs']['SnapshotId'])) |
|
622 | 622 | |
|
623 | 623 | print('deregistering %s' % image.id) |
|
624 | 624 | image.deregister() |
|
625 | 625 | |
|
626 | 626 | for snapshot in snapshots: |
|
627 | 627 | print('deleting snapshot %s' % snapshot.id) |
|
628 | 628 | snapshot.delete() |
|
629 | 629 | |
|
630 | 630 | |
|
631 | 631 | def wait_for_ssm(ssmclient, instances): |
|
632 | 632 | """Wait for SSM to come online for an iterable of instance IDs.""" |
|
633 | 633 | while True: |
|
634 | 634 | res = ssmclient.describe_instance_information( |
|
635 | 635 | Filters=[ |
|
636 | 636 | { |
|
637 | 637 | 'Key': 'InstanceIds', |
|
638 | 638 | 'Values': [i.id for i in instances], |
|
639 | 639 | }, |
|
640 | 640 | ], |
|
641 | 641 | ) |
|
642 | 642 | |
|
643 | 643 | available = len(res['InstanceInformationList']) |
|
644 | 644 | wanted = len(instances) |
|
645 | 645 | |
|
646 | 646 | print('%d/%d instances available in SSM' % (available, wanted)) |
|
647 | 647 | |
|
648 | 648 | if available == wanted: |
|
649 | 649 | return |
|
650 | 650 | |
|
651 | 651 | time.sleep(2) |
|
652 | 652 | |
|
653 | 653 | |
|
654 | 654 | def run_ssm_command(ssmclient, instances, document_name, parameters): |
|
655 | 655 | """Run a PowerShell script on an EC2 instance.""" |
|
656 | 656 | |
|
657 | 657 | res = ssmclient.send_command( |
|
658 | 658 | InstanceIds=[i.id for i in instances], |
|
659 | 659 | DocumentName=document_name, |
|
660 | 660 | Parameters=parameters, |
|
661 | 661 | CloudWatchOutputConfig={ |
|
662 | 662 | 'CloudWatchOutputEnabled': True, |
|
663 | 663 | }, |
|
664 | 664 | ) |
|
665 | 665 | |
|
666 | 666 | command_id = res['Command']['CommandId'] |
|
667 | 667 | |
|
668 | 668 | for instance in instances: |
|
669 | 669 | while True: |
|
670 | 670 | try: |
|
671 | 671 | res = ssmclient.get_command_invocation( |
|
672 | 672 | CommandId=command_id, |
|
673 | 673 | InstanceId=instance.id, |
|
674 | 674 | ) |
|
675 | 675 | except botocore.exceptions.ClientError as e: |
|
676 | 676 | if e.response['Error']['Code'] == 'InvocationDoesNotExist': |
|
677 | 677 | print('could not find SSM command invocation; waiting') |
|
678 | 678 | time.sleep(1) |
|
679 | 679 | continue |
|
680 | 680 | else: |
|
681 | 681 | raise |
|
682 | 682 | |
|
683 | 683 | if res['Status'] == 'Success': |
|
684 | 684 | break |
|
685 | 685 | elif res['Status'] in ('Pending', 'InProgress', 'Delayed'): |
|
686 | 686 | time.sleep(2) |
|
687 | 687 | else: |
|
688 | 688 | raise Exception( |
|
689 | 689 | 'command failed on %s: %s' % (instance.id, res['Status']) |
|
690 | 690 | ) |
|
691 | 691 | |
|
692 | 692 | |
|
693 | 693 | @contextlib.contextmanager |
|
694 | 694 | def temporary_ec2_instances(ec2resource, config): |
|
695 | 695 | """Create temporary EC2 instances. |
|
696 | 696 | |
|
697 | 697 | This is a proxy to ``ec2client.run_instances(**config)`` that takes care of |
|
698 | 698 | managing the lifecycle of the instances. |
|
699 | 699 | |
|
700 | 700 | When the context manager exits, the instances are terminated. |
|
701 | 701 | |
|
702 | 702 | The context manager evaluates to the list of data structures |
|
703 | 703 | describing each created instance. The instances may not be available |
|
704 | 704 | for work immediately: it is up to the caller to wait for the instance |
|
705 | 705 | to start responding. |
|
706 | 706 | """ |
|
707 | 707 | |
|
708 | 708 | ids = None |
|
709 | 709 | |
|
710 | 710 | try: |
|
711 | 711 | res = ec2resource.create_instances(**config) |
|
712 | 712 | |
|
713 | 713 | ids = [i.id for i in res] |
|
714 | 714 | print('started instances: %s' % ' '.join(ids)) |
|
715 | 715 | |
|
716 | 716 | yield res |
|
717 | 717 | finally: |
|
718 | 718 | if ids: |
|
719 | 719 | print('terminating instances: %s' % ' '.join(ids)) |
|
720 | 720 | for instance in res: |
|
721 | 721 | instance.terminate() |
|
722 | 722 | print('terminated %d instances' % len(ids)) |
|
723 | 723 | |
|
724 | 724 | |
|
725 | 725 | @contextlib.contextmanager |
|
726 | 726 | def create_temp_windows_ec2_instances( |
|
727 | 727 | c: AWSConnection, config, bootstrap: bool = False |
|
728 | 728 | ): |
|
729 | 729 | """Create temporary Windows EC2 instances. |
|
730 | 730 | |
|
731 | 731 | This is a higher-level wrapper around ``create_temp_ec2_instances()`` that |
|
732 | 732 | configures the Windows instance for Windows Remote Management. The emitted |
|
733 | 733 | instances will have a ``winrm_client`` attribute containing a |
|
734 | 734 | ``pypsrp.client.Client`` instance bound to the instance. |
|
735 | 735 | """ |
|
736 | 736 | if 'IamInstanceProfile' in config: |
|
737 | 737 | raise ValueError('IamInstanceProfile cannot be provided in config') |
|
738 | 738 | if 'UserData' in config: |
|
739 | 739 | raise ValueError('UserData cannot be provided in config') |
|
740 | 740 | |
|
741 | 741 | password = c.automation.default_password() |
|
742 | 742 | |
|
743 | 743 | config = copy.deepcopy(config) |
|
744 | 744 | config['IamInstanceProfile'] = { |
|
745 | 745 | 'Name': 'hg-ephemeral-ec2-1', |
|
746 | 746 | } |
|
747 | 747 | config.setdefault('TagSpecifications', []).append( |
|
748 | 748 | { |
|
749 | 749 | 'ResourceType': 'instance', |
|
750 | 750 | 'Tags': [{'Key': 'Name', 'Value': 'hg-temp-windows'}], |
|
751 | 751 | } |
|
752 | 752 | ) |
|
753 | 753 | |
|
754 | 754 | if bootstrap: |
|
755 | 755 | config['UserData'] = WINDOWS_USER_DATA % password |
|
756 | 756 | |
|
757 | 757 | with temporary_ec2_instances(c.ec2resource, config) as instances: |
|
758 | 758 | wait_for_ip_addresses(instances) |
|
759 | 759 | |
|
760 | 760 | print('waiting for Windows Remote Management service...') |
|
761 | 761 | |
|
762 | 762 | for instance in instances: |
|
763 | 763 | client = wait_for_winrm( |
|
764 | 764 | instance.public_ip_address, 'Administrator', password |
|
765 | 765 | ) |
|
766 | 766 | print('established WinRM connection to %s' % instance.id) |
|
767 | 767 | instance.winrm_client = client |
|
768 | 768 | |
|
769 | 769 | yield instances |
|
770 | 770 | |
|
771 | 771 | |
|
772 | 772 | def resolve_fingerprint(fingerprint): |
|
773 | 773 | fingerprint = json.dumps(fingerprint, sort_keys=True) |
|
774 | 774 | return hashlib.sha256(fingerprint.encode('utf-8')).hexdigest() |
|
775 | 775 | |
|
776 | 776 | |
|
777 | 777 | def find_and_reconcile_image(ec2resource, name, fingerprint): |
|
778 | 778 | """Attempt to find an existing EC2 AMI with a name and fingerprint. |
|
779 | 779 | |
|
780 | 780 | If an image with the specified fingerprint is found, it is returned. |
|
781 | 781 | Otherwise None is returned. |
|
782 | 782 | |
|
783 | 783 | Existing images for the specified name that don't have the specified |
|
784 | 784 | fingerprint or are missing required metadata or deleted. |
|
785 | 785 | """ |
|
786 | 786 | # Find existing AMIs with this name and delete the ones that are invalid. |
|
787 | 787 | # Store a reference to a good image so it can be returned one the |
|
788 | 788 | # image state is reconciled. |
|
789 | 789 | images = ec2resource.images.filter( |
|
790 | 790 | Filters=[{'Name': 'name', 'Values': [name]}] |
|
791 | 791 | ) |
|
792 | 792 | |
|
793 | 793 | existing_image = None |
|
794 | 794 | |
|
795 | 795 | for image in images: |
|
796 | 796 | if image.tags is None: |
|
797 | 797 | print( |
|
798 | 798 | 'image %s for %s lacks required tags; removing' |
|
799 | 799 | % (image.id, image.name) |
|
800 | 800 | ) |
|
801 | 801 | remove_ami(ec2resource, image) |
|
802 | 802 | else: |
|
803 | 803 | tags = {t['Key']: t['Value'] for t in image.tags} |
|
804 | 804 | |
|
805 | 805 | if tags.get('HGIMAGEFINGERPRINT') == fingerprint: |
|
806 | 806 | existing_image = image |
|
807 | 807 | else: |
|
808 | 808 | print( |
|
809 | 809 | 'image %s for %s has wrong fingerprint; removing' |
|
810 | 810 | % (image.id, image.name) |
|
811 | 811 | ) |
|
812 | 812 | remove_ami(ec2resource, image) |
|
813 | 813 | |
|
814 | 814 | return existing_image |
|
815 | 815 | |
|
816 | 816 | |
|
817 | 817 | def create_ami_from_instance( |
|
818 | 818 | ec2client, instance, name, description, fingerprint |
|
819 | 819 | ): |
|
820 | 820 | """Create an AMI from a running instance. |
|
821 | 821 | |
|
822 | 822 | Returns the ``ec2resource.Image`` representing the created AMI. |
|
823 | 823 | """ |
|
824 | 824 | instance.stop() |
|
825 | 825 | |
|
826 | 826 | ec2client.get_waiter('instance_stopped').wait( |
|
827 | 827 | InstanceIds=[instance.id], |
|
828 | 828 | WaiterConfig={ |
|
829 | 829 | 'Delay': 5, |
|
830 | 830 | }, |
|
831 | 831 | ) |
|
832 | 832 | print('%s is stopped' % instance.id) |
|
833 | 833 | |
|
834 | 834 | image = instance.create_image( |
|
835 | 835 | Name=name, |
|
836 | 836 | Description=description, |
|
837 | 837 | ) |
|
838 | 838 | |
|
839 | 839 | image.create_tags( |
|
840 | 840 | Tags=[ |
|
841 | 841 | { |
|
842 | 842 | 'Key': 'HGIMAGEFINGERPRINT', |
|
843 | 843 | 'Value': fingerprint, |
|
844 | 844 | }, |
|
845 | 845 | ] |
|
846 | 846 | ) |
|
847 | 847 | |
|
848 | 848 | print('waiting for image %s' % image.id) |
|
849 | 849 | |
|
850 | 850 | ec2client.get_waiter('image_available').wait( |
|
851 | 851 | ImageIds=[image.id], |
|
852 | 852 | ) |
|
853 | 853 | |
|
854 | 854 | print('image %s available as %s' % (image.id, image.name)) |
|
855 | 855 | |
|
856 | 856 | return image |
|
857 | 857 | |
|
858 | 858 | |
|
859 | 859 | def ensure_linux_dev_ami(c: AWSConnection, distro='debian10', prefix='hg-'): |
|
860 | 860 | """Ensures a Linux development AMI is available and up-to-date. |
|
861 | 861 | |
|
862 | 862 | Returns an ``ec2.Image`` of either an existing AMI or a newly-built one. |
|
863 | 863 | """ |
|
864 | 864 | ec2client = c.ec2client |
|
865 | 865 | ec2resource = c.ec2resource |
|
866 | 866 | |
|
867 | 867 | name = '%s%s-%s' % (prefix, 'linux-dev', distro) |
|
868 | 868 | |
|
869 | 869 | if distro == 'debian9': |
|
870 | 870 | image = find_image( |
|
871 | 871 | ec2resource, |
|
872 | 872 | DEBIAN_ACCOUNT_ID, |
|
873 | 873 | 'debian-stretch-hvm-x86_64-gp2-2019-09-08-17994', |
|
874 | 874 | ) |
|
875 | 875 | ssh_username = 'admin' |
|
876 | 876 | elif distro == 'debian10': |
|
877 | 877 | image = find_image( |
|
878 | 878 | ec2resource, |
|
879 | 879 | DEBIAN_ACCOUNT_ID_2, |
|
880 | 880 | 'debian-10-amd64-20190909-10', |
|
881 | 881 | ) |
|
882 | 882 | ssh_username = 'admin' |
|
883 | 883 | elif distro == 'ubuntu18.04': |
|
884 | 884 | image = find_image( |
|
885 | 885 | ec2resource, |
|
886 | 886 | UBUNTU_ACCOUNT_ID, |
|
887 | 887 | 'ubuntu/images/hvm-ssd/ubuntu-bionic-18.04-amd64-server-20190918', |
|
888 | 888 | ) |
|
889 | 889 | ssh_username = 'ubuntu' |
|
890 | 890 | elif distro == 'ubuntu19.04': |
|
891 | 891 | image = find_image( |
|
892 | 892 | ec2resource, |
|
893 | 893 | UBUNTU_ACCOUNT_ID, |
|
894 | 894 | 'ubuntu/images/hvm-ssd/ubuntu-disco-19.04-amd64-server-20190918', |
|
895 | 895 | ) |
|
896 | 896 | ssh_username = 'ubuntu' |
|
897 | 897 | else: |
|
898 | 898 | raise ValueError('unsupported Linux distro: %s' % distro) |
|
899 | 899 | |
|
900 | 900 | config = { |
|
901 | 901 | 'BlockDeviceMappings': [ |
|
902 | 902 | { |
|
903 | 903 | 'DeviceName': image.block_device_mappings[0]['DeviceName'], |
|
904 | 904 | 'Ebs': { |
|
905 | 905 | 'DeleteOnTermination': True, |
|
906 | 906 | 'VolumeSize': 10, |
|
907 | 907 | 'VolumeType': 'gp2', |
|
908 | 908 | }, |
|
909 | 909 | }, |
|
910 | 910 | ], |
|
911 | 911 | 'EbsOptimized': True, |
|
912 | 912 | 'ImageId': image.id, |
|
913 | 913 | 'InstanceInitiatedShutdownBehavior': 'stop', |
|
914 | 914 | # 8 VCPUs for compiling Python. |
|
915 | 915 | 'InstanceType': 't3.2xlarge', |
|
916 | 916 | 'KeyName': '%sautomation' % prefix, |
|
917 | 917 | 'MaxCount': 1, |
|
918 | 918 | 'MinCount': 1, |
|
919 | 919 | 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id], |
|
920 | 920 | } |
|
921 | 921 | |
|
922 | 922 | requirements2_path = ( |
|
923 | 923 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py2.txt' |
|
924 | 924 | ) |
|
925 | 925 | requirements3_path = ( |
|
926 | 926 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.txt' |
|
927 | 927 | ) |
|
928 | requirements35_path = ( | |
|
929 | pathlib.Path(__file__).parent.parent / 'linux-requirements-py3.5.txt' | |
|
930 | ) | |
|
928 | 931 | with requirements2_path.open('r', encoding='utf-8') as fh: |
|
929 | 932 | requirements2 = fh.read() |
|
930 | 933 | with requirements3_path.open('r', encoding='utf-8') as fh: |
|
931 | 934 | requirements3 = fh.read() |
|
935 | with requirements35_path.open('r', encoding='utf-8') as fh: | |
|
936 | requirements35 = fh.read() | |
|
932 | 937 | |
|
933 | 938 | # Compute a deterministic fingerprint to determine whether image needs to |
|
934 | 939 | # be regenerated. |
|
935 | 940 | fingerprint = resolve_fingerprint( |
|
936 | 941 | { |
|
937 | 942 | 'instance_config': config, |
|
938 | 943 | 'bootstrap_script': BOOTSTRAP_DEBIAN, |
|
939 | 944 | 'requirements_py2': requirements2, |
|
940 | 945 | 'requirements_py3': requirements3, |
|
946 | 'requirements_py35': requirements35, | |
|
941 | 947 | } |
|
942 | 948 | ) |
|
943 | 949 | |
|
944 | 950 | existing_image = find_and_reconcile_image(ec2resource, name, fingerprint) |
|
945 | 951 | |
|
946 | 952 | if existing_image: |
|
947 | 953 | return existing_image |
|
948 | 954 | |
|
949 | 955 | print('no suitable %s image found; creating one...' % name) |
|
950 | 956 | |
|
951 | 957 | with temporary_ec2_instances(ec2resource, config) as instances: |
|
952 | 958 | wait_for_ip_addresses(instances) |
|
953 | 959 | |
|
954 | 960 | instance = instances[0] |
|
955 | 961 | |
|
956 | 962 | client = wait_for_ssh( |
|
957 | 963 | instance.public_ip_address, |
|
958 | 964 | 22, |
|
959 | 965 | username=ssh_username, |
|
960 | 966 | key_filename=str(c.key_pair_path_private('automation')), |
|
961 | 967 | ) |
|
962 | 968 | |
|
963 | 969 | home = '/home/%s' % ssh_username |
|
964 | 970 | |
|
965 | 971 | with client: |
|
966 | 972 | print('connecting to SSH server') |
|
967 | 973 | sftp = client.open_sftp() |
|
968 | 974 | |
|
969 | 975 | print('uploading bootstrap files') |
|
970 | 976 | with sftp.open('%s/bootstrap' % home, 'wb') as fh: |
|
971 | 977 | fh.write(BOOTSTRAP_DEBIAN) |
|
972 | 978 | fh.chmod(0o0700) |
|
973 | 979 | |
|
974 | 980 | with sftp.open('%s/requirements-py2.txt' % home, 'wb') as fh: |
|
975 | 981 | fh.write(requirements2) |
|
976 | 982 | fh.chmod(0o0700) |
|
977 | 983 | |
|
978 | 984 | with sftp.open('%s/requirements-py3.txt' % home, 'wb') as fh: |
|
979 | 985 | fh.write(requirements3) |
|
980 | 986 | fh.chmod(0o0700) |
|
981 | 987 | |
|
988 | with sftp.open('%s/requirements-py3.5.txt' % home, 'wb') as fh: | |
|
989 | fh.write(requirements35) | |
|
990 | fh.chmod(0o0700) | |
|
991 | ||
|
982 | 992 | print('executing bootstrap') |
|
983 | 993 | chan, stdin, stdout = ssh_exec_command( |
|
984 | 994 | client, '%s/bootstrap' % home |
|
985 | 995 | ) |
|
986 | 996 | stdin.close() |
|
987 | 997 | |
|
988 | 998 | for line in stdout: |
|
989 | 999 | print(line, end='') |
|
990 | 1000 | |
|
991 | 1001 | res = chan.recv_exit_status() |
|
992 | 1002 | if res: |
|
993 | 1003 | raise Exception('non-0 exit from bootstrap: %d' % res) |
|
994 | 1004 | |
|
995 | 1005 | print( |
|
996 | 1006 | 'bootstrap completed; stopping %s to create %s' |
|
997 | 1007 | % (instance.id, name) |
|
998 | 1008 | ) |
|
999 | 1009 | |
|
1000 | 1010 | return create_ami_from_instance( |
|
1001 | 1011 | ec2client, |
|
1002 | 1012 | instance, |
|
1003 | 1013 | name, |
|
1004 | 1014 | 'Mercurial Linux development environment', |
|
1005 | 1015 | fingerprint, |
|
1006 | 1016 | ) |
|
1007 | 1017 | |
|
1008 | 1018 | |
|
1009 | 1019 | @contextlib.contextmanager |
|
1010 | 1020 | def temporary_linux_dev_instances( |
|
1011 | 1021 | c: AWSConnection, |
|
1012 | 1022 | image, |
|
1013 | 1023 | instance_type, |
|
1014 | 1024 | prefix='hg-', |
|
1015 | 1025 | ensure_extra_volume=False, |
|
1016 | 1026 | ): |
|
1017 | 1027 | """Create temporary Linux development EC2 instances. |
|
1018 | 1028 | |
|
1019 | 1029 | Context manager resolves to a list of ``ec2.Instance`` that were created |
|
1020 | 1030 | and are running. |
|
1021 | 1031 | |
|
1022 | 1032 | ``ensure_extra_volume`` can be set to ``True`` to require that instances |
|
1023 | 1033 | have a 2nd storage volume available other than the primary AMI volume. |
|
1024 | 1034 | For instance types with instance storage, this does nothing special. |
|
1025 | 1035 | But for instance types without instance storage, an additional EBS volume |
|
1026 | 1036 | will be added to the instance. |
|
1027 | 1037 | |
|
1028 | 1038 | Instances have an ``ssh_client`` attribute containing a paramiko SSHClient |
|
1029 | 1039 | instance bound to the instance. |
|
1030 | 1040 | |
|
1031 | 1041 | Instances have an ``ssh_private_key_path`` attribute containing the |
|
1032 | 1042 | str path to the SSH private key to connect to the instance. |
|
1033 | 1043 | """ |
|
1034 | 1044 | |
|
1035 | 1045 | block_device_mappings = [ |
|
1036 | 1046 | { |
|
1037 | 1047 | 'DeviceName': image.block_device_mappings[0]['DeviceName'], |
|
1038 | 1048 | 'Ebs': { |
|
1039 | 1049 | 'DeleteOnTermination': True, |
|
1040 | 1050 | 'VolumeSize': 12, |
|
1041 | 1051 | 'VolumeType': 'gp2', |
|
1042 | 1052 | }, |
|
1043 | 1053 | } |
|
1044 | 1054 | ] |
|
1045 | 1055 | |
|
1046 | 1056 | # This is not an exhaustive list of instance types having instance storage. |
|
1047 | 1057 | # But it covers the instance types this automation launches. |
|
1048 | 1058 | if ensure_extra_volume and not instance_type.startswith( |
|
1049 | 1059 | tuple(INSTANCE_TYPES_WITH_STORAGE) |
|
1050 | 1060 | ): |
|
1051 | 1061 | main_device = block_device_mappings[0]['DeviceName'] |
|
1052 | 1062 | |
|
1053 | 1063 | if main_device == 'xvda': |
|
1054 | 1064 | second_device = 'xvdb' |
|
1055 | 1065 | elif main_device == '/dev/sda1': |
|
1056 | 1066 | second_device = '/dev/sdb' |
|
1057 | 1067 | else: |
|
1058 | 1068 | raise ValueError( |
|
1059 | 1069 | 'unhandled primary EBS device name: %s' % main_device |
|
1060 | 1070 | ) |
|
1061 | 1071 | |
|
1062 | 1072 | block_device_mappings.append( |
|
1063 | 1073 | { |
|
1064 | 1074 | 'DeviceName': second_device, |
|
1065 | 1075 | 'Ebs': { |
|
1066 | 1076 | 'DeleteOnTermination': True, |
|
1067 | 1077 | 'VolumeSize': 8, |
|
1068 | 1078 | 'VolumeType': 'gp2', |
|
1069 | 1079 | }, |
|
1070 | 1080 | } |
|
1071 | 1081 | ) |
|
1072 | 1082 | |
|
1073 | 1083 | config = { |
|
1074 | 1084 | 'BlockDeviceMappings': block_device_mappings, |
|
1075 | 1085 | 'EbsOptimized': True, |
|
1076 | 1086 | 'ImageId': image.id, |
|
1077 | 1087 | 'InstanceInitiatedShutdownBehavior': 'terminate', |
|
1078 | 1088 | 'InstanceType': instance_type, |
|
1079 | 1089 | 'KeyName': '%sautomation' % prefix, |
|
1080 | 1090 | 'MaxCount': 1, |
|
1081 | 1091 | 'MinCount': 1, |
|
1082 | 1092 | 'SecurityGroupIds': [c.security_groups['linux-dev-1'].id], |
|
1083 | 1093 | } |
|
1084 | 1094 | |
|
1085 | 1095 | with temporary_ec2_instances(c.ec2resource, config) as instances: |
|
1086 | 1096 | wait_for_ip_addresses(instances) |
|
1087 | 1097 | |
|
1088 | 1098 | ssh_private_key_path = str(c.key_pair_path_private('automation')) |
|
1089 | 1099 | |
|
1090 | 1100 | for instance in instances: |
|
1091 | 1101 | client = wait_for_ssh( |
|
1092 | 1102 | instance.public_ip_address, |
|
1093 | 1103 | 22, |
|
1094 | 1104 | username='hg', |
|
1095 | 1105 | key_filename=ssh_private_key_path, |
|
1096 | 1106 | ) |
|
1097 | 1107 | |
|
1098 | 1108 | instance.ssh_client = client |
|
1099 | 1109 | instance.ssh_private_key_path = ssh_private_key_path |
|
1100 | 1110 | |
|
1101 | 1111 | try: |
|
1102 | 1112 | yield instances |
|
1103 | 1113 | finally: |
|
1104 | 1114 | for instance in instances: |
|
1105 | 1115 | instance.ssh_client.close() |
|
1106 | 1116 | |
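Taken together, temporary_linux_dev_instances() hands callers running, SSH-reachable instances for the lifetime of a with block and closes the connections on exit. A hypothetical caller, reusing this module's own idioms (conn, image, and the command are illustrative):

    # conn is an AWSConnection; image comes from ensure_linux_dev_ami().
    with temporary_linux_dev_instances(conn, image, 'c5.2xlarge') as instances:
        for instance in instances:
            chan, stdin, stdout = ssh_exec_command(
                instance.ssh_client, 'uname -a'
            )
            stdin.close()
            for line in stdout:
                print(line, end='')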
|
1107 | 1117 | |
|
1108 | 1118 | def ensure_windows_dev_ami( |
|
1109 | 1119 | c: AWSConnection, |
|
1110 | 1120 | prefix='hg-', |
|
1111 | 1121 | base_image_name=WINDOWS_BASE_IMAGE_NAME, |
|
1112 | 1122 | ): |
|
1113 | 1123 | """Ensure Windows Development AMI is available and up-to-date. |
|
1114 | 1124 | |
|
1115 | 1125 | If necessary, a modern AMI will be built by starting a temporary EC2 |
|
1116 | 1126 | instance and bootstrapping it. |
|
1117 | 1127 | |
|
1118 | 1128 | Obsolete AMIs will be deleted so there is only a single AMI having the |
|
1119 | 1129 | desired name. |
|
1120 | 1130 | |
|
1121 | 1131 | Returns an ``ec2.Image`` of either an existing AMI or a newly-built |
|
1122 | 1132 | one. |
|
1123 | 1133 | """ |
|
1124 | 1134 | ec2client = c.ec2client |
|
1125 | 1135 | ec2resource = c.ec2resource |
|
1126 | 1136 | ssmclient = c.session.client('ssm') |
|
1127 | 1137 | |
|
1128 | 1138 | name = '%s%s' % (prefix, 'windows-dev') |
|
1129 | 1139 | |
|
1130 | 1140 | image = find_image( |
|
1131 | 1141 | ec2resource, |
|
1132 | 1142 | AMAZON_ACCOUNT_ID, |
|
1133 | 1143 | base_image_name, |
|
1134 | 1144 | reverse_sort_field="name", |
|
1135 | 1145 | ) |
|
1136 | 1146 | |
|
1137 | 1147 | config = { |
|
1138 | 1148 | 'BlockDeviceMappings': [ |
|
1139 | 1149 | { |
|
1140 | 1150 | 'DeviceName': '/dev/sda1', |
|
1141 | 1151 | 'Ebs': { |
|
1142 | 1152 | 'DeleteOnTermination': True, |
|
1143 | 1153 | 'VolumeSize': 32, |
|
1144 | 1154 | 'VolumeType': 'gp2', |
|
1145 | 1155 | }, |
|
1146 | 1156 | } |
|
1147 | 1157 | ], |
|
1148 | 1158 | 'ImageId': image.id, |
|
1149 | 1159 | 'InstanceInitiatedShutdownBehavior': 'stop', |
|
1150 | 1160 | 'InstanceType': 't3.medium', |
|
1151 | 1161 | 'KeyName': '%sautomation' % prefix, |
|
1152 | 1162 | 'MaxCount': 1, |
|
1153 | 1163 | 'MinCount': 1, |
|
1154 | 1164 | 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], |
|
1155 | 1165 | } |
|
1156 | 1166 | |
|
1157 | 1167 | commands = [ |
|
1158 | 1168 | # Need to start the service so sshd_config is generated. |
|
1159 | 1169 | 'Start-Service sshd', |
|
1160 | 1170 | 'Write-Output "modifying sshd_config"', |
|
1161 | 1171 | r'$content = Get-Content C:\ProgramData\ssh\sshd_config', |
|
1162 | 1172 | '$content = $content -replace "Match Group administrators","" -replace "AuthorizedKeysFile __PROGRAMDATA__/ssh/administrators_authorized_keys",""', |
|
1163 | 1173 | r'$content | Set-Content C:\ProgramData\ssh\sshd_config', |
|
1164 | 1174 | 'Import-Module OpenSSHUtils', |
|
1165 | 1175 | r'Repair-SshdConfigPermission C:\ProgramData\ssh\sshd_config -Confirm:$false', |
|
1166 | 1176 | 'Restart-Service sshd', |
|
1167 | 1177 | 'Write-Output "installing OpenSSL client"', |
|
1168 | 1178 | 'Add-WindowsCapability -Online -Name OpenSSH.Client~~~~0.0.1.0', |
|
1169 | 1179 | 'Set-Service -Name sshd -StartupType "Automatic"', |
|
1170 | 1180 | 'Write-Output "OpenSSH server running"', |
|
1171 | 1181 | ] |
|
1172 | 1182 | |
|
1173 | 1183 | with INSTALL_WINDOWS_DEPENDENCIES.open('r', encoding='utf-8') as fh: |
|
1174 | 1184 | commands.extend(l.rstrip() for l in fh) |
|
1175 | 1185 | |
|
1176 | 1186 | # Schedule run of EC2Launch on next boot. This ensures that UserData |
|
1177 | 1187 | # is executed. |
|
1178 | 1188 | # We disable setComputerName because it forces a reboot. |
|
1179 | 1189 | # We set an explicit admin password because this causes UserData to run |
|
1180 | 1190 | # as Administrator instead of System. |
|
1181 | 1191 | commands.extend( |
|
1182 | 1192 | [ |
|
1183 | 1193 | r'''Set-Content -Path C:\ProgramData\Amazon\EC2-Windows\Launch\Config\LaunchConfig.json ''' |
|
1184 | 1194 | r'''-Value '{"setComputerName": false, "setWallpaper": true, "addDnsSuffixList": true, ''' |
|
1185 | 1195 | r'''"extendBootVolumeSize": true, "handleUserData": true, ''' |
|
1186 | 1196 | r'''"adminPasswordType": "Specify", "adminPassword": "%s"}' ''' |
|
1187 | 1197 | % c.automation.default_password(), |
|
1188 | 1198 | r'C:\ProgramData\Amazon\EC2-Windows\Launch\Scripts\InitializeInstance.ps1 ' |
|
1189 | 1199 | r'-Schedule', |
|
1190 | 1200 | ] |
|
1191 | 1201 | ) |
|
1192 | 1202 | |
|
1193 | 1203 | # Disable Windows Defender when bootstrapping because it just slows |
|
1194 | 1204 | # things down. |
|
1195 | 1205 | commands.insert(0, 'Set-MpPreference -DisableRealtimeMonitoring $true') |
|
1196 | 1206 | commands.append('Set-MpPreference -DisableRealtimeMonitoring $false') |
|
1197 | 1207 | |
|
1198 | 1208 | # Compute a deterministic fingerprint to determine whether image needs |
|
1199 | 1209 | # to be regenerated. |
|
1200 | 1210 | fingerprint = resolve_fingerprint( |
|
1201 | 1211 | { |
|
1202 | 1212 | 'instance_config': config, |
|
1203 | 1213 | 'user_data': WINDOWS_USER_DATA, |
|
1204 | 1214 | 'initial_bootstrap': WINDOWS_BOOTSTRAP_POWERSHELL, |
|
1205 | 1215 | 'bootstrap_commands': commands, |
|
1206 | 1216 | 'base_image_name': base_image_name, |
|
1207 | 1217 | } |
|
1208 | 1218 | ) |
|
1209 | 1219 | |
|
1210 | 1220 | existing_image = find_and_reconcile_image(ec2resource, name, fingerprint) |
|
1211 | 1221 | |
|
1212 | 1222 | if existing_image: |
|
1213 | 1223 | return existing_image |
|
1214 | 1224 | |
|
1215 | 1225 | print('no suitable Windows development image found; creating one...') |
|
1216 | 1226 | |
|
1217 | 1227 | with create_temp_windows_ec2_instances( |
|
1218 | 1228 | c, config, bootstrap=True |
|
1219 | 1229 | ) as instances: |
|
1220 | 1230 | assert len(instances) == 1 |
|
1221 | 1231 | instance = instances[0] |
|
1222 | 1232 | |
|
1223 | 1233 | wait_for_ssm(ssmclient, [instance]) |
|
1224 | 1234 | |
|
1225 | 1235 | # On first boot, install various Windows updates. |
|
1226 | 1236 | # We would ideally use PowerShell Remoting for this. However, there are |
|
1227 | 1237 | # trust issues that make it difficult to invoke Windows Update |
|
1228 | 1238 | # remotely. So we use SSM, which has a mechanism for running Windows |
|
1229 | 1239 | # Update. |
|
1230 | 1240 | print('installing Windows features...') |
|
1231 | 1241 | run_ssm_command( |
|
1232 | 1242 | ssmclient, |
|
1233 | 1243 | [instance], |
|
1234 | 1244 | 'AWS-RunPowerShellScript', |
|
1235 | 1245 | { |
|
1236 | 1246 | 'commands': WINDOWS_BOOTSTRAP_POWERSHELL.split('\n'), |
|
1237 | 1247 | }, |
|
1238 | 1248 | ) |
|
1239 | 1249 | |
|
1240 | 1250 | # Reboot so all updates are fully applied. |
|
1241 | 1251 | # |
|
1242 | 1252 | # We don't use instance.reboot() here because it is asynchronous and |
|
1243 | 1253 | # we don't know when exactly the instance has rebooted. It could take |
|
1244 | 1254 | # a while to stop and we may start trying to interact with the instance |
|
1245 | 1255 | # before it has rebooted. |
|
1246 | 1256 | print('rebooting instance %s' % instance.id) |
|
1247 | 1257 | instance.stop() |
|
1248 | 1258 | ec2client.get_waiter('instance_stopped').wait( |
|
1249 | 1259 | InstanceIds=[instance.id], |
|
1250 | 1260 | WaiterConfig={ |
|
1251 | 1261 | 'Delay': 5, |
|
1252 | 1262 | }, |
|
1253 | 1263 | ) |
|
1254 | 1264 | |
|
1255 | 1265 | instance.start() |
|
1256 | 1266 | wait_for_ip_addresses([instance]) |
|
1257 | 1267 | |
|
1258 | 1268 | # There is a race condition here between the User Data PS script running |
|
1259 | 1269 | # and us connecting to WinRM. This can manifest as |
|
1260 | 1270 | # "AuthorizationManager check failed" failures during run_powershell(). |
|
1261 | 1271 | # TODO figure out a workaround. |
|
1262 | 1272 | |
|
1263 | 1273 | print('waiting for Windows Remote Management to come back...') |
|
1264 | 1274 | client = wait_for_winrm( |
|
1265 | 1275 | instance.public_ip_address, |
|
1266 | 1276 | 'Administrator', |
|
1267 | 1277 | c.automation.default_password(), |
|
1268 | 1278 | ) |
|
1269 | 1279 | print('established WinRM connection to %s' % instance.id) |
|
1270 | 1280 | instance.winrm_client = client |
|
1271 | 1281 | |
|
1272 | 1282 | print('bootstrapping instance...') |
|
1273 | 1283 | run_powershell(instance.winrm_client, '\n'.join(commands)) |
|
1274 | 1284 | |
|
1275 | 1285 | print('bootstrap completed; stopping %s to create image' % instance.id) |
|
1276 | 1286 | return create_ami_from_instance( |
|
1277 | 1287 | ec2client, |
|
1278 | 1288 | instance, |
|
1279 | 1289 | name, |
|
1280 | 1290 | 'Mercurial Windows development environment', |
|
1281 | 1291 | fingerprint, |
|
1282 | 1292 | ) |
|
1283 | 1293 | |
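find_and_reconcile_image() is also defined elsewhere in this module; it pairs with the fingerprint by returning the existing AMI when the recorded fingerprint still matches and discarding stale builds otherwise. A rough sketch of that contract, assuming the fingerprint lives in an image tag (the tag names are assumptions, and the real helper also needs to clean up associated snapshots):

    def find_and_reconcile_image_sketch(ec2resource, name, fingerprint):
        # Consider only AMIs we own that carry the expected Name tag.
        for image in ec2resource.images.filter(Owners=['self']):
            tags = {t['Key']: t['Value'] for t in image.tags or []}
            if tags.get('Name') != name:
                continue
            if tags.get('ImageFingerprint') == fingerprint:
                return image  # still current; reuse it
            image.deregister()  # stale; force a rebuild
        return None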
|
1284 | 1294 | |
|
1285 | 1295 | @contextlib.contextmanager |
|
1286 | 1296 | def temporary_windows_dev_instances( |
|
1287 | 1297 | c: AWSConnection, |
|
1288 | 1298 | image, |
|
1289 | 1299 | instance_type, |
|
1290 | 1300 | prefix='hg-', |
|
1291 | 1301 | disable_antivirus=False, |
|
1292 | 1302 | ): |
|
1293 | 1303 | """Create a temporary Windows development EC2 instance. |
|
1294 | 1304 | |
|
1295 | 1305 | Context manager resolves to the list of ``EC2.Instance`` that were created. |
|
1296 | 1306 | """ |
|
1297 | 1307 | config = { |
|
1298 | 1308 | 'BlockDeviceMappings': [ |
|
1299 | 1309 | { |
|
1300 | 1310 | 'DeviceName': '/dev/sda1', |
|
1301 | 1311 | 'Ebs': { |
|
1302 | 1312 | 'DeleteOnTermination': True, |
|
1303 | 1313 | 'VolumeSize': 32, |
|
1304 | 1314 | 'VolumeType': 'gp2', |
|
1305 | 1315 | }, |
|
1306 | 1316 | } |
|
1307 | 1317 | ], |
|
1308 | 1318 | 'ImageId': image.id, |
|
1309 | 1319 | 'InstanceInitiatedShutdownBehavior': 'stop', |
|
1310 | 1320 | 'InstanceType': instance_type, |
|
1311 | 1321 | 'KeyName': '%sautomation' % prefix, |
|
1312 | 1322 | 'MaxCount': 1, |
|
1313 | 1323 | 'MinCount': 1, |
|
1314 | 1324 | 'SecurityGroupIds': [c.security_groups['windows-dev-1'].id], |
|
1315 | 1325 | } |
|
1316 | 1326 | |
|
1317 | 1327 | with create_temp_windows_ec2_instances(c, config) as instances: |
|
1318 | 1328 | if disable_antivirus: |
|
1319 | 1329 | for instance in instances: |
|
1320 | 1330 | run_powershell( |
|
1321 | 1331 | instance.winrm_client, |
|
1322 | 1332 | 'Set-MpPreference -DisableRealtimeMonitoring $true', |
|
1323 | 1333 | ) |
|
1324 | 1334 | |
|
1325 | 1335 | yield instances |
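As with the Linux helper, the Windows context manager yields instances that already carry a bound winrm_client. A hypothetical caller (the image, instance type, and command are illustrative only):

    with temporary_windows_dev_instances(
        conn, image, 't3.medium', disable_antivirus=True
    ) as instances:
        for instance in instances:
            # run_powershell() is the same helper the bootstrap path uses.
            run_powershell(instance.winrm_client, 'Get-ComputerInfo')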
@@ -1,597 +1,621 b'' | |||
|
1 | 1 | # linux.py - Linux specific automation functionality |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | # no-check-code because Python 3 native. |
|
9 | 9 | |
|
10 | 10 | import os |
|
11 | 11 | import pathlib |
|
12 | 12 | import shlex |
|
13 | 13 | import subprocess |
|
14 | 14 | import tempfile |
|
15 | 15 | |
|
16 | 16 | from .ssh import exec_command |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | # Linux distributions that are supported. |
|
20 | 20 | DISTROS = { |
|
21 | 21 | 'debian9', |
|
22 | 22 | 'debian10', |
|
23 | 23 | 'ubuntu18.04', |
|
24 | 24 | 'ubuntu19.04', |
|
25 | 25 | } |
|
26 | 26 | |
|
27 | 27 | INSTALL_PYTHONS = r''' |
|
28 | 28 | PYENV2_VERSIONS="2.7.17 pypy2.7-7.2.0" |
|
29 | PYENV3_VERSIONS="3.5.10 3.6.1

29 | PYENV3_VERSIONS="3.5.10 3.6.13 3.7.10 3.8.10 3.9.5 pypy3.5-7.0.0 pypy3.6-7.3.3 pypy3.7-7.3.3" | |
|
30 | 30 | |
|
31 | 31 | git clone https://github.com/pyenv/pyenv.git /hgdev/pyenv |
|
32 | 32 | pushd /hgdev/pyenv |
|
33 | git checkout 8ac91b4fd678a8c04356f5ec85cfcd565c265e9a | |
|
33 | git checkout 328fd42c3a2fbf14ae46dae2021a087fe27ba7e2 | |
|
34 | 34 | popd |
|
35 | 35 | |
|
36 | 36 | export PYENV_ROOT="/hgdev/pyenv" |
|
37 | 37 | export PATH="$PYENV_ROOT/bin:$PATH" |
|
38 | 38 | |
|
39 | 39 | # pip 19.2.3. |
|
40 | 40 | PIP_SHA256=57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe |
|
41 | 41 | wget -O get-pip.py --progress dot:mega https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py |
|
42 | 42 | echo "${PIP_SHA256} get-pip.py" | sha256sum --check - |
|
43 | 43 | |
|
44 | 44 | VIRTUALENV_SHA256=f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2 |
|
45 | 45 | VIRTUALENV_TARBALL=virtualenv-16.7.5.tar.gz |
|
46 | 46 | wget -O ${VIRTUALENV_TARBALL} --progress dot:mega https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/${VIRTUALENV_TARBALL} |
|
47 | 47 | echo "${VIRTUALENV_SHA256} ${VIRTUALENV_TARBALL}" | sha256sum --check - |
|
48 | 48 | |
|
49 | 49 | for v in ${PYENV2_VERSIONS}; do |
|
50 | 50 | pyenv install -v ${v} |
|
51 | 51 | ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py |
|
52 | 52 | ${PYENV_ROOT}/versions/${v}/bin/pip install ${VIRTUALENV_TARBALL} |
|
53 | 53 | ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py2.txt |
|
54 | 54 | done |
|
55 | 55 | |
|
56 | 56 | for v in ${PYENV3_VERSIONS}; do |
|
57 | 57 | pyenv install -v ${v} |
|
58 | 58 | ${PYENV_ROOT}/versions/${v}/bin/python get-pip.py |
|
59 | ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/requirements-py3.txt | |
|
59 | ||
|
60 | case ${v} in | |
|
61 | 3.5.*) | |
|
62 | REQUIREMENTS=requirements-py3.5.txt | |
|
63 | ;; | |
|
64 | pypy3.5*) | |
|
65 | REQUIREMENTS=requirements-py3.5.txt | |
|
66 | ;; | |
|
67 | *) | |
|
68 | REQUIREMENTS=requirements-py3.txt | |
|
69 | ;; | |
|
70 | esac | |
|
71 | ||
|
72 | ${PYENV_ROOT}/versions/${v}/bin/pip install -r /hgdev/${REQUIREMENTS} | |
|
60 | 73 | done |
|
61 | 74 | |
|
62 | 75 | pyenv global ${PYENV2_VERSIONS} ${PYENV3_VERSIONS} system |
|
63 | 76 | '''.lstrip().replace( |
|
64 | 77 | '\r\n', '\n' |
|
65 | 78 | ) |
|
66 | 79 | |
|
80 | INSTALL_PYOXIDIZER = r''' | |
|
81 | PYOXIDIZER_VERSION=0.16.0 | |
|
82 | PYOXIDIZER_SHA256=8875471c270312fbb934007fd30f65f1904cc0f5da6188d61c90ed2129b9f9c1 | |
|
83 | PYOXIDIZER_URL=https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F${PYOXIDIZER_VERSION}/pyoxidizer-${PYOXIDIZER_VERSION}-linux_x86_64.zip | |
|
84 | ||
|
85 | wget -O pyoxidizer.zip --progress dot:mega ${PYOXIDIZER_URL} | |
|
86 | echo "${PYOXIDIZER_SHA256} pyoxidizer.zip" | sha256sum --check - | |
|
87 | ||
|
88 | unzip pyoxidizer.zip | |
|
89 | chmod +x pyoxidizer | |
|
90 | sudo mv pyoxidizer /usr/local/bin/pyoxidizer | |
|
91 | ''' | |
|
67 | 92 | |
|
68 | 93 | INSTALL_RUST = r''' |
|
69 | 94 | RUSTUP_INIT_SHA256=a46fe67199b7bcbbde2dcbc23ae08db6f29883e260e23899a88b9073effc9076 |
|
70 | 95 | wget -O rustup-init --progress dot:mega https://static.rust-lang.org/rustup/archive/1.18.3/x86_64-unknown-linux-gnu/rustup-init |
|
71 | 96 | echo "${RUSTUP_INIT_SHA256} rustup-init" | sha256sum --check - |
|
72 | 97 | |
|
73 | 98 | chmod +x rustup-init |
|
74 | 99 | sudo -H -u hg -g hg ./rustup-init -y |
|
75 | sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.

100 | sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup install 1.41.1 1.52.0 | |
|
76 | 101 | sudo -H -u hg -g hg /home/hg/.cargo/bin/rustup component add clippy |
|
77 | ||
|
78 | sudo -H -u hg -g hg /home/hg/.cargo/bin/cargo install --version 0.10.3 pyoxidizer | |
|
79 | 102 | ''' |
|
80 | 103 | |
|
81 | 104 | |
|
82 | 105 | BOOTSTRAP_VIRTUALENV = r''' |
|
83 | 106 | /usr/bin/virtualenv /hgdev/venv-bootstrap |
|
84 | 107 | |
|
85 | 108 | HG_SHA256=35fc8ba5e0379c1b3affa2757e83fb0509e8ac314cbd9f1fd133cf265d16e49f |
|
86 | 109 | HG_TARBALL=mercurial-5.1.1.tar.gz |
|
87 | 110 | |
|
88 | 111 | wget -O ${HG_TARBALL} --progress dot:mega https://www.mercurial-scm.org/release/${HG_TARBALL} |
|
89 | 112 | echo "${HG_SHA256} ${HG_TARBALL}" | sha256sum --check - |
|
90 | 113 | |
|
91 | 114 | /hgdev/venv-bootstrap/bin/pip install ${HG_TARBALL} |
|
92 | 115 | '''.lstrip().replace( |
|
93 | 116 | '\r\n', '\n' |
|
94 | 117 | ) |
|
95 | 118 | |
|
96 | 119 | |
|
97 | 120 | BOOTSTRAP_DEBIAN = ( |
|
98 | 121 | r''' |
|
99 | 122 | #!/bin/bash |
|
100 | 123 | |
|
101 | 124 | set -ex |
|
102 | 125 | |
|
103 | 126 | DISTRO=`grep DISTRIB_ID /etc/lsb-release | awk -F= '{{print $2}}'` |
|
104 | 127 | DEBIAN_VERSION=`cat /etc/debian_version` |
|
105 | 128 | LSB_RELEASE=`lsb_release -cs` |
|
106 | 129 | |
|
107 | 130 | sudo /usr/sbin/groupadd hg |
|
108 | 131 | sudo /usr/sbin/groupadd docker |
|
109 | 132 | sudo /usr/sbin/useradd -g hg -G sudo,docker -d /home/hg -m -s /bin/bash hg |
|
110 | 133 | sudo mkdir /home/hg/.ssh |
|
111 | 134 | sudo cp ~/.ssh/authorized_keys /home/hg/.ssh/authorized_keys |
|
112 | 135 | sudo chown -R hg:hg /home/hg/.ssh |
|
113 | 136 | sudo chmod 700 /home/hg/.ssh |
|
114 | 137 | sudo chmod 600 /home/hg/.ssh/authorized_keys |
|
115 | 138 | |
|
116 | 139 | cat << EOF | sudo tee /etc/sudoers.d/90-hg |
|
117 | 140 | hg ALL=(ALL) NOPASSWD:ALL |
|
118 | 141 | EOF |
|
119 | 142 | |
|
120 | 143 | sudo apt-get update |
|
121 | 144 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq dist-upgrade |
|
122 | 145 | |
|
123 | 146 | # Install packages necessary to set up Docker Apt repo. |
|
124 | 147 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends \ |
|
125 | 148 | apt-transport-https \ |
|
126 | 149 | gnupg |
|
127 | 150 | |
|
128 | 151 | cat > docker-apt-key << EOF |
|
129 | 152 | -----BEGIN PGP PUBLIC KEY BLOCK----- |
|
130 | 153 | |
|
131 | 154 | mQINBFit2ioBEADhWpZ8/wvZ6hUTiXOwQHXMAlaFHcPH9hAtr4F1y2+OYdbtMuth |
|
132 | 155 | lqqwp028AqyY+PRfVMtSYMbjuQuu5byyKR01BbqYhuS3jtqQmljZ/bJvXqnmiVXh |
|
133 | 156 | 38UuLa+z077PxyxQhu5BbqntTPQMfiyqEiU+BKbq2WmANUKQf+1AmZY/IruOXbnq |
|
134 | 157 | L4C1+gJ8vfmXQt99npCaxEjaNRVYfOS8QcixNzHUYnb6emjlANyEVlZzeqo7XKl7 |
|
135 | 158 | UrwV5inawTSzWNvtjEjj4nJL8NsLwscpLPQUhTQ+7BbQXAwAmeHCUTQIvvWXqw0N |
|
136 | 159 | cmhh4HgeQscQHYgOJjjDVfoY5MucvglbIgCqfzAHW9jxmRL4qbMZj+b1XoePEtht |
|
137 | 160 | ku4bIQN1X5P07fNWzlgaRL5Z4POXDDZTlIQ/El58j9kp4bnWRCJW0lya+f8ocodo |
|
138 | 161 | vZZ+Doi+fy4D5ZGrL4XEcIQP/Lv5uFyf+kQtl/94VFYVJOleAv8W92KdgDkhTcTD |
|
139 | 162 | G7c0tIkVEKNUq48b3aQ64NOZQW7fVjfoKwEZdOqPE72Pa45jrZzvUFxSpdiNk2tZ |
|
140 | 163 | XYukHjlxxEgBdC/J3cMMNRE1F4NCA3ApfV1Y7/hTeOnmDuDYwr9/obA8t016Yljj |
|
141 | 164 | q5rdkywPf4JF8mXUW5eCN1vAFHxeg9ZWemhBtQmGxXnw9M+z6hWwc6ahmwARAQAB |
|
142 | 165 | tCtEb2NrZXIgUmVsZWFzZSAoQ0UgZGViKSA8ZG9ja2VyQGRvY2tlci5jb20+iQI3 |
|
143 | 166 | BBMBCgAhBQJYrefAAhsvBQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEI2BgDwO |
|
144 | 167 | v82IsskP/iQZo68flDQmNvn8X5XTd6RRaUH33kXYXquT6NkHJciS7E2gTJmqvMqd |
|
145 | 168 | tI4mNYHCSEYxI5qrcYV5YqX9P6+Ko+vozo4nseUQLPH/ATQ4qL0Zok+1jkag3Lgk |
|
146 | 169 | jonyUf9bwtWxFp05HC3GMHPhhcUSexCxQLQvnFWXD2sWLKivHp2fT8QbRGeZ+d3m |
|
147 | 170 | 6fqcd5Fu7pxsqm0EUDK5NL+nPIgYhN+auTrhgzhK1CShfGccM/wfRlei9Utz6p9P |
|
148 | 171 | XRKIlWnXtT4qNGZNTN0tR+NLG/6Bqd8OYBaFAUcue/w1VW6JQ2VGYZHnZu9S8LMc |
|
149 | 172 | FYBa5Ig9PxwGQOgq6RDKDbV+PqTQT5EFMeR1mrjckk4DQJjbxeMZbiNMG5kGECA8 |
|
150 | 173 | g383P3elhn03WGbEEa4MNc3Z4+7c236QI3xWJfNPdUbXRaAwhy/6rTSFbzwKB0Jm |
|
151 | 174 | ebwzQfwjQY6f55MiI/RqDCyuPj3r3jyVRkK86pQKBAJwFHyqj9KaKXMZjfVnowLh |
|
152 | 175 | 9svIGfNbGHpucATqREvUHuQbNnqkCx8VVhtYkhDb9fEP2xBu5VvHbR+3nfVhMut5 |
|
153 | 176 | G34Ct5RS7Jt6LIfFdtcn8CaSas/l1HbiGeRgc70X/9aYx/V/CEJv0lIe8gP6uDoW |
|
154 | 177 | FPIZ7d6vH+Vro6xuWEGiuMaiznap2KhZmpkgfupyFmplh0s6knymuQINBFit2ioB |
|
155 | 178 | EADneL9S9m4vhU3blaRjVUUyJ7b/qTjcSylvCH5XUE6R2k+ckEZjfAMZPLpO+/tF |
|
156 | 179 | M2JIJMD4SifKuS3xck9KtZGCufGmcwiLQRzeHF7vJUKrLD5RTkNi23ydvWZgPjtx |
|
157 | 180 | Q+DTT1Zcn7BrQFY6FgnRoUVIxwtdw1bMY/89rsFgS5wwuMESd3Q2RYgb7EOFOpnu |
|
158 | 181 | w6da7WakWf4IhnF5nsNYGDVaIHzpiqCl+uTbf1epCjrOlIzkZ3Z3Yk5CM/TiFzPk |
|
159 | 182 | z2lLz89cpD8U+NtCsfagWWfjd2U3jDapgH+7nQnCEWpROtzaKHG6lA3pXdix5zG8 |
|
160 | 183 | eRc6/0IbUSWvfjKxLLPfNeCS2pCL3IeEI5nothEEYdQH6szpLog79xB9dVnJyKJb |
|
161 | 184 | VfxXnseoYqVrRz2VVbUI5Blwm6B40E3eGVfUQWiux54DspyVMMk41Mx7QJ3iynIa |
|
162 | 185 | 1N4ZAqVMAEruyXTRTxc9XW0tYhDMA/1GYvz0EmFpm8LzTHA6sFVtPm/ZlNCX6P1X |
|
163 | 186 | zJwrv7DSQKD6GGlBQUX+OeEJ8tTkkf8QTJSPUdh8P8YxDFS5EOGAvhhpMBYD42kQ |
|
164 | 187 | pqXjEC+XcycTvGI7impgv9PDY1RCC1zkBjKPa120rNhv/hkVk/YhuGoajoHyy4h7 |
|
165 | 188 | ZQopdcMtpN2dgmhEegny9JCSwxfQmQ0zK0g7m6SHiKMwjwARAQABiQQ+BBgBCAAJ |
|
166 | 189 | BQJYrdoqAhsCAikJEI2BgDwOv82IwV0gBBkBCAAGBQJYrdoqAAoJEH6gqcPyc/zY |
|
167 | 190 | 1WAP/2wJ+R0gE6qsce3rjaIz58PJmc8goKrir5hnElWhPgbq7cYIsW5qiFyLhkdp |
|
168 | 191 | YcMmhD9mRiPpQn6Ya2w3e3B8zfIVKipbMBnke/ytZ9M7qHmDCcjoiSmwEXN3wKYI |
|
169 | 192 | mD9VHONsl/CG1rU9Isw1jtB5g1YxuBA7M/m36XN6x2u+NtNMDB9P56yc4gfsZVES |
|
170 | 193 | KA9v+yY2/l45L8d/WUkUi0YXomn6hyBGI7JrBLq0CX37GEYP6O9rrKipfz73XfO7 |
|
171 | 194 | JIGzOKZlljb/D9RX/g7nRbCn+3EtH7xnk+TK/50euEKw8SMUg147sJTcpQmv6UzZ |
|
172 | 195 | cM4JgL0HbHVCojV4C/plELwMddALOFeYQzTif6sMRPf+3DSj8frbInjChC3yOLy0 |
|
173 | 196 | 6br92KFom17EIj2CAcoeq7UPhi2oouYBwPxh5ytdehJkoo+sN7RIWua6P2WSmon5 |
|
174 | 197 | U888cSylXC0+ADFdgLX9K2zrDVYUG1vo8CX0vzxFBaHwN6Px26fhIT1/hYUHQR1z |
|
175 | 198 | VfNDcyQmXqkOnZvvoMfz/Q0s9BhFJ/zU6AgQbIZE/hm1spsfgvtsD1frZfygXJ9f |
|
176 | 199 | irP+MSAI80xHSf91qSRZOj4Pl3ZJNbq4yYxv0b1pkMqeGdjdCYhLU+LZ4wbQmpCk |
|
177 | 200 | SVe2prlLureigXtmZfkqevRz7FrIZiu9ky8wnCAPwC7/zmS18rgP/17bOtL4/iIz |
|
178 | 201 | QhxAAoAMWVrGyJivSkjhSGx1uCojsWfsTAm11P7jsruIL61ZzMUVE2aM3Pmj5G+W |
|
179 | 202 | 9AcZ58Em+1WsVnAXdUR//bMmhyr8wL/G1YO1V3JEJTRdxsSxdYa4deGBBY/Adpsw |
|
180 | 203 | 24jxhOJR+lsJpqIUeb999+R8euDhRHG9eFO7DRu6weatUJ6suupoDTRWtr/4yGqe |
|
181 | 204 | dKxV3qQhNLSnaAzqW/1nA3iUB4k7kCaKZxhdhDbClf9P37qaRW467BLCVO/coL3y |
|
182 | 205 | Vm50dwdrNtKpMBh3ZpbB1uJvgi9mXtyBOMJ3v8RZeDzFiG8HdCtg9RvIt/AIFoHR |
|
183 | 206 | H3S+U79NT6i0KPzLImDfs8T7RlpyuMc4Ufs8ggyg9v3Ae6cN3eQyxcK3w0cbBwsh |
|
184 | 207 | /nQNfsA6uu+9H7NhbehBMhYnpNZyrHzCmzyXkauwRAqoCbGCNykTRwsur9gS41TQ |
|
185 | 208 | M8ssD1jFheOJf3hODnkKU+HKjvMROl1DK7zdmLdNzA1cvtZH/nCC9KPj1z8QC47S |
|
186 | 209 | xx+dTZSx4ONAhwbS/LN3PoKtn8LPjY9NP9uDWI+TWYquS2U+KHDrBDlsgozDbs/O |
|
187 | 210 | jCxcpDzNmXpWQHEtHU7649OXHP7UeNST1mCUCH5qdank0V1iejF6/CfTFU4MfcrG |
|
188 | 211 | YT90qFF93M3v01BbxP+EIY2/9tiIPbrd |
|
189 | 212 | =0YYh |
|
190 | 213 | -----END PGP PUBLIC KEY BLOCK----- |
|
191 | 214 | EOF |
|
192 | 215 | |
|
193 | 216 | sudo apt-key add docker-apt-key |
|
194 | 217 | |
|
195 | 218 | if [ "$LSB_RELEASE" = "stretch" ]; then |
|
196 | 219 | cat << EOF | sudo tee -a /etc/apt/sources.list |
|
197 | 220 | # Need backports for clang-format-6.0 |
|
198 | 221 | deb http://deb.debian.org/debian stretch-backports main |
|
199 | 222 | EOF |
|
200 | 223 | fi |
|
201 | 224 | |
|
202 | 225 | if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "buster" ]; then |
|
203 | 226 | cat << EOF | sudo tee -a /etc/apt/sources.list |
|
204 | 227 | # Sources are useful if we want to compile things locally. |
|
205 | 228 | deb-src http://deb.debian.org/debian $LSB_RELEASE main |
|
206 | 229 | deb-src http://security.debian.org/debian-security $LSB_RELEASE/updates main |
|
207 | 230 | deb-src http://deb.debian.org/debian $LSB_RELEASE-updates main |
|
208 | 231 | deb-src http://deb.debian.org/debian $LSB_RELEASE-backports main |
|
209 | 232 | |
|
210 | 233 | deb [arch=amd64] https://download.docker.com/linux/debian $LSB_RELEASE stable |
|
211 | 234 | EOF |
|
212 | 235 | |
|
213 | 236 | elif [ "$DISTRO" = "Ubuntu" ]; then |
|
214 | 237 | cat << EOF | sudo tee -a /etc/apt/sources.list |
|
215 | 238 | deb [arch=amd64] https://download.docker.com/linux/ubuntu $LSB_RELEASE stable |
|
216 | 239 | EOF |
|
217 | 240 | |
|
218 | 241 | fi |
|
219 | 242 | |
|
220 | 243 | sudo apt-get update |
|
221 | 244 | |
|
222 | 245 | PACKAGES="\ |
|
223 | 246 | awscli \ |
|
224 | 247 | btrfs-progs \ |
|
225 | 248 | build-essential \ |
|
226 | 249 | bzr \ |
|
227 | 250 | clang-format-6.0 \ |
|
228 | 251 | cvs \ |
|
229 | 252 | darcs \ |
|
230 | 253 | debhelper \ |
|
231 | 254 | devscripts \ |
|
232 | 255 | docker-ce \ |
|
233 | 256 | dpkg-dev \ |
|
234 | 257 | dstat \ |
|
235 | 258 | emacs \ |
|
236 | 259 | gettext \ |
|
237 | 260 | git \ |
|
238 | 261 | htop \ |
|
239 | 262 | iotop \ |
|
240 | 263 | jfsutils \ |
|
241 | 264 | libbz2-dev \ |
|
242 | 265 | libexpat1-dev \ |
|
243 | 266 | libffi-dev \ |
|
244 | 267 | libgdbm-dev \ |
|
245 | 268 | liblzma-dev \ |
|
246 | 269 | libncurses5-dev \ |
|
247 | 270 | libnss3-dev \ |
|
248 | 271 | libreadline-dev \ |
|
249 | 272 | libsqlite3-dev \ |
|
250 | 273 | libssl-dev \ |
|
251 | 274 | netbase \ |
|
252 | 275 | ntfs-3g \ |
|
253 | 276 | nvme-cli \ |
|
254 | 277 | pyflakes \ |
|
255 | 278 | pyflakes3 \ |
|
256 | 279 | pylint \ |
|
257 | 280 | pylint3 \ |
|
258 | 281 | python-all-dev \ |
|
259 | 282 | python-dev \ |
|
260 | 283 | python-docutils \ |
|
261 | 284 | python-fuzzywuzzy \ |
|
262 | 285 | python-pygments \ |
|
263 | 286 | python-subversion \ |
|
264 | 287 | python-vcr \ |
|
265 | 288 | python3-boto3 \ |
|
266 | 289 | python3-dev \ |
|
267 | 290 | python3-docutils \ |
|
268 | 291 | python3-fuzzywuzzy \ |
|
269 | 292 | python3-pygments \ |
|
270 | 293 | python3-vcr \ |
|
271 | 294 | python3-venv \ |
|
272 | 295 | rsync \ |
|
273 | 296 | sqlite3 \ |
|
274 | 297 | subversion \ |
|
275 | 298 | tcl-dev \ |
|
276 | 299 | tk-dev \ |
|
277 | 300 | tla \ |
|
278 | 301 | unzip \ |
|
279 | 302 | uuid-dev \ |
|
280 | 303 | vim \ |
|
281 | 304 | virtualenv \ |
|
282 | 305 | wget \ |
|
283 | 306 | xfsprogs \ |
|
284 | 307 | zip \ |
|
285 | 308 | zlib1g-dev" |
|
286 | 309 | |
|
287 | 310 | if [ "LSB_RELEASE" = "stretch" ]; then |
|
288 | 311 | PACKAGES="$PACKAGES linux-perf" |
|
289 | 312 | elif [ "$DISTRO" = "Ubuntu" ]; then |
|
290 | 313 | PACKAGES="$PACKAGES linux-tools-common" |
|
291 | 314 | fi |
|
292 | 315 | |
|
293 | 316 | # Monotone only available in older releases. |
|
294 | 317 | if [ "$LSB_RELEASE" = "stretch" -o "$LSB_RELEASE" = "xenial" ]; then |
|
295 | 318 | PACKAGES="$PACKAGES monotone" |
|
296 | 319 | fi |
|
297 | 320 | |
|
298 | 321 | sudo DEBIAN_FRONTEND=noninteractive apt-get -yq install --no-install-recommends $PACKAGES |
|
299 | 322 | |
|
300 | 323 | # Create clang-format symlink so test harness finds it. |
|
301 | 324 | sudo update-alternatives --install /usr/bin/clang-format clang-format \ |
|
302 | 325 | /usr/bin/clang-format-6.0 1000 |
|
303 | 326 | |
|
304 | 327 | sudo mkdir /hgdev |
|
305 | 328 | # Will be normalized to hg:hg later. |
|
306 | 329 | sudo chown `whoami` /hgdev |
|
307 | 330 | |
|
308 | 331 | {install_rust} |
|
332 | {install_pyoxidizer} | |
|
309 | 333 | |
|
310 | cp requirements-py2.txt /hgdev/requirements-py2.txt | |

311 | cp requirements-py3.txt /hgdev/requirements-py3.txt | |
|
334 | cp requirements-*.txt /hgdev/ | |
|
312 | 335 | |
|
313 | 336 | # Disable the pip version check because it uses the network and can |
|
314 | 337 | # be annoying. |
|
315 | 338 | cat << EOF | sudo tee -a /etc/pip.conf |
|
316 | 339 | [global] |
|
317 | 340 | disable-pip-version-check = True |
|
318 | 341 | EOF |
|
319 | 342 | |
|
320 | 343 | {install_pythons} |
|
321 | 344 | {bootstrap_virtualenv} |
|
322 | 345 | |
|
323 | 346 | /hgdev/venv-bootstrap/bin/hg clone https://www.mercurial-scm.org/repo/hg /hgdev/src |
|
324 | 347 | |
|
325 | 348 | # Mark the repo as non-publishing. |
|
326 | 349 | cat >> /hgdev/src/.hg/hgrc << EOF |
|
327 | 350 | [phases] |
|
328 | 351 | publish = false |
|
329 | 352 | EOF |
|
330 | 353 | |
|
331 | 354 | sudo chown -R hg:hg /hgdev |
|
332 | 355 | '''.lstrip() |
|
333 | 356 | .format( |
|
334 | 357 | install_rust=INSTALL_RUST, |
|
358 | install_pyoxidizer=INSTALL_PYOXIDIZER, | |
|
335 | 359 | install_pythons=INSTALL_PYTHONS, |
|
336 | 360 | bootstrap_virtualenv=BOOTSTRAP_VIRTUALENV, |
|
337 | 361 | ) |
|
338 | 362 | .replace('\r\n', '\n') |
|
339 | 363 | ) |
|
340 | 364 | |
|
341 | 365 | |
|
342 | 366 | # Prepares /hgdev for operations. |
|
343 | 367 | PREPARE_HGDEV = ''' |
|
344 | 368 | #!/bin/bash |
|
345 | 369 | |
|
346 | 370 | set -e |
|
347 | 371 | |
|
348 | 372 | FS=$1 |
|
349 | 373 | |
|
350 | 374 | ensure_device() { |
|
351 | 375 | if [ -z "${DEVICE}" ]; then |
|
352 | 376 | echo "could not find block device to format" |
|
353 | 377 | exit 1 |
|
354 | 378 | fi |
|
355 | 379 | } |
|
356 | 380 | |
|
357 | 381 | # Determine device to partition for extra filesystem. |
|
358 | 382 | # If only 1 volume is present, it will be the root volume and |
|
359 | 383 | # should be /dev/nvme0. If multiple volumes are present, the |
|
360 | 384 | # root volume could be nvme0 or nvme1. Use whichever one doesn't have |
|
361 | 385 | # a partition. |
|
362 | 386 | if [ -e /dev/nvme1n1 ]; then |
|
363 | 387 | if [ -e /dev/nvme0n1p1 ]; then |
|
364 | 388 | DEVICE=/dev/nvme1n1 |
|
365 | 389 | else |
|
366 | 390 | DEVICE=/dev/nvme0n1 |
|
367 | 391 | fi |
|
368 | 392 | else |
|
369 | 393 | DEVICE= |
|
370 | 394 | fi |
|
371 | 395 | |
|
372 | 396 | sudo mkdir /hgwork |
|
373 | 397 | |
|
374 | 398 | if [ "${FS}" != "default" -a "${FS}" != "tmpfs" ]; then |
|
375 | 399 | ensure_device |
|
376 | 400 | echo "creating ${FS} filesystem on ${DEVICE}" |
|
377 | 401 | fi |
|
378 | 402 | |
|
379 | 403 | if [ "${FS}" = "default" ]; then |
|
380 | 404 | : |
|
381 | 405 | |
|
382 | 406 | elif [ "${FS}" = "btrfs" ]; then |
|
383 | 407 | sudo mkfs.btrfs ${DEVICE} |
|
384 | 408 | sudo mount ${DEVICE} /hgwork |
|
385 | 409 | |
|
386 | 410 | elif [ "${FS}" = "ext3" ]; then |
|
387 | 411 | # lazy_journal_init speeds up filesystem creation at the expense of |
|
388 | 412 | # integrity if things crash. We are an ephemeral instance, so we don't |
|
389 | 413 | # care about integrity. |
|
390 | 414 | sudo mkfs.ext3 -E lazy_journal_init=1 ${DEVICE} |
|
391 | 415 | sudo mount ${DEVICE} /hgwork |
|
392 | 416 | |
|
393 | 417 | elif [ "${FS}" = "ext4" ]; then |
|
394 | 418 | sudo mkfs.ext4 -E lazy_journal_init=1 ${DEVICE} |
|
395 | 419 | sudo mount ${DEVICE} /hgwork |
|
396 | 420 | |
|
397 | 421 | elif [ "${FS}" = "jfs" ]; then |
|
398 | 422 | sudo mkfs.jfs ${DEVICE} |
|
399 | 423 | sudo mount ${DEVICE} /hgwork |
|
400 | 424 | |
|
401 | 425 | elif [ "${FS}" = "tmpfs" ]; then |
|
402 | 426 | echo "creating tmpfs volume in /hgwork" |
|
403 | 427 | sudo mount -t tmpfs -o size=1024M tmpfs /hgwork |
|
404 | 428 | |
|
405 | 429 | elif [ "${FS}" = "xfs" ]; then |
|
406 | 430 | sudo mkfs.xfs ${DEVICE} |
|
407 | 431 | sudo mount ${DEVICE} /hgwork |
|
408 | 432 | |
|
409 | 433 | else |
|
410 | 434 | echo "unsupported filesystem: ${FS}" |
|
411 | 435 | exit 1 |
|
412 | 436 | fi |
|
413 | 437 | |
|
414 | 438 | echo "/hgwork ready" |
|
415 | 439 | |
|
416 | 440 | sudo chown hg:hg /hgwork |
|
417 | 441 | mkdir /hgwork/tmp |
|
418 | 442 | chown hg:hg /hgwork/tmp |
|
419 | 443 | |
|
420 | 444 | rsync -a /hgdev/src /hgwork/ |
|
421 | 445 | '''.lstrip().replace( |
|
422 | 446 | '\r\n', '\n' |
|
423 | 447 | ) |
|
424 | 448 | |
|
425 | 449 | |
|
426 | 450 | HG_UPDATE_CLEAN = ''' |
|
427 | 451 | set -ex |
|
428 | 452 | |
|
429 | 453 | HG=/hgdev/venv-bootstrap/bin/hg |
|
430 | 454 | |
|
431 | 455 | cd /hgwork/src |
|
432 | 456 | ${HG} --config extensions.purge= purge --all |
|
433 | 457 | ${HG} update -C $1 |
|
434 | 458 | ${HG} log -r . |
|
435 | 459 | '''.lstrip().replace( |
|
436 | 460 | '\r\n', '\n' |
|
437 | 461 | ) |
|
438 | 462 | |
|
439 | 463 | |
|
440 | 464 | def prepare_exec_environment(ssh_client, filesystem='default'): |
|
441 | 465 | """Prepare an EC2 instance to execute things. |
|
442 | 466 | |
|
443 | 467 | The AMI has an ``/hgdev`` bootstrapped with various Python installs |
|
444 | 468 | and a clone of the Mercurial repo. |
|
445 | 469 | |
|
446 | 470 | In EC2, EBS volumes launched from snapshots have wonky performance behavior. |
|
447 | 471 | Notably, blocks have to be copied on first access, which makes volume |
|
448 | 472 | I/O extremely slow on fresh volumes. |
|
449 | 473 | |
|
450 | 474 | Furthermore, we may want to run operations, tests, etc on alternative |
|
451 | 475 | filesystems so we can examine behavior on different filesystems. |
|
452 | 476 | |
|
453 | 477 | This function is used to facilitate executing operations on alternate |
|
454 | 478 | volumes. |
|
455 | 479 | """ |
|
456 | 480 | sftp = ssh_client.open_sftp() |
|
457 | 481 | |
|
458 | 482 | with sftp.open('/hgdev/prepare-hgdev', 'wb') as fh: |
|
459 | 483 | fh.write(PREPARE_HGDEV) |
|
460 | 484 | fh.chmod(0o0777) |
|
461 | 485 | |
|
462 | 486 | command = 'sudo /hgdev/prepare-hgdev %s' % filesystem |
|
463 | 487 | chan, stdin, stdout = exec_command(ssh_client, command) |
|
464 | 488 | stdin.close() |
|
465 | 489 | |
|
466 | 490 | for line in stdout: |
|
467 | 491 | print(line, end='') |
|
468 | 492 | |
|
469 | 493 | res = chan.recv_exit_status() |
|
470 | 494 | |
|
471 | 495 | if res: |
|
472 | 496 | raise Exception('non-0 exit code preparing exec environment: %d' % res) |
|
473 | 497 | |
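prepare_exec_environment() is what lets the harness exercise a specific filesystem. A hypothetical call, assuming instance came from temporary_linux_dev_instances() and so has an ssh_client attribute:

    # Format the extra volume as XFS and stage the source clone
    # under /hgwork before running anything.
    prepare_exec_environment(instance.ssh_client, filesystem='xfs')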
|
474 | 498 | |
|
475 | 499 | def synchronize_hg( |
|
476 | 500 | source_path: pathlib.Path, ec2_instance, revision: str = None |
|
477 | 501 | ): |
|
478 | 502 | """Synchronize a local Mercurial source path to remote EC2 instance.""" |
|
479 | 503 | |
|
480 | 504 | with tempfile.TemporaryDirectory() as temp_dir: |
|
481 | 505 | temp_dir = pathlib.Path(temp_dir) |
|
482 | 506 | |
|
483 | 507 | ssh_dir = temp_dir / '.ssh' |
|
484 | 508 | ssh_dir.mkdir() |
|
485 | 509 | ssh_dir.chmod(0o0700) |
|
486 | 510 | |
|
487 | 511 | public_ip = ec2_instance.public_ip_address |
|
488 | 512 | |
|
489 | 513 | ssh_config = ssh_dir / 'config' |
|
490 | 514 | |
|
491 | 515 | with ssh_config.open('w', encoding='utf-8') as fh: |
|
492 | 516 | fh.write('Host %s\n' % public_ip) |
|
493 | 517 | fh.write(' User hg\n') |
|
494 | 518 | fh.write(' StrictHostKeyChecking no\n') |
|
495 | 519 | fh.write(' UserKnownHostsFile %s\n' % (ssh_dir / 'known_hosts')) |
|
496 | 520 | fh.write(' IdentityFile %s\n' % ec2_instance.ssh_private_key_path) |
|
497 | 521 | |
|
498 | 522 | if not (source_path / '.hg').is_dir(): |
|
499 | 523 | raise Exception( |
|
500 | 524 | '%s is not a Mercurial repository; synchronization ' |
|
501 | 525 | 'not yet supported' % source_path |
|
502 | 526 | ) |
|
503 | 527 | |
|
504 | 528 | env = dict(os.environ) |
|
505 | 529 | env['HGPLAIN'] = '1' |
|
506 | 530 | env['HGENCODING'] = 'utf-8' |
|
507 | 531 | |
|
508 | 532 | hg_bin = source_path / 'hg' |
|
509 | 533 | |
|
510 | 534 | res = subprocess.run( |
|
511 | 535 | ['python2.7', str(hg_bin), 'log', '-r', revision, '-T', '{node}'], |
|
512 | 536 | cwd=str(source_path), |
|
513 | 537 | env=env, |
|
514 | 538 | check=True, |
|
515 | 539 | capture_output=True, |
|
516 | 540 | ) |
|
517 | 541 | |
|
518 | 542 | full_revision = res.stdout.decode('ascii') |
|
519 | 543 | |
|
520 | 544 | args = [ |
|
521 | 545 | 'python2.7', |
|
522 | 546 | str(hg_bin), |
|
523 | 547 | '--config', |
|
524 | 548 | 'ui.ssh=ssh -F %s' % ssh_config, |
|
525 | 549 | '--config', |
|
526 | 550 | 'ui.remotecmd=/hgdev/venv-bootstrap/bin/hg', |
|
527 | 551 | # Also ensure .hgtags changes are present so auto version |
|
528 | 552 | # calculation works. |
|
529 | 553 | 'push', |
|
530 | 554 | '-f', |
|
531 | 555 | '-r', |
|
532 | 556 | full_revision, |
|
533 | 557 | '-r', |
|
534 | 558 | 'file(.hgtags)', |
|
535 | 559 | 'ssh://%s//hgwork/src' % public_ip, |
|
536 | 560 | ] |
|
537 | 561 | |
|
538 | 562 | res = subprocess.run(args, cwd=str(source_path), env=env) |
|
539 | 563 | |
|
540 | 564 | # Allow 1 (no-op) to not trigger error. |
|
541 | 565 | if res.returncode not in (0, 1): |
|
542 | 566 | res.check_returncode() |
|
543 | 567 | |
|
544 | 568 | # TODO support synchronizing dirty working directory. |
|
545 | 569 | |
|
546 | 570 | sftp = ec2_instance.ssh_client.open_sftp() |
|
547 | 571 | |
|
548 | 572 | with sftp.open('/hgdev/hgup', 'wb') as fh: |
|
549 | 573 | fh.write(HG_UPDATE_CLEAN) |
|
550 | 574 | fh.chmod(0o0700) |
|
551 | 575 | |
|
552 | 576 | chan, stdin, stdout = exec_command( |
|
553 | 577 | ec2_instance.ssh_client, '/hgdev/hgup %s' % full_revision |
|
554 | 578 | ) |
|
555 | 579 | stdin.close() |
|
556 | 580 | |
|
557 | 581 | for line in stdout: |
|
558 | 582 | print(line, end='') |
|
559 | 583 | |
|
560 | 584 | res = chan.recv_exit_status() |
|
561 | 585 | |
|
562 | 586 | if res: |
|
563 | 587 | raise Exception( |
|
564 | 588 | 'non-0 exit code updating working directory; %d' % res |
|
565 | 589 | ) |
|
566 | 590 | |
|
567 | 591 | |
|
568 | 592 | def run_tests(ssh_client, python_version, test_flags=None): |
|
569 | 593 | """Run tests on a remote Linux machine via an SSH client.""" |
|
570 | 594 | test_flags = test_flags or [] |
|
571 | 595 | |
|
572 | 596 | print('running tests') |
|
573 | 597 | |
|
574 | 598 | if python_version == 'system2': |
|
575 | 599 | python = '/usr/bin/python2' |
|
576 | 600 | elif python_version == 'system3': |
|
577 | 601 | python = '/usr/bin/python3' |
|
578 | 602 | elif python_version.startswith('pypy'): |
|
579 | 603 | python = '/hgdev/pyenv/shims/%s' % python_version |
|
580 | 604 | else: |
|
581 | 605 | python = '/hgdev/pyenv/shims/python%s' % python_version |
|
582 | 606 | |
|
583 | 607 | test_flags = ' '.join(shlex.quote(a) for a in test_flags) |
|
584 | 608 | |
|
585 | 609 | command = ( |
|
586 | 610 | '/bin/sh -c "export TMPDIR=/hgwork/tmp; ' |
|
587 | 611 | 'cd /hgwork/src/tests && %s run-tests.py %s"' % (python, test_flags) |
|
588 | 612 | ) |
|
589 | 613 | |
|
590 | 614 | chan, stdin, stdout = exec_command(ssh_client, command) |
|
591 | 615 | |
|
592 | 616 | stdin.close() |
|
593 | 617 | |
|
594 | 618 | for line in stdout: |
|
595 | 619 | print(line, end='') |
|
596 | 620 | |
|
597 | 621 | return chan.recv_exit_status() |
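The helpers above compose into the usual sync-prepare-test loop against a remote instance. A sketch of how a driver might wire them together (the revision, Python version, and flags are illustrative):

    import pathlib

    def run_linux_tests_sketch(instance, repo: pathlib.Path, rev: str) -> int:
        # Push the requested revision and update the remote working copy.
        synchronize_hg(repo, instance, rev)
        # Stage /hgwork on the default filesystem.
        prepare_exec_environment(instance.ssh_client)
        # Run the harness with the pyenv-provided CPython 3.9 shim.
        return run_tests(instance.ssh_client, '3.9', ['--timeout', '600'])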
@@ -1,215 +1,306 b'' | |||
|
1 | 1 | # |
|
2 | 2 | # This file is autogenerated by pip-compile |
|
3 | 3 | # To update, run: |
|
4 | 4 | # |
|
5 | 5 | # pip-compile --generate-hashes --output-file=contrib/automation/linux-requirements-py3.txt contrib/automation/linux-requirements.txt.in |
|
6 | 6 | # |
|
7 | 7 | appdirs==1.4.4 \ |
|
8 | 8 | --hash=sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41 \ |
|
9 | --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 \ | |

9 | --hash=sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128 | |
|
10 | 10 | # via black |
|
11 | astroid==2.4.2 \ | |

12 | --hash=sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703 \ | |
|
13 | --hash=sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386 \ | |
|
11 | astroid==2.5.6 \ | |
|
12 | --hash=sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e \ | |
|
13 | --hash=sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975 | |
|
14 | 14 | # via pylint |
|
15 | attrs==2

16 | --hash=sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594 \ | |
|
17 | --hash=sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc \ | |
|
15 | attrs==21.1.0 \ | |
|
16 | --hash=sha256:3901be1cb7c2a780f14668691474d9252c070a756be0a9ead98cfeabfa11aeb8 \ | |
|
17 | --hash=sha256:8ee1e5f5a1afc5b19bdfae4fdf0c35ed324074bdce3500c939842c8f818645d9 | |
|
18 | 18 | # via black |
|
19 | 19 | black==19.10b0 ; python_version >= "3.6" and platform_python_implementation != "PyPy" \ |
|
20 | 20 | --hash=sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b \ |
|
21 | --hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 \ | |

21 | --hash=sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539 | |
|
22 | 22 | # via -r contrib/automation/linux-requirements.txt.in |
|
23 | 23 | click==7.1.2 \ |
|
24 | 24 | --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \ |
|
25 | --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc \ | |

25 | --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc | |
|
26 | 26 | # via black |
|
27 | docutils==0.1

28 | --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \ | |
|
29 | --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc \ | |
|
27 | docutils==0.17.1 \ | |
|
28 | --hash=sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125 \ | |
|
29 | --hash=sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61 | |
|
30 | 30 | # via -r contrib/automation/linux-requirements.txt.in |
|
31 | 31 | fuzzywuzzy==0.18.0 \ |
|
32 | 32 | --hash=sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8 \ |
|
33 | --hash=sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993 \ | |

33 | --hash=sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993 | |
|
34 | 34 | # via -r contrib/automation/linux-requirements.txt.in |
|
35 | idna==

36 | --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ | |
|
37 | --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \ | |
|
35 | idna==3.1 \ | |
|
36 | --hash=sha256:5205d03e7bcbb919cc9c19885f9920d622ca52448306f2377daede5cf3faac16 \ | |
|
37 | --hash=sha256:c5b02147e01ea9920e6b0a3f1f7bb833612d507592c837a6c49552768f4054e1 | |
|
38 | 38 | # via yarl |
|
39 | 39 | isort==4.3.21 \ |
|
40 | 40 | --hash=sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1 \ |
|
41 | --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd \ | |

42 | # via -r contrib/automation/linux-requirements.txt.in, pylint | |
|
43 | lazy-object-proxy==1.4.3 \ | |
|
44 | --hash=sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d \ | |
|
45 | --hash=sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449 \ | |
|
46 | --hash=sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08 \ | |
|
47 | --hash=sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a \ | |
|
48 | --hash=sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50 \ | |
|
49 | --hash=sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd \ | |
|
50 | --hash=sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239 \ | |
|
51 | --hash=sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb \ | |
|
52 | --hash=sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea \ | |
|
53 | --hash=sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e \ | |
|
54 | --hash=sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156 \ | |
|
55 | --hash=sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142 \ | |
|
56 | --hash=sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442 \ | |
|
57 | --hash=sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62 \ | |
|
58 | --hash=sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db \ | |
|
59 | --hash=sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531 \ | |
|
60 | --hash=sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383 \ | |
|
61 | --hash=sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a \ | |
|
62 | --hash=sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357 \ | |
|
63 | --hash=sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4 \ | |
|
64 | --hash=sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0 \ | |
|
41 | --hash=sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd | |
|
42 | # via | |
|
43 | # -r contrib/automation/linux-requirements.txt.in | |
|
44 | # pylint | |
|
45 | lazy-object-proxy==1.6.0 \ | |
|
46 | --hash=sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653 \ | |
|
47 | --hash=sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61 \ | |
|
48 | --hash=sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2 \ | |
|
49 | --hash=sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837 \ | |
|
50 | --hash=sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3 \ | |
|
51 | --hash=sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43 \ | |
|
52 | --hash=sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726 \ | |
|
53 | --hash=sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3 \ | |
|
54 | --hash=sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587 \ | |
|
55 | --hash=sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8 \ | |
|
56 | --hash=sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a \ | |
|
57 | --hash=sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd \ | |
|
58 | --hash=sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f \ | |
|
59 | --hash=sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad \ | |
|
60 | --hash=sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4 \ | |
|
61 | --hash=sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b \ | |
|
62 | --hash=sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf \ | |
|
63 | --hash=sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981 \ | |
|
64 | --hash=sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741 \ | |
|
65 | --hash=sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e \ | |
|
66 | --hash=sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93 \ | |
|
67 | --hash=sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b | |
|
65 | 68 | # via astroid |
|
66 | 69 | mccabe==0.6.1 \ |
|
67 | 70 | --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ |
|
68 | --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f \ | |

71 | --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f | |
|
69 | 72 | # via pylint |
|
70 | multidict==

71 | --hash=sha256:1ece5a3369835c20ed57adadc663400b5525904e53bae59ec854a5d36b39b21a \ | |
|
72 | --hash=sha256:275ca32383bc5d1894b6975bb4ca6a7ff16ab76fa622967625baeebcf8079000 \ | |
|
73 | --hash=sha256:3750f2205b800aac4bb03b5ae48025a64e474d2c6cc79547988ba1d4122a09e2 \ | |
|
74 | --hash=sha256:4538273208e7294b2659b1602490f4ed3ab1c8cf9dbdd817e0e9db8e64be2507 \ | |
|
75 | --hash=sha256:5141c13374e6b25fe6bf092052ab55c0c03d21bd66c94a0e3ae371d3e4d865a5 \ | |
|
76 | --hash=sha256:51a4d210404ac61d32dada00a50ea7ba412e6ea945bbe992e4d7a595276d2ec7 \ | |
|
77 | --hash=sha256:5cf311a0f5ef80fe73e4f4c0f0998ec08f954a6ec72b746f3c179e37de1d210d \ | |
|
78 | --hash=sha256:6513728873f4326999429a8b00fc7ceddb2509b01d5fd3f3be7881a257b8d463 \ | |
|
79 | --hash=sha256:7388d2ef3c55a8ba80da62ecfafa06a1c097c18032a501ffd4cabbc52d7f2b19 \ | |
|
80 | --hash=sha256:9456e90649005ad40558f4cf51dbb842e32807df75146c6d940b6f5abb4a78f3 \ | |
|
81 | --hash=sha256:c026fe9a05130e44157b98fea3ab12969e5b60691a276150db9eda71710cd10b \ | |
|
82 | --hash=sha256:d14842362ed4cf63751648e7672f7174c9818459d169231d03c56e84daf90b7c \ | |
|
83 | --hash=sha256:e0d072ae0f2a179c375f67e3da300b47e1a83293c554450b29c900e50afaae87 \ | |
|
84 | --hash=sha256:f07acae137b71af3bb548bd8da720956a3bc9f9a0b87733e0899226a2317aeb7 \ | |
|
85 | --hash=sha256:fbb77a75e529021e7c4a8d4e823d88ef4d23674a202be4f5addffc72cbb91430 \ | |
|
86 | --hash=sha256:fcfbb44c59af3f8ea984de67ec7c306f618a3ec771c2843804069917a8f2e255 \ | |
|
87 | --hash=sha256:feed85993dbdb1dbc29102f50bca65bdc68f2c0c8d352468c25b54874f23c39d \ | |
|
73 | multidict==5.1.0 \ | |
|
74 | --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ | |
|
75 | --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \ | |
|
76 | --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \ | |
|
77 | --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \ | |
|
78 | --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \ | |
|
79 | --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \ | |
|
80 | --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \ | |
|
81 | --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \ | |
|
82 | --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \ | |
|
83 | --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \ | |
|
84 | --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \ | |
|
85 | --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \ | |
|
86 | --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \ | |
|
87 | --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \ | |
|
88 | --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \ | |
|
89 | --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \ | |
|
90 | --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \ | |
|
91 | --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \ | |
|
92 | --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \ | |
|
93 | --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \ | |
|
94 | --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \ | |
|
95 | --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \ | |
|
96 | --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \ | |
|
97 | --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \ | |
|
98 | --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \ | |
|
99 | --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \ | |
|
100 | --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \ | |
|
101 | --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \ | |
|
102 | --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \ | |
|
103 | --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \ | |
|
104 | --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \ | |
|
105 | --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \ | |
|
106 | --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \ | |
|
107 | --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \ | |
|
108 | --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \ | |
|
109 | --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ | |
|
110 | --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 | |
|
88 | 111 | # via yarl |
|
89 | pathspec==0.8.0 \ | |

90 | --hash=sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0 \ | |
|
91 | --hash=sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061 \ | |
|
112 | pathspec==0.8.1 \ | |
|
113 | --hash=sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd \ | |
|
114 | --hash=sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d | |
|
92 | 115 | # via black |
|
93 | pyflakes==2.

94 | --hash=sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92 \ | |
|
95 | --hash=sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8 \ | |
|
116 | pyflakes==2.3.1 \ | |
|
117 | --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ | |
|
118 | --hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db | |
|
96 | 119 | # via -r contrib/automation/linux-requirements.txt.in |
|
97 | pygments==2.

98 | --hash=sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998 \ | |
|
99 | --hash=sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7 \ | |
|
120 | pygments==2.9.0 \ | |
|
121 | --hash=sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f \ | |
|
122 | --hash=sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e | |
|
100 | 123 | # via -r contrib/automation/linux-requirements.txt.in |
|
101 | pylint==2.6.0 \ | |
|
102 | --hash=sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210 \ | |
|
103 | --hash=sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f \ | |
|
124 | pylint==2.8.2 \ | |
|
125 | --hash=sha256:586d8fa9b1891f4b725f587ef267abe2a1bad89d6b184520c7f07a253dd6e217 \ | |
|
126 | --hash=sha256:f7e2072654a6b6afdf5e2fb38147d3e2d2d43c89f648637baab63e026481279b | |
|
127 | # via -r contrib/automation/linux-requirements.txt.in | |
|
128 | python-levenshtein==0.12.2 \ | |
|
129 | --hash=sha256:dc2395fbd148a1ab31090dd113c366695934b9e85fe5a4b2a032745efd0346f6 | |
|
104 | 130 | # via -r contrib/automation/linux-requirements.txt.in |
|
105 | python-levenshtein==0.12.0 \ | |
|
106 | --hash=sha256:033a11de5e3d19ea25c9302d11224e1a1898fe5abd23c61c7c360c25195e3eb1 \ | |
|
107 | # via -r contrib/automation/linux-requirements.txt.in | |
|
108 | pyyaml==5.3.1 \ | |
|
109 | --hash=sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97 \ | |
|
110 | --hash=sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76 \ | |
|
111 | --hash=sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2 \ | |
|
112 | --hash=sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648 \ | |
|
113 | --hash=sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf \ | |
|
114 | --hash=sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f \ | |
|
115 | --hash=sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2 \ | |
|
116 | --hash=sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee \ | |
|
117 | --hash=sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d \ | |
|
118 | --hash=sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c \ | |
|
119 | --hash=sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a \ | |
|
131 | pyyaml==5.4.1 \ | |
|
132 | --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ | |
|
133 | --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ | |
|
134 | --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ | |
|
135 | --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ | |
|
136 | --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ | |
|
137 | --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ | |
|
138 | --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ | |
|
139 | --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ | |
|
140 | --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ | |
|
141 | --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ | |
|
142 | --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ | |
|
143 | --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ | |
|
144 | --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ | |
|
145 | --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ | |
|
146 | --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ | |
|
147 | --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ | |
|
148 | --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ | |
|
149 | --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ | |
|
150 | --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ | |
|
151 | --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ | |
|
152 | --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ | |
|
153 | --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ | |
|
154 | --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ | |
|
155 | --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ | |
|
156 | --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ | |
|
157 | --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ | |
|
158 | --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ | |
|
159 | --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ | |
|
160 | --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 | |
|
120 | 161 | # via vcrpy |
|
121 | regex==202 | |
|
122 | --hash=sha256:088afc8c63e7bd187a3c70a94b9e50ab3f17e1d3f52a32750b5b77dbe99ef5ef \ | |
|
123 | --hash=sha256:1fe0a41437bbd06063aa184c34804efa886bcc128222e9916310c92cd54c3b4c \ | |
|
124 | --hash=sha256:3d20024a70b97b4f9546696cbf2fd30bae5f42229fbddf8661261b1eaff0deb7 \ | |
|
125 | --hash=sha256:41bb65f54bba392643557e617316d0d899ed5b4946dccee1cb6696152b29844b \ | |
|
126 | --hash=sha256:4318d56bccfe7d43e5addb272406ade7a2274da4b70eb15922a071c58ab0108c \ | |
|
127 | --hash=sha256:4707f3695b34335afdfb09be3802c87fa0bc27030471dbc082f815f23688bc63 \ | |
|
128 | --hash=sha256:49f23ebd5ac073765ecbcf046edc10d63dcab2f4ae2bce160982cb30df0c0302 \ | |
|
129 | --hash=sha256:5533a959a1748a5c042a6da71fe9267a908e21eded7a4f373efd23a2cbdb0ecc \ | |
|
130 | --hash=sha256:5d892a4f1c999834eaa3c32bc9e8b976c5825116cde553928c4c8e7e48ebda67 \ | |
|
131 | --hash=sha256:5f18875ac23d9aa2f060838e8b79093e8bb2313dbaaa9f54c6d8e52a5df097be \ | |
|
132 | --hash=sha256:60b0e9e6dc45683e569ec37c55ac20c582973841927a85f2d8a7d20ee80216ab \ | |
|
133 | --hash=sha256:816064fc915796ea1f26966163f6845de5af78923dfcecf6551e095f00983650 \ | |
|
134 | --hash=sha256:84cada8effefe9a9f53f9b0d2ba9b7b6f5edf8d2155f9fdbe34616e06ececf81 \ | |
|
135 | --hash=sha256:84e9407db1b2eb368b7ecc283121b5e592c9aaedbe8c78b1a2f1102eb2e21d19 \ | |
|
136 | --hash=sha256:8d69cef61fa50c8133382e61fd97439de1ae623fe943578e477e76a9d9471637 \ | |
|
137 | --hash=sha256:9a02d0ae31d35e1ec12a4ea4d4cca990800f66a917d0fb997b20fbc13f5321fc \ | |
|
138 | --hash=sha256:9bc13e0d20b97ffb07821aa3e113f9998e84994fe4d159ffa3d3a9d1b805043b \ | |
|
139 | --hash=sha256:a6f32aea4260dfe0e55dc9733ea162ea38f0ea86aa7d0f77b15beac5bf7b369d \ | |
|
140 | --hash=sha256:ae91972f8ac958039920ef6e8769277c084971a142ce2b660691793ae44aae6b \ | |
|
141 | --hash=sha256:c570f6fa14b9c4c8a4924aaad354652366577b4f98213cf76305067144f7b100 \ | |
|
142 | --hash=sha256:c9443124c67b1515e4fe0bb0aa18df640965e1030f468a2a5dc2589b26d130ad \ | |
|
143 | --hash=sha256:d23a18037313714fb3bb5a94434d3151ee4300bae631894b1ac08111abeaa4a3 \ | |
|
144 | --hash=sha256:eaf548d117b6737df379fdd53bdde4f08870e66d7ea653e230477f071f861121 \ | |
|
145 | --hash=sha256:ebbe29186a3d9b0c591e71b7393f1ae08c83cb2d8e517d2a822b8f7ec99dfd8b \ | |
|
146 | --hash=sha256:eda4771e0ace7f67f58bc5b560e27fb20f32a148cbc993b0c3835970935c2707 \ | |
|
147 | --hash=sha256:f1b3afc574a3db3b25c89161059d857bd4909a1269b0b3cb3c904677c8c4a3f7 \ | |
|
148 | --hash=sha256:f2388013e68e750eaa16ccbea62d4130180c26abb1d8e5d584b9baf69672b30f \ | |
|
162 | regex==2021.4.4 \ | |
|
163 | --hash=sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5 \ | |
|
164 | --hash=sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79 \ | |
|
165 | --hash=sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31 \ | |
|
166 | --hash=sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500 \ | |
|
167 | --hash=sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11 \ | |
|
168 | --hash=sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14 \ | |
|
169 | --hash=sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3 \ | |
|
170 | --hash=sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439 \ | |
|
171 | --hash=sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c \ | |
|
172 | --hash=sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82 \ | |
|
173 | --hash=sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711 \ | |
|
174 | --hash=sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093 \ | |
|
175 | --hash=sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a \ | |
|
176 | --hash=sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb \ | |
|
177 | --hash=sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8 \ | |
|
178 | --hash=sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17 \ | |
|
179 | --hash=sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000 \ | |
|
180 | --hash=sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d \ | |
|
181 | --hash=sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480 \ | |
|
182 | --hash=sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc \ | |
|
183 | --hash=sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0 \ | |
|
184 | --hash=sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9 \ | |
|
185 | --hash=sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765 \ | |
|
186 | --hash=sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e \ | |
|
187 | --hash=sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a \ | |
|
188 | --hash=sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07 \ | |
|
189 | --hash=sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f \ | |
|
190 | --hash=sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac \ | |
|
191 | --hash=sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7 \ | |
|
192 | --hash=sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed \ | |
|
193 | --hash=sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968 \ | |
|
194 | --hash=sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7 \ | |
|
195 | --hash=sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2 \ | |
|
196 | --hash=sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4 \ | |
|
197 | --hash=sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87 \ | |
|
198 | --hash=sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8 \ | |
|
199 | --hash=sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10 \ | |
|
200 | --hash=sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29 \ | |
|
201 | --hash=sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605 \ | |
|
202 | --hash=sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6 \ | |
|
203 | --hash=sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042 | |
|
149 | 204 | # via black |
|
150 | six==1.15.0 \ | |
|
151 | --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ | |
|
152 | --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \ | |
|
153 | # via | |
|
154 | toml==0.10.1 \ | |
|
155 | --hash=sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f \ | |
|
156 | --hash=sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88 \ | |
|
157 | # via black, pylint | |
|
158 | typed-ast==1.4.1 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \ | |
|
159 | --hash=sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355 \ | |
|
160 | --hash=sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919 \ | |
|
161 | --hash=sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa \ | |
|
162 | --hash=sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652 \ | |
|
163 | --hash=sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75 \ | |
|
164 | --hash=sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01 \ | |
|
165 | --hash=sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d \ | |
|
166 | --hash=sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1 \ | |
|
167 | --hash=sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907 \ | |
|
168 | --hash=sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c \ | |
|
169 | --hash=sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3 \ | |
|
170 | --hash=sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b \ | |
|
171 | --hash=sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614 \ | |
|
172 | --hash=sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb \ | |
|
173 | --hash=sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b \ | |
|
174 | --hash=sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41 \ | |
|
175 | --hash=sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6 \ | |
|
176 | --hash=sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34 \ | |
|
177 | --hash=sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe \ | |
|
178 | --hash=sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4 \ | |
|
179 | --hash=sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7 \ | |
|
180 | # via -r contrib/automation/linux-requirements.txt.in, astroid, black | |
|
181 | typing-extensions==3.7.4.3 \ | |
|
182 | --hash=sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918 \ | |
|
183 | --hash=sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c \ | |
|
184 | --hash=sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f \ | |
|
205 | six==1.16.0 \ | |
|
206 | --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ | |
|
207 | --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 | |
|
208 | # via vcrpy | |
|
209 | toml==0.10.2 \ | |
|
210 | --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ | |
|
211 | --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f | |
|
212 | # via | |
|
213 | # black | |
|
214 | # pylint | |
|
215 | typed-ast==1.4.3 ; python_version >= "3.0" and platform_python_implementation != "PyPy" \ | |
|
216 | --hash=sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace \ | |
|
217 | --hash=sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff \ | |
|
218 | --hash=sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266 \ | |
|
219 | --hash=sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528 \ | |
|
220 | --hash=sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6 \ | |
|
221 | --hash=sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808 \ | |
|
222 | --hash=sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4 \ | |
|
223 | --hash=sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363 \ | |
|
224 | --hash=sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341 \ | |
|
225 | --hash=sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04 \ | |
|
226 | --hash=sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41 \ | |
|
227 | --hash=sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e \ | |
|
228 | --hash=sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3 \ | |
|
229 | --hash=sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899 \ | |
|
230 | --hash=sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805 \ | |
|
231 | --hash=sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c \ | |
|
232 | --hash=sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c \ | |
|
233 | --hash=sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39 \ | |
|
234 | --hash=sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a \ | |
|
235 | --hash=sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3 \ | |
|
236 | --hash=sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7 \ | |
|
237 | --hash=sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f \ | |
|
238 | --hash=sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075 \ | |
|
239 | --hash=sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0 \ | |
|
240 | --hash=sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40 \ | |
|
241 | --hash=sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428 \ | |
|
242 | --hash=sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927 \ | |
|
243 | --hash=sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3 \ | |
|
244 | --hash=sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f \ | |
|
245 | --hash=sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65 | |
|
246 | # via | |
|
247 | # -r contrib/automation/linux-requirements.txt.in | |
|
248 | # astroid | |
|
249 | # black | |
|
250 | typing-extensions==3.10.0.0 \ | |
|
251 | --hash=sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497 \ | |
|
252 | --hash=sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342 \ | |
|
253 | --hash=sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84 | |
|
185 | 254 | # via yarl |
|
186 | vcrpy==4.1.0 \ | |
|
187 | --hash=sha256:4138e79eb35981ad391406cbb7227bce7eba8bad788dcf1a89c2e4a8b740debe \ | |
|
188 | --hash=sha256:d833248442bbc560599add895c9ab0ef518676579e8dc72d8b0933bdb3880253 \ | |
|
255 | vcrpy==4.1.1 \ | |
|
256 | --hash=sha256:12c3fcdae7b88ecf11fc0d3e6d77586549d4575a2ceee18e82eee75c1f626162 \ | |
|
257 | --hash=sha256:57095bf22fc0a2d99ee9674cdafebed0f3ba763018582450706f7d3a74fff599 | |
|
189 | 258 | # via -r contrib/automation/linux-requirements.txt.in |
|
190 | 259 | wrapt==1.12.1 \ |
|
191 | --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 \ | |
|
192 | # via astroid, vcrpy | |
|
193 | yarl==1.6.0 \ | |
|
194 | --hash=sha256:04a54f126a0732af75e5edc9addeaa2113e2ca7c6fce8974a63549a70a25e50e \ | |
|
195 | --hash=sha256:3cc860d72ed989f3b1f3abbd6ecf38e412de722fb38b8f1b1a086315cf0d69c5 \ | |
|
196 | --hash=sha256:5d84cc36981eb5a8533be79d6c43454c8e6a39ee3118ceaadbd3c029ab2ee580 \ | |
|
197 | --hash=sha256:5e447e7f3780f44f890360ea973418025e8c0cdcd7d6a1b221d952600fd945dc \ | |
|
198 | --hash=sha256:61d3ea3c175fe45f1498af868879c6ffeb989d4143ac542163c45538ba5ec21b \ | |
|
199 | --hash=sha256:67c5ea0970da882eaf9efcf65b66792557c526f8e55f752194eff8ec722c75c2 \ | |
|
200 | --hash=sha256:6f6898429ec3c4cfbef12907047136fd7b9e81a6ee9f105b45505e633427330a \ | |
|
201 | --hash=sha256:7ce35944e8e61927a8f4eb78f5bc5d1e6da6d40eadd77e3f79d4e9399e263921 \ | |
|
202 | --hash=sha256:b7c199d2cbaf892ba0f91ed36d12ff41ecd0dde46cbf64ff4bfe997a3ebc925e \ | |
|
203 | --hash=sha256:c15d71a640fb1f8e98a1423f9c64d7f1f6a3a168f803042eaf3a5b5022fde0c1 \ | |
|
204 | --hash=sha256:c22607421f49c0cb6ff3ed593a49b6a99c6ffdeaaa6c944cdda83c2393c8864d \ | |
|
205 | --hash=sha256:c604998ab8115db802cc55cb1b91619b2831a6128a62ca7eea577fc8ea4d3131 \ | |
|
206 | --hash=sha256:d088ea9319e49273f25b1c96a3763bf19a882cff774d1792ae6fba34bd40550a \ | |
|
207 | --hash=sha256:db9eb8307219d7e09b33bcb43287222ef35cbcf1586ba9472b0a4b833666ada1 \ | |
|
208 | --hash=sha256:e31fef4e7b68184545c3d68baec7074532e077bd1906b040ecfba659737df188 \ | |
|
209 | --hash=sha256:e32f0fb443afcfe7f01f95172b66f279938fbc6bdaebe294b0ff6747fb6db020 \ | |
|
210 | --hash=sha256:fcbe419805c9b20db9a51d33b942feddbf6e7fb468cb20686fd7089d4164c12a \ | |
|
260 | --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 | |
|
261 | # via | |
|
262 | # astroid | |
|
263 | # vcrpy | |
|
264 | yarl==1.6.3 \ | |
|
265 | --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ | |
|
266 | --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \ | |
|
267 | --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \ | |
|
268 | --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \ | |
|
269 | --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \ | |
|
270 | --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \ | |
|
271 | --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \ | |
|
272 | --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \ | |
|
273 | --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \ | |
|
274 | --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \ | |
|
275 | --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \ | |
|
276 | --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \ | |
|
277 | --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \ | |
|
278 | --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \ | |
|
279 | --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \ | |
|
280 | --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \ | |
|
281 | --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \ | |
|
282 | --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \ | |
|
283 | --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \ | |
|
284 | --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \ | |
|
285 | --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \ | |
|
286 | --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \ | |
|
287 | --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \ | |
|
288 | --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \ | |
|
289 | --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \ | |
|
290 | --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \ | |
|
291 | --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \ | |
|
292 | --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \ | |
|
293 | --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \ | |
|
294 | --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \ | |
|
295 | --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \ | |
|
296 | --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \ | |
|
297 | --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \ | |
|
298 | --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \ | |
|
299 | --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \ | |
|
300 | --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ | |
|
301 | --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 | |
|
211 | 302 | # via vcrpy |
|
212 | 303 | |
|
213 | 304 | # WARNING: The following packages were not pinned, but pip requires them to be |
|
214 | 305 | # pinned when the requirements file includes hashes. Consider using the --allow-unsafe flag. |
|
215 | 306 | # setuptools |
@@ -1,1109 +1,1131 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | # |
|
3 | 3 | # check-code - a style and portability checker for Mercurial |
|
4 | 4 | # |
|
5 | 5 | # Copyright 2010 Olivia Mackall <olivia@selenic.com> |
|
6 | 6 | # |
|
7 | 7 | # This software may be used and distributed according to the terms of the |
|
8 | 8 | # GNU General Public License version 2 or any later version. |
|
9 | 9 | |
|
10 | 10 | """style and portability checker for Mercurial |
|
11 | 11 | |
|
12 | 12 | when a rule triggers wrong, do one of the following (prefer one from top): |
|
13 | 13 | * do the work-around the rule suggests |
|
14 | 14 | * doublecheck that it is a false match |
|
15 | 15 | * improve the rule pattern |
|
16 | 16 | * add an ignore pattern to the rule (3rd arg) which matches your good line |
|
17 | 17 | (you can append a short comment and match this, like: #re-raises) |
|
18 | 18 | * change the pattern to a warning and list the exception in test-check-code-hg |
|
19 | 19 | * ONLY use no--check-code for skipping entire files from external sources |
|
20 | 20 | """ |
|
21 | 21 | |
|
22 | 22 | from __future__ import absolute_import, print_function |
|
23 | 23 | import glob |
|
24 | 24 | import keyword |
|
25 | 25 | import optparse |
|
26 | 26 | import os |
|
27 | 27 | import re |
|
28 | 28 | import sys |
|
29 | 29 | |
|
30 | 30 | if sys.version_info[0] < 3: |
|
31 | 31 | opentext = open |
|
32 | 32 | else: |
|
33 | 33 | |
|
34 | 34 | def opentext(f): |
|
35 | 35 | return open(f, encoding='latin1') |
|
36 | 36 | |
|
37 | 37 | |
|
38 | 38 | try: |
|
39 | 39 | xrange |
|
40 | 40 | except NameError: |
|
41 | 41 | xrange = range |
|
42 | 42 | try: |
|
43 | 43 | import re2 |
|
44 | 44 | except ImportError: |
|
45 | 45 | re2 = None |
|
46 | 46 | |
|
47 | 47 | import testparseutil |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def compilere(pat, multiline=False): |
|
51 | 51 | if multiline: |
|
52 | 52 | pat = '(?m)' + pat |
|
53 | 53 | if re2: |
|
54 | 54 | try: |
|
55 | 55 | return re2.compile(pat) |
|
56 | 56 | except re2.error: |
|
57 | 57 | pass |
|
58 | 58 | return re.compile(pat) |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | # check "rules depending on implementation of repquote()" in each |
|
62 | 62 | # patterns (especially pypats), before changing around repquote() |
|
63 | 63 | _repquotefixedmap = { |
|
64 | 64 | ' ': ' ', |
|
65 | 65 | '\n': '\n', |
|
66 | 66 | '.': 'p', |
|
67 | 67 | ':': 'q', |
|
68 | 68 | '%': '%', |
|
69 | 69 | '\\': 'b', |
|
70 | 70 | '*': 'A', |
|
71 | 71 | '+': 'P', |
|
72 | 72 | '-': 'M', |
|
73 | 73 | } |
|
74 | 74 | |
|
75 | 75 | |
|
76 | 76 | def _repquoteencodechr(i): |
|
77 | 77 | if i > 255: |
|
78 | 78 | return 'u' |
|
79 | 79 | c = chr(i) |
|
80 | 80 | if c in _repquotefixedmap: |
|
81 | 81 | return _repquotefixedmap[c] |
|
82 | 82 | if c.isalpha(): |
|
83 | 83 | return 'x' |
|
84 | 84 | if c.isdigit(): |
|
85 | 85 | return 'n' |
|
86 | 86 | return 'o' |
|
87 | 87 | |
|
88 | 88 | |
|
89 | 89 | _repquotett = ''.join(_repquoteencodechr(i) for i in xrange(256)) |
|
90 | 90 | |
|
91 | 91 | |
|
92 | 92 | def repquote(m): |
|
93 | 93 | t = m.group('text') |
|
94 | 94 | t = t.translate(_repquotett) |
|
95 | 95 | return m.group('quote') + t + m.group('quote') |
|
96 | 96 | |
|
97 | 97 | |
|
98 | 98 | def reppython(m): |
|
99 | 99 | comment = m.group('comment') |
|
100 | 100 | if comment: |
|
101 | 101 | l = len(comment.rstrip()) |
|
102 | 102 | return "#" * l + comment[l:] |
|
103 | 103 | return repquote(m) |
|
104 | 104 | |
|
105 | 105 | |
|
106 | 106 | def repcomment(m): |
|
107 | 107 | return m.group(1) + "#" * len(m.group(2)) |
|
108 | 108 | |
|
109 | 109 | |
|
110 | 110 | def repccomment(m): |
|
111 | 111 | t = re.sub(r"((?<=\n) )|\S", "x", m.group(2)) |
|
112 | 112 | return m.group(1) + t + "*/" |
|
113 | 113 | |
|
114 | 114 | |
|
115 | 115 | def repcallspaces(m): |
|
116 | 116 | t = re.sub(r"\n\s+", "\n", m.group(2)) |
|
117 | 117 | return m.group(1) + t |
|
118 | 118 | |
|
119 | 119 | |
|
120 | 120 | def repinclude(m): |
|
121 | 121 | return m.group(1) + "<foo>" |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def rephere(m): |
|
125 | 125 | t = re.sub(r"\S", "x", m.group(2)) |
|
126 | 126 | return m.group(1) + t |
|
127 | 127 | |
|
128 | 128 | |
|
129 | 129 | testpats = [ |
|
130 | 130 | [ |
|
131 | 131 | (r'\b(push|pop)d\b', "don't use 'pushd' or 'popd', use 'cd'"), |
|
132 | 132 | (r'\W\$?\(\([^\)\n]*\)\)', "don't use (()) or $(()), use 'expr'"), |
|
133 | 133 | (r'grep.*-q', "don't use 'grep -q', redirect to /dev/null"), |
|
134 | 134 | (r'(?<!hg )grep.* -a', "don't use 'grep -a', use in-line python"), |
|
135 | 135 | (r'sed.*-i', "don't use 'sed -i', use a temporary file"), |
|
136 | 136 | (r'\becho\b.*\\n', "don't use 'echo \\n', use printf"), |
|
137 | 137 | (r'echo -n', "don't use 'echo -n', use printf"), |
|
138 | 138 | (r'(^|\|\s*)\bwc\b[^|]*$\n(?!.*\(re\))', "filter wc output"), |
|
139 | 139 | (r'head -c', "don't use 'head -c', use 'dd'"), |
|
140 | 140 | (r'tail -n', "don't use the '-n' option to tail, just use '-<num>'"), |
|
141 | 141 | (r'sha1sum', "don't use sha1sum, use $TESTDIR/md5sum.py"), |
|
142 | 142 | (r'\bls\b.*-\w*R', "don't use 'ls -R', use 'find'"), |
|
143 | 143 | (r'printf.*[^\\]\\([1-9]|0\d)', r"don't use 'printf \NNN', use Python"), |
|
144 | 144 | (r'printf.*[^\\]\\x', "don't use printf \\x, use Python"), |
|
145 | 145 | (r'rm -rf \*', "don't use naked rm -rf, target a directory"), |
|
146 | 146 | ( |
|
147 | 147 | r'\[[^\]]+==', |
|
148 | 148 | '[ foo == bar ] is a bashism, use [ foo = bar ] instead', |
|
149 | 149 | ), |
|
150 | 150 | ( |
|
151 | 151 | r'(^|\|\s*)grep (-\w\s+)*[^|]*[(|]\w', |
|
152 | 152 | "use egrep for extended grep syntax", |
|
153 | 153 | ), |
|
154 | 154 | (r'(^|\|\s*)e?grep .*\\S', "don't use \\S in regular expression"), |
|
155 | 155 | (r'(?<!!)/bin/', "don't use explicit paths for tools"), |
|
156 | 156 | (r'#!.*/bash', "don't use bash in shebang, use sh"), |
|
157 | 157 | (r'[^\n]\Z', "no trailing newline"), |
|
158 | 158 | (r'export .*=', "don't export and assign at once"), |
|
159 | 159 | (r'^source\b', "don't use 'source', use '.'"), |
|
160 | 160 | (r'touch -d', "don't use 'touch -d', use 'touch -t' instead"), |
|
161 | 161 | (r'\bls +[^|\n-]+ +-', "options to 'ls' must come before filenames"), |
|
162 | 162 | (r'[^>\n]>\s*\$HGRCPATH', "don't overwrite $HGRCPATH, append to it"), |
|
163 | 163 | (r'^stop\(\)', "don't use 'stop' as a shell function name"), |
|
164 | 164 | (r'(\[|\btest\b).*-e ', "don't use 'test -e', use 'test -f'"), |
|
165 | 165 | (r'\[\[\s+[^\]]*\]\]', "don't use '[[ ]]', use '[ ]'"), |
|
166 | 166 | (r'^alias\b.*=', "don't use alias, use a function"), |
|
167 | 167 | (r'if\s*!', "don't use '!' to negate exit status"), |
|
168 | 168 | (r'/dev/u?random', "don't use entropy, use /dev/zero"), |
|
169 | 169 | (r'do\s*true;\s*done', "don't use true as loop body, use sleep 0"), |
|
170 | 170 | ( |
|
171 | 171 | r'sed (-e )?\'(\d+|/[^/]*/)i(?!\\\n)', |
|
172 | 172 | "put a backslash-escaped newline after sed 'i' command", |
|
173 | 173 | ), |
|
174 | 174 | (r'^diff *-\w*[uU].*$\n(^ \$ |^$)', "prefix diff -u/-U with cmp"), |
|
175 | 175 | (r'^\s+(if)? diff *-\w*[uU]', "prefix diff -u/-U with cmp"), |
|
176 | 176 | (r'[\s="`\']python\s(?!bindings)', "don't use 'python', use '$PYTHON'"), |
|
177 | 177 | (r'seq ', "don't use 'seq', use $TESTDIR/seq.py"), |
|
178 | 178 | (r'\butil\.Abort\b', "directly use error.Abort"), |
|
179 | 179 | (r'\|&', "don't use |&, use 2>&1"), |
|
180 | 180 | (r'\w = +\w', "only one space after = allowed"), |
|
181 | 181 | ( |
|
182 | 182 | r'\bsed\b.*[^\\]\\n', |
|
183 | 183 | "don't use 'sed ... \\n', use a \\ and a newline", |
|
184 | 184 | ), |
|
185 | 185 | (r'env.*-u', "don't use 'env -u VAR', use 'unset VAR'"), |
|
186 | 186 | (r'cp.* -r ', "don't use 'cp -r', use 'cp -R'"), |
|
187 | 187 | (r'grep.* -[ABC]', "don't use grep's context flags"), |
|
188 | 188 | ( |
|
189 | 189 | r'find.*-printf', |
|
190 | 190 | "don't use 'find -printf', it doesn't exist on BSD find(1)", |
|
191 | 191 | ), |
|
192 | 192 | (r'\$RANDOM ', "don't use bash-only $RANDOM to generate random values"), |
|
193 | 193 | ], |
|
194 | 194 | # warnings |
|
195 | 195 | [ |
|
196 | 196 | (r'^function', "don't use 'function', use old style"), |
|
197 | 197 | (r'^diff.*-\w*N', "don't use 'diff -N'"), |
|
198 | 198 | (r'\$PWD|\${PWD}', "don't use $PWD, use `pwd`"), |
|
199 | 199 | (r'^([^"\'\n]|("[^"\n]*")|(\'[^\'\n]*\'))*\^', "^ must be quoted"), |
|
200 | 200 | (r'kill (`|\$\()', "don't use kill, use killdaemons.py"), |
|
201 | 201 | ], |
|
202 | 202 | ] |
|
203 | 203 | |
|
204 | 204 | testfilters = [ |
|
205 | 205 | (r"( *)(#([^!][^\n]*\S)?)", repcomment), |
|
206 | 206 | (r"<<(\S+)((.|\n)*?\n\1)", rephere), |
|
207 | 207 | ] |
|
208 | 208 | |
|
209 | 209 | uprefix = r"^ \$ " |
|
210 | 210 | utestpats = [ |
|
211 | 211 | [ |
|
212 | 212 | (r'^(\S.*|| [$>] \S.*)[ \t]\n', "trailing whitespace on non-output"), |
|
213 | 213 | ( |
|
214 | 214 | uprefix + r'.*\|\s*sed[^|>\n]*\n', |
|
215 | 215 | "use regex test output patterns instead of sed", |
|
216 | 216 | ), |
|
217 | 217 | (uprefix + r'(true|exit 0)', "explicit zero exit unnecessary"), |
|
218 | (uprefix + r'.*(?<!\[)\$\?', "explicit exit code checks unnecessary"), | |
|
219 | 218 | ( |
|
220 | 219 | uprefix + r'.*\|\| echo.*(fail|error)', |
|
221 | 220 | "explicit exit code checks unnecessary", |
|
222 | 221 | ), |
|
223 | 222 | (uprefix + r'set -e', "don't use set -e"), |
|
224 | 223 | (uprefix + r'(\s|fi\b|done\b)', "use > for continued lines"), |
|
225 | 224 | ( |
|
226 | 225 | uprefix + r'.*:\.\S*/', |
|
227 | 226 | "x:.y in a path does not work on msys, rewrite " |
|
228 | 227 | "as x://.y, or see `hg log -k msys` for alternatives", |
|
229 | 228 | r'-\S+:\.|' '# no-msys', # -Rxxx |
|
230 | 229 | ), # in test-pull.t which is skipped on windows |
|
231 | 230 | ( |
|
232 | 231 | r'^ [^$>].*27\.0\.0\.1', |
|
233 | 232 | 'use $LOCALIP not an explicit loopback address', |
|
234 | 233 | ), |
|
235 | 234 | ( |
|
236 | 235 | r'^ (?![>$] ).*\$LOCALIP.*[^)]$', |
|
237 | 236 | 'mark $LOCALIP output lines with (glob) to help tests in BSD jails', |
|
238 | 237 | ), |
|
239 | 238 | ( |
|
240 | 239 | r'^ (cat|find): .*: \$ENOENT\$', |
|
241 | 240 | 'use test -f to test for file existence', |
|
242 | 241 | ), |
|
243 | 242 | ( |
|
244 | 243 | r'^ diff -[^ -]*p', |
|
245 | 244 | "don't use (external) diff with -p for portability", |
|
246 | 245 | ), |
|
247 | 246 | (r' readlink ', 'use readlink.py instead of readlink'), |
|
248 | 247 | ( |
|
249 | 248 | r'^ [-+][-+][-+] .* [-+]0000 \(glob\)', |
|
250 | 249 | "glob timezone field in diff output for portability", |
|
251 | 250 | ), |
|
252 | 251 | ( |
|
253 | 252 | r'^ @@ -[0-9]+ [+][0-9]+,[0-9]+ @@', |
|
254 | 253 | "use '@@ -N* +N,n @@ (glob)' style chunk header for portability", |
|
255 | 254 | ), |
|
256 | 255 | ( |
|
257 | 256 | r'^ @@ -[0-9]+,[0-9]+ [+][0-9]+ @@', |
|
258 | 257 | "use '@@ -N,n +N* @@ (glob)' style chunk header for portability", |
|
259 | 258 | ), |
|
260 | 259 | ( |
|
261 | 260 | r'^ @@ -[0-9]+ [+][0-9]+ @@', |
|
262 | 261 | "use '@@ -N* +N* @@ (glob)' style chunk header for portability", |
|
263 | 262 | ), |
|
264 | 263 | ( |
|
265 | 264 | uprefix + r'hg( +-[^ ]+( +[^ ]+)?)* +extdiff' |
|
266 | 265 | r'( +(-[^ po-]+|--(?!program|option)[^ ]+|[^-][^ ]*))*$', |
|
267 | 266 | "use $RUNTESTDIR/pdiff via extdiff (or -o/-p for false-positives)", |
|
268 | 267 | ), |
|
269 | 268 | ], |
|
270 | 269 | # warnings |
|
271 | 270 | [ |
|
272 | 271 | ( |
|
273 | 272 | r'^ (?!.*\$LOCALIP)[^*?/\n]* \(glob\)$', |
|
274 | 273 | "glob match with no glob string (?, *, /, and $LOCALIP)", |
|
275 | 274 | ), |
|
276 | 275 | ], |
|
277 | 276 | ] |
|
278 | 277 | |
|
279 | 278 | # transform plain test rules to unified test's |
|
280 | 279 | for i in [0, 1]: |
|
281 | 280 | for tp in testpats[i]: |
|
282 | 281 | p = tp[0] |
|
283 | 282 | m = tp[1] |
|
284 | 283 | if p.startswith('^'): |
|
285 | 284 | p = "^ [$>] (%s)" % p[1:] |
|
286 | 285 | else: |
|
287 | 286 | p = "^ [$>] .*(%s)" % p |
|
288 | 287 | utestpats[i].append((p, m) + tp[2:]) |
|
289 | 288 | |
|
290 | 289 | # don't transform the following rules: |
|
291 | 290 | # " > \t" and " \t" should be allowed in unified tests |
|
292 | 291 | testpats[0].append((r'^( *)\t', "don't use tabs to indent")) |
|
293 | 292 | utestpats[0].append((r'^( ?)\t', "don't use tabs to indent")) |
|
294 | 293 | |
|
295 | 294 | utestfilters = [ |
|
296 | 295 | (r"<<(\S+)((.|\n)*?\n > \1)", rephere), |
|
297 | 296 | (r"( +)(#([^!][^\n]*\S)?)", repcomment), |
|
298 | 297 | ] |
|
299 | 298 | |
|
300 | 299 | # common patterns to check *.py |
|
301 | 300 | commonpypats = [ |
|
302 | 301 | [ |
|
303 | 302 | (r'\\$', 'Use () to wrap long lines in Python, not \\'), |
|
304 | 303 | ( |
|
305 | 304 | r'^\s*def\s*\w+\s*\(.*,\s*\(', |
|
306 | 305 | "tuple parameter unpacking not available in Python 3+", |
|
307 | 306 | ), |
|
308 | 307 | ( |
|
309 | 308 | r'lambda\s*\(.*,.*\)', |
|
310 | 309 | "tuple parameter unpacking not available in Python 3+", |
|
311 | 310 | ), |
|
312 | 311 | (r'(?<!def)\s+(cmp)\(', "cmp is not available in Python 3+"), |
|
313 | 312 | (r'(?<!\.)\breduce\s*\(.*', "reduce is not available in Python 3+"), |
|
314 | 313 | ( |
|
315 | 314 | r'\bdict\(.*=', |
|
316 | 315 | 'dict() is different in Py2 and 3 and is slower than {}', |
|
317 | 316 | 'dict-from-generator', |
|
318 | 317 | ), |
|
319 | 318 | (r'\.has_key\b', "dict.has_key is not available in Python 3+"), |
|
320 | 319 | (r'\s<>\s', '<> operator is not available in Python 3+, use !='), |
|
321 | 320 | (r'^\s*\t', "don't use tabs"), |
|
322 | 321 | (r'\S;\s*\n', "semicolon"), |
|
323 | 322 | (r'[^_]_\([ \t\n]*(?:"[^"]+"[ \t\n+]*)+%', "don't use % inside _()"), |
|
324 | 323 | (r"[^_]_\([ \t\n]*(?:'[^']+'[ \t\n+]*)+%", "don't use % inside _()"), |
|
325 | 324 | (r'(\w|\)),\w', "missing whitespace after ,"), |
|
326 | 325 | (r'(\w|\))[+/*\-<>]\w', "missing whitespace in expression"), |
|
327 | 326 | (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), |
|
328 | 327 | ( |
|
329 | 328 | ( |
|
330 | 329 | # a line ending with a colon, potentially with trailing comments |
|
331 | 330 | r':([ \t]*#[^\n]*)?\n' |
|
332 | 331 | # one that is not a pass and not only a comment |
|
333 | 332 | r'(?P<indent>[ \t]+)[^#][^\n]+\n' |
|
334 | 333 | # more lines at the same indent level |
|
335 | 334 | r'((?P=indent)[^\n]+\n)*' |
|
336 | 335 | # a pass at the same indent level, which is bogus |
|
337 | 336 | r'(?P=indent)pass[ \t\n#]' |
|
338 | 337 | ), |
|
339 | 338 | 'omit superfluous pass', |
|
340 | 339 | ), |
|
341 | 340 | (r'[^\n]\Z', "no trailing newline"), |
|
342 | 341 | (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), |
|
343 | 342 | ( |
|
344 | 343 | r'^\s+(self\.)?[A-Za-z][a-z0-9]+[A-Z]\w* = ', |
|
345 | 344 | "don't use camelcase in identifiers", |
|
346 | 345 | r'#.*camelcase-required', |
|
347 | 346 | ), |
|
348 | 347 | ( |
|
349 | 348 | r'^\s*(if|while|def|class|except|try)\s[^[\n]*:\s*[^\\n]#\s]+', |
|
350 | 349 | "linebreak after :", |
|
351 | 350 | ), |
|
352 | 351 | ( |
|
353 | 352 | r'class\s[^( \n]+:', |
|
354 | 353 | "old-style class, use class foo(object)", |
|
355 | 354 | r'#.*old-style', |
|
356 | 355 | ), |
|
357 | 356 | ( |
|
358 | 357 | r'class\s[^( \n]+\(\):', |
|
359 | 358 | "class foo() creates old style object, use class foo(object)", |
|
360 | 359 | r'#.*old-style', |
|
361 | 360 | ), |
|
362 | 361 | ( |
|
363 | 362 | r'\b(%s)\(' |
|
364 | 363 | % '|'.join(k for k in keyword.kwlist if k not in ('print', 'exec')), |
|
365 | 364 | "Python keyword is not a function", |
|
366 | 365 | ), |
|
367 | 366 | # (r'class\s[A-Z][^\(]*\((?!Exception)', |
|
368 | 367 | # "don't capitalize non-exception classes"), |
|
369 | 368 | # (r'in range\(', "use xrange"), |
|
370 | 369 | # (r'^\s*print\s+', "avoid using print in core and extensions"), |
|
371 | 370 | (r'[\x80-\xff]', "non-ASCII character literal"), |
|
372 | 371 | (r'("\')\.format\(', "str.format() has no bytes counterpart, use %"), |
|
373 | 372 | ( |
|
374 | 373 | r'([\(\[][ \t]\S)|(\S[ \t][\)\]])', |
|
375 | 374 | "gratuitous whitespace in () or []", |
|
376 | 375 | ), |
|
377 | 376 | # (r'\s\s=', "gratuitous whitespace before ="), |
|
378 | 377 | ( |
|
379 | 378 | r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S', |
|
380 | 379 | "missing whitespace around operator", |
|
381 | 380 | ), |
|
382 | 381 | ( |
|
383 | 382 | r'[^>< ](\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\s', |
|
384 | 383 | "missing whitespace around operator", |
|
385 | 384 | ), |
|
386 | 385 | ( |
|
387 | 386 | r'\s(\+=|-=|!=|<>|<=|>=|<<=|>>=|%=)\S', |
|
388 | 387 | "missing whitespace around operator", |
|
389 | 388 | ), |
|
390 | 389 | (r'[^^+=*/!<>&| %-](\s=|=\s)[^= ]', "wrong whitespace around ="), |
|
391 | 390 | ( |
|
392 | 391 | r'\([^()]*( =[^=]|[^<>!=]= )', |
|
393 | 392 | "no whitespace around = for named parameters", |
|
394 | 393 | ), |
|
395 | 394 | ( |
|
396 | 395 | r'raise [^,(]+, (\([^\)]+\)|[^,\(\)]+)$', |
|
397 | 396 | "don't use old-style two-argument raise, use Exception(message)", |
|
398 | 397 | ), |
|
399 | 398 | (r' is\s+(not\s+)?["\'0-9-]', "object comparison with literal"), |
|
400 | 399 | ( |
|
401 | 400 | r' [=!]=\s+(True|False|None)', |
|
402 | 401 | "comparison with singleton, use 'is' or 'is not' instead", |
|
403 | 402 | ), |
|
404 | 403 | ( |
|
405 | 404 | r'^\s*(while|if) [01]:', |
|
406 | 405 | "use True/False for constant Boolean expression", |
|
407 | 406 | ), |
|
408 | 407 | (r'^\s*if False(:| +and)', 'Remove code instead of using `if False`'), |
|
409 | 408 | ( |
|
410 | 409 | r'(?:(?<!def)\s+|\()hasattr\(', |
|
411 | 410 | 'hasattr(foo, bar) is broken on py2, use util.safehasattr(foo, bar) ' |
|
412 | 411 | 'instead', |
|
413 | 412 | r'#.*hasattr-py3-only', |
|
414 | 413 | ), |
|
415 | 414 | (r'opener\([^)]*\).read\(', "use opener.read() instead"), |
|
416 | 415 | (r'opener\([^)]*\).write\(', "use opener.write() instead"), |
|
417 | 416 | (r'(?i)descend[e]nt', "the proper spelling is descendAnt"), |
|
418 | 417 | (r'\.debug\(\_', "don't mark debug messages for translation"), |
|
419 | 418 | (r'\.strip\(\)\.split\(\)', "no need to strip before splitting"), |
|
420 | 419 | (r'^\s*except\s*:', "naked except clause", r'#.*re-raises'), |
|
421 | 420 | ( |
|
422 | 421 | r'^\s*except\s([^\(,]+|\([^\)]+\))\s*,', |
|
423 | 422 | 'legacy exception syntax; use "as" instead of ","', |
|
424 | 423 | ), |
|
425 | 424 | (r'release\(.*wlock, .*lock\)', "wrong lock release order"), |
|
426 | 425 | (r'\bdef\s+__bool__\b', "__bool__ should be __nonzero__ in Python 2"), |
|
427 | 426 | ( |
|
428 | 427 | r'os\.path\.join\(.*, *(""|\'\')\)', |
|
429 | 428 | "use pathutil.normasprefix(path) instead of os.path.join(path, '')", |
|
430 | 429 | ), |
|
431 | 430 | (r'\s0[0-7]+\b', 'legacy octal syntax; use "0o" prefix instead of "0"'), |
|
432 | 431 | # XXX only catch mutable arguments on the first line of the definition |
|
433 | 432 | (r'def.*[( ]\w+=\{\}', "don't use mutable default arguments"), |
|
434 | 433 | (r'\butil\.Abort\b', "directly use error.Abort"), |
|
435 | 434 | ( |
|
436 | 435 | r'^@(\w*\.)?cachefunc', |
|
437 | 436 | "module-level @cachefunc is risky, please avoid", |
|
438 | 437 | ), |
|
439 | 438 | ( |
|
440 | 439 | r'^import Queue', |
|
441 | 440 | "don't use Queue, use pycompat.queue.Queue + " |
|
442 | 441 | "pycompat.queue.Empty", |
|
443 | 442 | ), |
|
444 | 443 | ( |
|
445 | 444 | r'^import cStringIO', |
|
446 | 445 | "don't use cStringIO.StringIO, use util.stringio", |
|
447 | 446 | ), |
|
448 | 447 | (r'^import urllib', "don't use urllib, use util.urlreq/util.urlerr"), |
|
449 | 448 | ( |
|
450 | 449 | r'^import SocketServer', |
|
451 | 450 | "don't use SockerServer, use util.socketserver", |
|
452 | 451 | ), |
|
453 | 452 | (r'^import urlparse', "don't use urlparse, use util.urlreq"), |
|
454 | 453 | (r'^import xmlrpclib', "don't use xmlrpclib, use util.xmlrpclib"), |
|
455 | 454 | (r'^import cPickle', "don't use cPickle, use util.pickle"), |
|
456 | 455 | (r'^import pickle', "don't use pickle, use util.pickle"), |
|
457 | 456 | (r'^import httplib', "don't use httplib, use util.httplib"), |
|
458 | 457 | (r'^import BaseHTTPServer', "use util.httpserver instead"), |
|
459 | 458 | ( |
|
460 | 459 | r'^(from|import) mercurial\.(cext|pure|cffi)', |
|
461 | 460 | "use mercurial.policy.importmod instead", |
|
462 | 461 | ), |
|
463 | 462 | (r'\.next\(\)', "don't use .next(), use next(...)"), |
|
464 | 463 | ( |
|
465 | 464 | r'([a-z]*).revision\(\1\.node\(', |
|
466 | 465 | "don't convert rev to node before passing to revision(nodeorrev)", |
|
467 | 466 | ), |
|
468 | 467 | (r'platform\.system\(\)', "don't use platform.system(), use pycompat"), |
|
469 | 468 | ], |
|
470 | 469 | # warnings |
|
471 | 470 | [], |
|
472 | 471 | ] |
|
473 | 472 | |
|
474 | 473 | # patterns to check normal *.py files |
|
475 | 474 | pypats = [ |
|
476 | 475 | [ |
|
477 | 476 | # Ideally, these should be placed in "commonpypats" for |
|
478 | 477 | # consistency of coding rules in Mercurial source tree. |
|
479 | 478 | # But on the other hand, these are not so seriously required for |
|
480 | 479 | # python code fragments embedded in test scripts. Fixing test |
|
481 | 480 | # scripts for these patterns requires many changes, and has less |
|
482 | 481 | # profit than effort. |
|
483 | 482 | (r'raise Exception', "don't raise generic exceptions"), |
|
484 | 483 | (r'[\s\(](open|file)\([^)]*\)\.read\(', "use util.readfile() instead"), |
|
485 | 484 | ( |
|
486 | 485 | r'[\s\(](open|file)\([^)]*\)\.write\(', |
|
487 | 486 | "use util.writefile() instead", |
|
488 | 487 | ), |
|
489 | 488 | ( |
|
490 | 489 | r'^[\s\(]*(open(er)?|file)\([^)]*\)(?!\.close\(\))', |
|
491 | 490 | "always assign an opened file to a variable, and close it afterwards", |
|
492 | 491 | ), |
|
493 | 492 | ( |
|
494 | 493 | r'[\s\(](open|file)\([^)]*\)\.(?!close\(\))', |
|
495 | 494 | "always assign an opened file to a variable, and close it afterwards", |
|
496 | 495 | ), |
|
497 | 496 | (r':\n( )*( ){1,3}[^ ]', "must indent 4 spaces"), |
|
498 | 497 | (r'^import atexit', "don't use atexit, use ui.atexit"), |
|
499 | 498 | # rules depending on implementation of repquote() |
|
500 | 499 | ( |
|
501 | 500 | r' x+[xpqo%APM][\'"]\n\s+[\'"]x', |
|
502 | 501 | 'string join across lines with no space', |
|
503 | 502 | ), |
|
504 | 503 | ( |
|
505 | 504 | r'''(?x)ui\.(status|progress|write|note|warn)\( |
|
506 | 505 | [ \t\n#]* |
|
507 | 506 | (?# any strings/comments might precede a string, which |
|
508 | 507 | # contains translatable message) |
|
509 | 508 | b?((['"]|\'\'\'|""")[ \npq%bAPMxno]*(['"]|\'\'\'|""")[ \t\n#]+)* |
|
510 | 509 | (?# sequence consisting of below might precede translatable message |
|
511 | 510 | # - formatting string: "% 10s", "%05d", "% -3.2f", "%*s", "%%" ... |
|
512 | 511 | # - escaped character: "\\", "\n", "\0" ... |
|
513 | 512 | # - character other than '%', 'b' as '\', and 'x' as alphabet) |
|
514 | 513 | (['"]|\'\'\'|""") |
|
515 | 514 | ((%([ n]?[PM]?([np]+|A))?x)|%%|b[bnx]|[ \nnpqAPMo])*x |
|
516 | 515 | (?# this regexp can't use [^...] style, |
|
517 | 516 | # because _preparepats forcibly adds "\n" into [^...], |
|
518 | 517 | # even though this regexp wants match it against "\n")''', |
|
519 | 518 | "missing _() in ui message (use () to hide false-positives)", |
|
520 | 519 | ), |
|
521 | 520 | ] |
|
522 | 521 | + commonpypats[0], |
|
523 | 522 | # warnings |
|
524 | 523 | [ |
|
525 | 524 | # rules depending on implementation of repquote() |
|
526 | 525 | (r'(^| )pp +xxxxqq[ \n][^\n]', "add two newlines after '.. note::'"), |
|
527 | 526 | ] |
|
528 | 527 | + commonpypats[1], |
|
529 | 528 | ] |
|
530 | 529 | |
|
531 | 530 | # patterns to check *.py for embedded ones in test script |
|
532 | 531 | embeddedpypats = [ |
|
533 | 532 | [] + commonpypats[0], |
|
534 | 533 | # warnings |
|
535 | 534 | [] + commonpypats[1], |
|
536 | 535 | ] |
|
537 | 536 | |
|
538 | 537 | # common filters to convert *.py |
|
539 | 538 | commonpyfilters = [ |
|
540 | 539 | ( |
|
541 | 540 | r"""(?msx)(?P<comment>\#.*?$)| |
|
542 | 541 | ((?P<quote>('''|\"\"\"|(?<!')'(?!')|(?<!")"(?!"))) |
|
543 | 542 | (?P<text>(([^\\]|\\.)*?)) |
|
544 | 543 | (?P=quote))""", |
|
545 | 544 | reppython, |
|
546 | 545 | ), |
|
547 | 546 | ] |
|
548 | 547 | |
|
548 | # pattern only for mercurial and extensions | |
|
549 | core_py_pats = [ | |
|
550 | [ | |
|
551 | # Windows tend to get confused about capitalization of the drive letter | |
|
552 | # | |
|
553 | # see mercurial.windows.abspath for details | |
|
554 | ( | |
|
555 | r'os\.path\.abspath', | |
|
556 | "use util.abspath instead (windows)", | |
|
557 | r'#.*re-exports', | |
|
558 | ), | |
|
559 | ], | |
|
560 | # warnings | |
|
561 | [], | |
|
562 | ] | |
|
563 | ||
|
549 | 564 | # filters to convert normal *.py files |
|
550 | 565 | pyfilters = [] + commonpyfilters |
|
551 | 566 | |
|
552 | 567 | # non-filter patterns |
|
553 | 568 | pynfpats = [ |
|
554 | 569 | [ |
|
555 | 570 | (r'pycompat\.osname\s*[=!]=\s*[\'"]nt[\'"]', "use pycompat.iswindows"), |
|
556 | 571 | (r'pycompat\.osname\s*[=!]=\s*[\'"]posix[\'"]', "use pycompat.isposix"), |
|
557 | 572 | ( |
|
558 | 573 | r'pycompat\.sysplatform\s*[!=]=\s*[\'"]darwin[\'"]', |
|
559 | 574 | "use pycompat.isdarwin", |
|
560 | 575 | ), |
|
561 | 576 | ], |
|
562 | 577 | # warnings |
|
563 | 578 | [], |
|
564 | 579 | ] |
|
565 | 580 | |
|
566 | 581 | # filters to convert *.py for embedded ones in test script |
|
567 | 582 | embeddedpyfilters = [] + commonpyfilters |
|
568 | 583 | |
|
569 | 584 | # extension non-filter patterns |
|
570 | 585 | pyextnfpats = [ |
|
571 | 586 | [(r'^"""\n?[A-Z]', "don't capitalize docstring title")], |
|
572 | 587 | # warnings |
|
573 | 588 | [], |
|
574 | 589 | ] |
|
575 | 590 | |
|
576 | 591 | txtfilters = [] |
|
577 | 592 | |
|
578 | 593 | txtpats = [ |
|
579 | 594 | [ |
|
580 | 595 | (r'\s$', 'trailing whitespace'), |
|
581 | 596 | ('.. note::[ \n][^\n]', 'add two newlines after note::'), |
|
582 | 597 | ], |
|
583 | 598 | [], |
|
584 | 599 | ] |
|
585 | 600 | |
|
586 | 601 | cpats = [ |
|
587 | 602 | [ |
|
588 | 603 | (r'//', "don't use //-style comments"), |
|
589 | 604 | (r'\S\t', "don't use tabs except for indent"), |
|
590 | 605 | (r'(\S[ \t]+|^[ \t]+)\n', "trailing whitespace"), |
|
591 | 606 | (r'(while|if|do|for)\(', "use space after while/if/do/for"), |
|
592 | 607 | (r'return\(', "return is not a function"), |
|
593 | 608 | (r' ;', "no space before ;"), |
|
594 | 609 | (r'[^;] \)', "no space before )"), |
|
595 | 610 | (r'[)][{]', "space between ) and {"), |
|
596 | 611 | (r'\w+\* \w+', "use int *foo, not int* foo"), |
|
597 | 612 | (r'\W\([^\)]+\) \w+', "use (int)foo, not (int) foo"), |
|
598 | 613 | (r'\w+ (\+\+|--)', "use foo++, not foo ++"), |
|
599 | 614 | (r'\w,\w', "missing whitespace after ,"), |
|
600 | 615 | (r'^[^#]\w[+/*]\w', "missing whitespace in expression"), |
|
601 | 616 | (r'\w\s=\s\s+\w', "gratuitous whitespace after ="), |
|
602 | 617 | (r'^#\s+\w', "use #foo, not # foo"), |
|
603 | 618 | (r'[^\n]\Z', "no trailing newline"), |
|
604 | 619 | (r'^\s*#import\b', "use only #include in standard C code"), |
|
605 | 620 | (r'strcpy\(', "don't use strcpy, use strlcpy or memcpy"), |
|
606 | 621 | (r'strcat\(', "don't use strcat"), |
|
607 | 622 | # rules depending on implementation of repquote() |
|
608 | 623 | ], |
|
609 | 624 | # warnings |
|
610 | 625 | [ |
|
611 | 626 | # rules depending on implementation of repquote() |
|
612 | 627 | ], |
|
613 | 628 | ] |
|
614 | 629 | |
|
615 | 630 | cfilters = [ |
|
616 | 631 | (r'(/\*)(((\*(?!/))|[^*])*)\*/', repccomment), |
|
617 | 632 | (r'''(?P<quote>(?<!")")(?P<text>([^"]|\\")+)"(?!")''', repquote), |
|
618 | 633 | (r'''(#\s*include\s+<)([^>]+)>''', repinclude), |
|
619 | 634 | (r'(\()([^)]+\))', repcallspaces), |
|
620 | 635 | ] |
|
621 | 636 | |
|
622 | 637 | inutilpats = [ |
|
623 | 638 | [ |
|
624 | 639 | (r'\bui\.', "don't use ui in util"), |
|
625 | 640 | ], |
|
626 | 641 | # warnings |
|
627 | 642 | [], |
|
628 | 643 | ] |
|
629 | 644 | |
|
630 | 645 | inrevlogpats = [ |
|
631 | 646 | [ |
|
632 | 647 | (r'\brepo\.', "don't use repo in revlog"), |
|
633 | 648 | ], |
|
634 | 649 | # warnings |
|
635 | 650 | [], |
|
636 | 651 | ] |
|
637 | 652 | |
|
638 | 653 | webtemplatefilters = [] |
|
639 | 654 | |
|
640 | 655 | webtemplatepats = [ |
|
641 | 656 | [], |
|
642 | 657 | [ |
|
643 | 658 | ( |
|
644 | 659 | r'{desc(\|(?!websub|firstline)[^\|]*)+}', |
|
645 | 660 | 'follow desc keyword with either firstline or websub', |
|
646 | 661 | ), |
|
647 | 662 | ], |
|
648 | 663 | ] |
|
649 | 664 | |
|
650 | 665 | allfilesfilters = [] |
|
651 | 666 | |
|
652 | 667 | allfilespats = [ |
|
653 | 668 | [ |
|
654 | 669 | ( |
|
655 | 670 | r'(http|https)://[a-zA-Z0-9./]*selenic.com/', |
|
656 | 671 | 'use mercurial-scm.org domain URL', |
|
657 | 672 | ), |
|
658 | 673 | ( |
|
659 | 674 | r'mercurial@selenic\.com', |
|
660 | 675 | 'use mercurial-scm.org domain for mercurial ML address', |
|
661 | 676 | ), |
|
662 | 677 | ( |
|
663 | 678 | r'mercurial-devel@selenic\.com', |
|
664 | 679 | 'use mercurial-scm.org domain for mercurial-devel ML address', |
|
665 | 680 | ), |
|
666 | 681 | ], |
|
667 | 682 | # warnings |
|
668 | 683 | [], |
|
669 | 684 | ] |
|
670 | 685 | |
|
671 | 686 | py3pats = [ |
|
672 | 687 | [ |
|
673 | 688 | ( |
|
674 | 689 | r'os\.environ', |
|
675 | 690 | "use encoding.environ instead (py3)", |
|
676 | 691 | r'#.*re-exports', |
|
677 | 692 | ), |
|
678 | 693 | (r'os\.name', "use pycompat.osname instead (py3)"), |
|
679 | 694 | (r'os\.getcwd', "use encoding.getcwd instead (py3)", r'#.*re-exports'), |
|
680 | 695 | (r'os\.sep', "use pycompat.ossep instead (py3)"), |
|
681 | 696 | (r'os\.pathsep', "use pycompat.ospathsep instead (py3)"), |
|
682 | 697 | (r'os\.altsep', "use pycompat.osaltsep instead (py3)"), |
|
683 | 698 | (r'sys\.platform', "use pycompat.sysplatform instead (py3)"), |
|
684 | 699 | (r'getopt\.getopt', "use pycompat.getoptb instead (py3)"), |
|
685 | 700 | (r'os\.getenv', "use encoding.environ.get instead"), |
|
686 | 701 | (r'os\.setenv', "modifying the environ dict is not preferred"), |
|
687 | 702 | (r'(?<!pycompat\.)xrange', "use pycompat.xrange instead (py3)"), |
|
688 | 703 | ], |
|
689 | 704 | # warnings |
|
690 | 705 | [], |
|
691 | 706 | ] |
|
692 | 707 | |
|
693 | 708 | checks = [ |
|
694 | 709 | ('python', r'.*\.(py|cgi)$', r'^#!.*python', pyfilters, pypats), |
|
695 | 710 | ('python', r'.*\.(py|cgi)$', r'^#!.*python', [], pynfpats), |
|
696 | 711 | ('python', r'.*hgext.*\.py$', '', [], pyextnfpats), |
|
697 | 712 | ( |
|
698 | 713 | 'python 3', |
|
699 | 714 | r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py', |
|
700 | 715 | '', |
|
701 | 716 | pyfilters, |
|
702 | 717 | py3pats, |
|
703 | 718 | ), |
|
719 | ( | |
|
720 | 'core files', | |
|
721 | r'.*(hgext|mercurial)/(?!demandimport|policy|pycompat).*\.py', | |
|
722 | '', | |
|
723 | pyfilters, | |
|
724 | core_py_pats, | |
|
725 | ), | |
|
704 | 726 | ('test script', r'(.*/)?test-[^.~]*$', '', testfilters, testpats), |
|
705 | 727 | ('c', r'.*\.[ch]$', '', cfilters, cpats), |
|
706 | 728 | ('unified test', r'.*\.t$', '', utestfilters, utestpats), |
|
707 | 729 | ( |
|
708 | 730 | 'layering violation repo in revlog', |
|
709 | 731 | r'mercurial/revlog\.py', |
|
710 | 732 | '', |
|
711 | 733 | pyfilters, |
|
712 | 734 | inrevlogpats, |
|
713 | 735 | ), |
|
714 | 736 | ( |
|
715 | 737 | 'layering violation ui in util', |
|
716 | 738 | r'mercurial/util\.py', |
|
717 | 739 | '', |
|
718 | 740 | pyfilters, |
|
719 | 741 | inutilpats, |
|
720 | 742 | ), |
|
721 | 743 | ('txt', r'.*\.txt$', '', txtfilters, txtpats), |
|
722 | 744 | ( |
|
723 | 745 | 'web template', |
|
724 | 746 | r'mercurial/templates/.*\.tmpl', |
|
725 | 747 | '', |
|
726 | 748 | webtemplatefilters, |
|
727 | 749 | webtemplatepats, |
|
728 | 750 | ), |
|
729 | 751 | ('all except for .po', r'.*(?<!\.po)$', '', allfilesfilters, allfilespats), |
|
730 | 752 | ] |
|
731 | 753 | |
|
732 | 754 | # (desc, |
|
733 | 755 | # func to pick up embedded code fragments, |
|
734 | 756 | # list of patterns to convert target files |
|
735 | 757 | # list of patterns to detect errors/warnings) |
|
736 | 758 | embeddedchecks = [ |
|
737 | 759 | ( |
|
738 | 760 | 'embedded python', |
|
739 | 761 | testparseutil.pyembedded, |
|
740 | 762 | embeddedpyfilters, |
|
741 | 763 | embeddedpypats, |
|
742 | 764 | ) |
|
743 | 765 | ] |
|
744 | 766 | |
|
745 | 767 | |
|
746 | 768 | def _preparepats(): |
|
747 | 769 | def preparefailandwarn(failandwarn): |
|
748 | 770 | for pats in failandwarn: |
|
749 | 771 | for i, pseq in enumerate(pats): |
|
750 | 772 | # fix-up regexes for multi-line searches |
|
751 | 773 | p = pseq[0] |
|
752 | 774 | # \s doesn't match \n (done in two steps) |
|
753 | 775 | # first, we replace \s that appears in a set already |
|
754 | 776 | p = re.sub(r'\[\\s', r'[ \\t', p) |
|
755 | 777 | # now we replace other \s instances. |
|
756 | 778 | p = re.sub(r'(?<!(\\|\[))\\s', r'[ \\t]', p) |
|
757 | 779 | # [^...] doesn't match newline |
|
758 | 780 | p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p) |
|
759 | 781 | |
|
760 | 782 | pats[i] = (re.compile(p, re.MULTILINE),) + pseq[1:] |
|
761 | 783 | |
|
762 | 784 | def preparefilters(filters): |
|
763 | 785 | for i, flt in enumerate(filters): |
|
764 | 786 | filters[i] = re.compile(flt[0]), flt[1] |
|
765 | 787 | |
|
766 | 788 | for cs in (checks, embeddedchecks): |
|
767 | 789 | for c in cs: |
|
768 | 790 | failandwarn = c[-1] |
|
769 | 791 | preparefailandwarn(failandwarn) |
|
770 | 792 | |
|
771 | 793 | filters = c[-2] |
|
772 | 794 | preparefilters(filters) |
|
773 | 795 | |
|
774 | 796 | |
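As a worked example of the three fix-ups above (a standalone sketch using the same substitutions):

    import re

    p = r'\sfoo[\s][^x]'
    p = re.sub(r'\[\\s', r'[ \\t', p)             # \s already inside a set
    p = re.sub(r'(?<!(\\|\[))\\s', r'[ \\t]', p)  # bare \s must not match \n
    p = re.sub(r'(?<!\\)\[\^', r'[^\\n', p)       # negated sets must not eat \n
    print(p)  # [ \t]foo[ \t][^\nx]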
|
775 | 797 | class norepeatlogger(object): |
|
776 | 798 | def __init__(self): |
|
777 | 799 | self._lastseen = None |
|
778 | 800 | |
|
779 | 801 | def log(self, fname, lineno, line, msg, blame): |
|
780 | 802 | """print error related a to given line of a given file. |
|
781 | 803 | |
|
782 | 804 | The faulty line will also be printed but only once in the case |
|
783 | 805 | of multiple errors. |
|
784 | 806 | |
|
785 | 807 | :fname: filename |
|
786 | 808 | :lineno: line number |
|
787 | 809 | :line: actual content of the line |
|
788 | 810 | :msg: error message |
|
789 | 811 | """ |
|
790 | 812 | msgid = fname, lineno, line |
|
791 | 813 | if msgid != self._lastseen: |
|
792 | 814 | if blame: |
|
793 | 815 | print("%s:%d (%s):" % (fname, lineno, blame)) |
|
794 | 816 | else: |
|
795 | 817 | print("%s:%d:" % (fname, lineno)) |
|
796 | 818 | print(" > %s" % line) |
|
797 | 819 | self._lastseen = msgid |
|
798 | 820 | print(" " + msg) |
|
799 | 821 | |
|
800 | 822 | |
|
801 | 823 | _defaultlogger = norepeatlogger() |
|
802 | 824 | |
|
803 | 825 | |
|
804 | 826 | def getblame(f): |
|
805 | 827 | lines = [] |
|
806 | 828 | for l in os.popen('hg annotate -un %s' % f): |
|
807 | 829 | start, line = l.split(':', 1) |
|
808 | 830 | user, rev = start.split() |
|
809 | 831 | lines.append((line[1:-1], user, rev)) |
|
810 | 832 | return lines |
|
811 | 833 | |
|
812 | 834 | |
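For illustration, the parsing above assumes `hg annotate -un` lines of the form `  user rev: content` (the annotate output here is made up):

    l = '   alice 42: some line\n'
    start, line = l.split(':', 1)
    user, rev = start.split()
    print((line[1:-1], user, rev))  # ('some line', 'alice', '42')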
|
813 | 835 | def checkfile( |
|
814 | 836 | f, |
|
815 | 837 | logfunc=_defaultlogger.log, |
|
816 | 838 | maxerr=None, |
|
817 | 839 | warnings=False, |
|
818 | 840 | blame=False, |
|
819 | 841 | debug=False, |
|
820 | 842 | lineno=True, |
|
821 | 843 | ): |
|
822 | 844 | """checks style and portability of a given file |
|
823 | 845 | |
|
824 | 846 | :f: filepath |
|
825 | 847 | :logfunc: function used to report error |
|
826 | 848 | logfunc(filename, linenumber, linecontent, errormessage) |
|
827 | 849 | :maxerr: number of errors to display before aborting. |
|
828 | 850 | Set to false (default) to report all errors |
|
829 | 851 | |
|
830 | 852 | return True if no error is found, False otherwise. |
|
831 | 853 | """ |
|
832 | 854 | result = True |
|
833 | 855 | |
|
834 | 856 | try: |
|
835 | 857 | with opentext(f) as fp: |
|
836 | 858 | try: |
|
837 | 859 | pre = fp.read() |
|
838 | 860 | except UnicodeDecodeError as e: |
|
839 | 861 | print("%s while reading %s" % (e, f)) |
|
840 | 862 | return result |
|
841 | 863 | except IOError as e: |
|
842 | 864 | print("Skipping %s, %s" % (f, str(e).split(':', 1)[0])) |
|
843 | 865 | return result |
|
844 | 866 | |
|
845 | 867 | # context information shared during a single checkfile() invocation |
|
846 | 868 | context = {'blamecache': None} |
|
847 | 869 | |
|
848 | 870 | for name, match, magic, filters, pats in checks: |
|
849 | 871 | if debug: |
|
850 | 872 | print(name, f) |
|
851 | 873 | if not (re.match(match, f) or (magic and re.search(magic, pre))): |
|
852 | 874 | if debug: |
|
853 | 875 | print( |
|
854 | 876 | "Skipping %s for %s it doesn't match %s" % (name, match, f) |
|
855 | 877 | ) |
|
856 | 878 | continue |
|
857 | 879 | if "no-" "check-code" in pre: |
|
858 | 880 | # If you're looking at this line, it's because a file has: |
|
859 | 881 | # no- check- code |
|
860 | 882 | # but the reason to output skipping is to make life for |
|
861 | 883 | # tests easier. So, instead of writing it with a normal |
|
862 | 884 | # spelling, we write it with the expected spelling from |
|
863 | 885 | # tests/test-check-code.t |
|
864 | 886 | print("Skipping %s it has no-che?k-code (glob)" % f) |
|
865 | 887 | return "Skip" # skip checking this file |
|
866 | 888 | |
|
867 | 889 | fc = _checkfiledata( |
|
868 | 890 | name, |
|
869 | 891 | f, |
|
870 | 892 | pre, |
|
871 | 893 | filters, |
|
872 | 894 | pats, |
|
873 | 895 | context, |
|
874 | 896 | logfunc, |
|
875 | 897 | maxerr, |
|
876 | 898 | warnings, |
|
877 | 899 | blame, |
|
878 | 900 | debug, |
|
879 | 901 | lineno, |
|
880 | 902 | ) |
|
881 | 903 | if fc: |
|
882 | 904 | result = False |
|
883 | 905 | |
|
884 | 906 | if f.endswith('.t') and "no-" "check-code" not in pre: |
|
885 | 907 | if debug: |
|
886 | 908 | print("Checking embedded code in %s" % f) |
|
887 | 909 | |
|
888 | 910 | prelines = pre.splitlines() |
|
889 | 911 | embeddederrors = [] |
|
890 | 912 | for name, embedded, filters, pats in embeddedchecks: |
|
891 | 913 | # "reset curmax at each repetition" treats maxerr as "max |
|
892 | 914 | # number of errors in an actual file per entry of |
|
893 | 915 | # (embedded)checks" |
|
894 | 916 | curmaxerr = maxerr |
|
895 | 917 | |
|
896 | 918 | for found in embedded(f, prelines, embeddederrors): |
|
897 | 919 | filename, starts, ends, code = found |
|
898 | 920 | fc = _checkfiledata( |
|
899 | 921 | name, |
|
900 | 922 | f, |
|
901 | 923 | code, |
|
902 | 924 | filters, |
|
903 | 925 | pats, |
|
904 | 926 | context, |
|
905 | 927 | logfunc, |
|
906 | 928 | curmaxerr, |
|
907 | 929 | warnings, |
|
908 | 930 | blame, |
|
909 | 931 | debug, |
|
910 | 932 | lineno, |
|
911 | 933 | offset=starts - 1, |
|
912 | 934 | ) |
|
913 | 935 | if fc: |
|
914 | 936 | result = False |
|
915 | 937 | if curmaxerr: |
|
916 | 938 | if fc >= curmaxerr: |
|
917 | 939 | break |
|
918 | 940 | curmaxerr -= fc |
|
919 | 941 | |
|
920 | 942 | return result |
|
921 | 943 | |
|
922 | 944 | |
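A minimal usage sketch of checkfile() (the file path is hypothetical; _preparepats() must run first, as main() does below):

    _preparepats()  # compile the check regexes once
    ok = checkfile('mercurial/util.py', maxerr=15, warnings=True)
    if not ok:
        print('style or portability problems found')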
|
923 | 945 | def _checkfiledata( |
|
924 | 946 | name, |
|
925 | 947 | f, |
|
926 | 948 | filedata, |
|
927 | 949 | filters, |
|
928 | 950 | pats, |
|
929 | 951 | context, |
|
930 | 952 | logfunc, |
|
931 | 953 | maxerr, |
|
932 | 954 | warnings, |
|
933 | 955 | blame, |
|
934 | 956 | debug, |
|
935 | 957 | lineno, |
|
936 | 958 | offset=None, |
|
937 | 959 | ): |
|
938 | 960 | """Execute actual error check for file data |
|
939 | 961 | |
|
940 | 962 | :name: of the checking category |
|
941 | 963 | :f: filepath |
|
942 | 964 | :filedata: content of a file |
|
943 | 965 | :filters: to be applied before checking |
|
944 | 966 | :pats: to detect errors |
|
945 | 967 | :context: a dict of information shared during a single checkfile() invocation |
|
946 | 968 | Valid keys: 'blamecache'. |
|
947 | 969 | :logfunc: function used to report error |
|
948 | 970 | logfunc(filename, linenumber, linecontent, errormessage) |
|
949 | 971 | :maxerr: number of errors to display before aborting, or False to |
|
950 | 972 | report all errors |
|
951 | 973 | :warnings: whether warning level checks should be applied |
|
952 | 974 | :blame: whether blame information should be displayed at error reporting |
|
953 | 975 | :debug: whether debug information should be displayed |
|
954 | 976 | :lineno: whether lineno should be displayed at error reporting |
|
955 | 977 | :offset: line number offset of 'filedata' in 'f' for checking |
|
956 | 978 | an embedded code fragment, or None (offset=0 is different |
|
957 | 979 | from offset=None) |
|
958 | 980 | |
|
959 | 981 | returns number of detected errors. |
|
960 | 982 | """ |
|
961 | 983 | blamecache = context['blamecache'] |
|
962 | 984 | if offset is None: |
|
963 | 985 | lineoffset = 0 |
|
964 | 986 | else: |
|
965 | 987 | lineoffset = offset |
|
966 | 988 | |
|
967 | 989 | fc = 0 |
|
968 | 990 | pre = post = filedata |
|
969 | 991 | |
|
970 | 992 | if True: # TODO: get rid of this redundant 'if' block |
|
971 | 993 | for p, r in filters: |
|
972 | 994 | post = re.sub(p, r, post) |
|
973 | 995 | nerrs = len(pats[0]) # the first nerrs patterns are errors |
|
974 | 996 | if warnings: |
|
975 | 997 | pats = pats[0] + pats[1] |
|
976 | 998 | else: |
|
977 | 999 | pats = pats[0] |
|
978 | 1000 | # print post # uncomment to show filtered version |
|
979 | 1001 | |
|
980 | 1002 | if debug: |
|
981 | 1003 | print("Checking %s for %s" % (name, f)) |
|
982 | 1004 | |
|
983 | 1005 | prelines = None |
|
984 | 1006 | errors = [] |
|
985 | 1007 | for i, pat in enumerate(pats): |
|
986 | 1008 | if len(pat) == 3: |
|
987 | 1009 | p, msg, ignore = pat |
|
988 | 1010 | else: |
|
989 | 1011 | p, msg = pat |
|
990 | 1012 | ignore = None |
|
991 | 1013 | if i >= nerrs: |
|
992 | 1014 | msg = "warning: " + msg |
|
993 | 1015 | |
|
994 | 1016 | pos = 0 |
|
995 | 1017 | n = 0 |
|
996 | 1018 | for m in p.finditer(post): |
|
997 | 1019 | if prelines is None: |
|
998 | 1020 | prelines = pre.splitlines() |
|
999 | 1021 | postlines = post.splitlines(True) |
|
1000 | 1022 | |
|
1001 | 1023 | start = m.start() |
|
1002 | 1024 | while n < len(postlines): |
|
1003 | 1025 | step = len(postlines[n]) |
|
1004 | 1026 | if pos + step > start: |
|
1005 | 1027 | break |
|
1006 | 1028 | pos += step |
|
1007 | 1029 | n += 1 |
|
1008 | 1030 | l = prelines[n] |
|
1009 | 1031 | |
|
1010 | 1032 | if ignore and re.search(ignore, l, re.MULTILINE): |
|
1011 | 1033 | if debug: |
|
1012 | 1034 | print( |
|
1013 | 1035 | "Skipping %s for %s:%s (ignore pattern)" |
|
1014 | 1036 | % (name, f, (n + lineoffset)) |
|
1015 | 1037 | ) |
|
1016 | 1038 | continue |
|
1017 | 1039 | bd = "" |
|
1018 | 1040 | if blame: |
|
1019 | 1041 | bd = 'working directory' |
|
1020 | 1042 | if blamecache is None: |
|
1021 | 1043 | blamecache = getblame(f) |
|
1022 | 1044 | context['blamecache'] = blamecache |
|
1023 | 1045 | if (n + lineoffset) < len(blamecache): |
|
1024 | 1046 | bl, bu, br = blamecache[(n + lineoffset)] |
|
1025 | 1047 | if offset is None and bl == l: |
|
1026 | 1048 | bd = '%s@%s' % (bu, br) |
|
1027 | 1049 | elif offset is not None and bl.endswith(l): |
|
1028 | 1050 | # "offset is not None" means "checking |
|
1029 | 1051 | # embedded code fragment". In this case, |
|
1030 | 1052 | # "l" does not have information about the |
|
1031 | 1053 | # beginning of an *original* line in the |
|
1032 | 1054 | # file (e.g. ' > '). |
|
1033 | 1055 | # Therefore, use "str.endswith()", and |
|
1034 | 1056 | # show "maybe" for a little loose |
|
1035 | 1057 | # examination. |
|
1036 | 1058 | bd = '%s@%s, maybe' % (bu, br) |
|
1037 | 1059 | |
|
1038 | 1060 | errors.append((f, lineno and (n + lineoffset + 1), l, msg, bd)) |
|
1039 | 1061 | |
|
1040 | 1062 | errors.sort() |
|
1041 | 1063 | for e in errors: |
|
1042 | 1064 | logfunc(*e) |
|
1043 | 1065 | fc += 1 |
|
1044 | 1066 | if maxerr and fc >= maxerr: |
|
1045 | 1067 | print(" (too many errors, giving up)") |
|
1046 | 1068 | break |
|
1047 | 1069 | |
|
1048 | 1070 | return fc |
|
1049 | 1071 | |
|
1050 | 1072 | |
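The offset-to-line scan inside _checkfiledata can be pictured in isolation (a standalone sketch):

    post = 'aaa\nbbbb\ncc\n'
    postlines = post.splitlines(True)  # keepends, so lengths sum to offsets
    start = 6                          # offset of a match inside 'bbbb'
    pos = n = 0
    while n < len(postlines):
        step = len(postlines[n])
        if pos + step > start:
            break
        pos += step
        n += 1
    print(n)  # 1: the match falls on the second line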
|
1051 | 1073 | def main(): |
|
1052 | 1074 | parser = optparse.OptionParser("%prog [options] [files | -]") |
|
1053 | 1075 | parser.add_option( |
|
1054 | 1076 | "-w", |
|
1055 | 1077 | "--warnings", |
|
1056 | 1078 | action="store_true", |
|
1057 | 1079 | help="include warning-level checks", |
|
1058 | 1080 | ) |
|
1059 | 1081 | parser.add_option( |
|
1060 | 1082 | "-p", "--per-file", type="int", help="max warnings per file" |
|
1061 | 1083 | ) |
|
1062 | 1084 | parser.add_option( |
|
1063 | 1085 | "-b", |
|
1064 | 1086 | "--blame", |
|
1065 | 1087 | action="store_true", |
|
1066 | 1088 | help="use annotate to generate blame info", |
|
1067 | 1089 | ) |
|
1068 | 1090 | parser.add_option( |
|
1069 | 1091 | "", "--debug", action="store_true", help="show debug information" |
|
1070 | 1092 | ) |
|
1071 | 1093 | parser.add_option( |
|
1072 | 1094 | "", |
|
1073 | 1095 | "--nolineno", |
|
1074 | 1096 | action="store_false", |
|
1075 | 1097 | dest='lineno', |
|
1076 | 1098 | help="don't show line numbers", |
|
1077 | 1099 | ) |
|
1078 | 1100 | |
|
1079 | 1101 | parser.set_defaults( |
|
1080 | 1102 | per_file=15, warnings=False, blame=False, debug=False, lineno=True |
|
1081 | 1103 | ) |
|
1082 | 1104 | (options, args) = parser.parse_args() |
|
1083 | 1105 | |
|
1084 | 1106 | if len(args) == 0: |
|
1085 | 1107 | check = glob.glob("*") |
|
1086 | 1108 | elif args == ['-']: |
|
1087 | 1109 | # read file list from stdin |
|
1088 | 1110 | check = sys.stdin.read().splitlines() |
|
1089 | 1111 | else: |
|
1090 | 1112 | check = args |
|
1091 | 1113 | |
|
1092 | 1114 | _preparepats() |
|
1093 | 1115 | |
|
1094 | 1116 | ret = 0 |
|
1095 | 1117 | for f in check: |
|
1096 | 1118 | if not checkfile( |
|
1097 | 1119 | f, |
|
1098 | 1120 | maxerr=options.per_file, |
|
1099 | 1121 | warnings=options.warnings, |
|
1100 | 1122 | blame=options.blame, |
|
1101 | 1123 | debug=options.debug, |
|
1102 | 1124 | lineno=options.lineno, |
|
1103 | 1125 | ): |
|
1104 | 1126 | ret = 1 |
|
1105 | 1127 | return ret |
|
1106 | 1128 | |
|
1107 | 1129 | |
|
1108 | 1130 | if __name__ == "__main__": |
|
1109 | 1131 | sys.exit(main()) |
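Typical invocations, assuming the script lives at contrib/check-code.py as in the Mercurial tree (with `-`, the file list is read from stdin, as parsed above):

    $ python contrib/check-code.py --warnings mercurial/util.py
    $ hg files 'set:**.py' | python contrib/check-code.py -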
@@ -1,551 +1,546 b'' | |||
|
1 | 1 | /* |
|
2 | 2 | * A fast client for Mercurial command server |
|
3 | 3 | * |
|
4 | 4 | * Copyright (c) 2011 Yuya Nishihara <yuya@tcha.org> |
|
5 | 5 | * |
|
6 | 6 | * This software may be used and distributed according to the terms of the |
|
7 | 7 | * GNU General Public License version 2 or any later version. |
|
8 | 8 | */ |
|
9 | 9 | |
|
10 | 10 | #include <assert.h> |
|
11 | 11 | #include <dirent.h> |
|
12 | 12 | #include <errno.h> |
|
13 | 13 | #include <fcntl.h> |
|
14 | 14 | #include <signal.h> |
|
15 | 15 | #include <stdio.h> |
|
16 | 16 | #include <stdlib.h> |
|
17 | 17 | #include <string.h> |
|
18 | 18 | #include <sys/file.h> |
|
19 | 19 | #include <sys/stat.h> |
|
20 | 20 | #include <sys/types.h> |
|
21 | 21 | #include <sys/un.h> |
|
22 | 22 | #include <sys/wait.h> |
|
23 | 23 | #include <time.h> |
|
24 | 24 | #include <unistd.h> |
|
25 | 25 | |
|
26 | 26 | #include "hgclient.h" |
|
27 | 27 | #include "procutil.h" |
|
28 | 28 | #include "util.h" |
|
29 | 29 | |
|
30 | 30 | #ifndef PATH_MAX |
|
31 | 31 | #define PATH_MAX 4096 |
|
32 | 32 | #endif |
|
33 | 33 | |
|
34 | 34 | struct cmdserveropts { |
|
35 | 35 | char sockname[PATH_MAX]; |
|
36 | 36 | char initsockname[PATH_MAX]; |
|
37 | 37 | char redirectsockname[PATH_MAX]; |
|
38 | 38 | size_t argsize; |
|
39 | 39 | const char **args; |
|
40 | 40 | }; |
|
41 | 41 | |
|
42 | 42 | static void initcmdserveropts(struct cmdserveropts *opts) |
|
43 | 43 | { |
|
44 | 44 | memset(opts, 0, sizeof(struct cmdserveropts)); |
|
45 | 45 | } |
|
46 | 46 | |
|
47 | 47 | static void freecmdserveropts(struct cmdserveropts *opts) |
|
48 | 48 | { |
|
49 | 49 | free(opts->args); |
|
50 | 50 | opts->args = NULL; |
|
51 | 51 | opts->argsize = 0; |
|
52 | 52 | } |
|
53 | 53 | |
|
54 | 54 | /* |
|
55 | 55 | * Test if an argument is a sensitive flag that should be passed to the server. |
|
56 | 56 | * Return 0 if not, otherwise the number of arguments starting from the current |
|
57 | 57 | * one that should be passed to the server. |
|
58 | 58 | */ |
|
59 | 59 | static size_t testsensitiveflag(const char *arg) |
|
60 | 60 | { |
|
61 | 61 | static const struct { |
|
62 | 62 | const char *name; |
|
63 | 63 | size_t narg; |
|
64 | 64 | } flags[] = { |
|
65 | 65 | {"--config", 1}, {"--cwd", 1}, {"--repo", 1}, |
|
66 | 66 | {"--repository", 1}, {"--traceback", 0}, {"-R", 1}, |
|
67 | 67 | }; |
|
68 | 68 | size_t i; |
|
69 | 69 | for (i = 0; i < sizeof(flags) / sizeof(flags[0]); ++i) { |
|
70 | 70 | size_t len = strlen(flags[i].name); |
|
71 | 71 | size_t narg = flags[i].narg; |
|
72 | 72 | if (memcmp(arg, flags[i].name, len) == 0) { |
|
73 | 73 | if (arg[len] == '\0') { |
|
74 | 74 | /* --flag (value) */ |
|
75 | 75 | return narg + 1; |
|
76 | 76 | } else if (arg[len] == '=' && narg > 0) { |
|
77 | 77 | /* --flag=value */ |
|
78 | 78 | return 1; |
|
79 | 79 | } else if (flags[i].name[1] != '-') { |
|
80 | 80 | /* short flag */ |
|
81 | 81 | return 1; |
|
82 | 82 | } |
|
83 | 83 | } |
|
84 | 84 | } |
|
85 | 85 | return 0; |
|
86 | 86 | } |
|
87 | 87 | |
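The matching rules above, re-expressed as a Python sketch for illustration (not part of chg itself):

    FLAGS = {'--config': 1, '--cwd': 1, '--repo': 1,
             '--repository': 1, '--traceback': 0, '-R': 1}

    def testsensitiveflag(arg):
        for name, narg in FLAGS.items():
            if arg.startswith(name):
                rest = arg[len(name):]
                if rest == '':
                    return narg + 1  # --flag VALUE: flag plus narg following args
                if rest.startswith('=') and narg > 0:
                    return 1         # --flag=VALUE: a single argument
                if not name.startswith('--'):
                    return 1         # short flag with attached value, e.g. -Rrepo
        return 0                     # not sensitive: nothing to forward

    assert testsensitiveflag('--config') == 2
    assert testsensitiveflag('--config=ui.x=y') == 1
    assert testsensitiveflag('-Rrepo') == 1
    assert testsensitiveflag('--verbose') == 0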
|
88 | 88 | /* |
|
89 | 89 | * Parse argv[] and put sensitive flags to opts->args |
|
90 | 90 | */ |
|
91 | 91 | static void setcmdserverargs(struct cmdserveropts *opts, int argc, |
|
92 | 92 | const char *argv[]) |
|
93 | 93 | { |
|
94 | 94 | size_t i, step; |
|
95 | 95 | opts->argsize = 0; |
|
96 | 96 | for (i = 0, step = 1; i < (size_t)argc; i += step, step = 1) { |
|
97 | 97 | if (!argv[i]) |
|
98 | 98 | continue; /* pass clang-analyse */ |
|
99 | 99 | if (strcmp(argv[i], "--") == 0) |
|
100 | 100 | break; |
|
101 | 101 | size_t n = testsensitiveflag(argv[i]); |
|
102 | 102 | if (n == 0 || i + n > (size_t)argc) |
|
103 | 103 | continue; |
|
104 | 104 | opts->args = |
|
105 | 105 | reallocx(opts->args, (n + opts->argsize) * sizeof(char *)); |
|
106 | 106 | memcpy(opts->args + opts->argsize, argv + i, |
|
107 | 107 | sizeof(char *) * n); |
|
108 | 108 | opts->argsize += n; |
|
109 | 109 | step = n; |
|
110 | 110 | } |
|
111 | 111 | } |
|
112 | 112 | |
|
113 | 113 | static void preparesockdir(const char *sockdir) |
|
114 | 114 | { |
|
115 | 115 | int r; |
|
116 | 116 | r = mkdir(sockdir, 0700); |
|
117 | 117 | if (r < 0 && errno != EEXIST) |
|
118 | 118 | abortmsgerrno("cannot create sockdir %s", sockdir); |
|
119 | 119 | |
|
120 | 120 | struct stat st; |
|
121 | 121 | r = lstat(sockdir, &st); |
|
122 | 122 | if (r < 0) |
|
123 | 123 | abortmsgerrno("cannot stat %s", sockdir); |
|
124 | 124 | if (!S_ISDIR(st.st_mode)) |
|
125 | 125 | abortmsg("cannot create sockdir %s (file exists)", sockdir); |
|
126 | 126 | if (st.st_uid != geteuid() || st.st_mode & 0077) |
|
127 | 127 | abortmsg("insecure sockdir %s", sockdir); |
|
128 | 128 | } |
|
129 | 129 | |
|
130 | 130 | /* |
|
131 | 131 | * Check if a socket directory exists and is only owned by the current user. |
|
132 | 132 | * Return 1 if so, 0 if not. This is used to check if XDG_RUNTIME_DIR can be |
|
133 | 133 | * used or not. According to the specification [1], XDG_RUNTIME_DIR should be |
|
134 | 134 | * ignored if the directory is not owned by the user with mode 0700. |
|
135 | 135 | * [1]: https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html |
|
136 | 136 | */ |
|
137 | 137 | static int checkruntimedir(const char *sockdir) |
|
138 | 138 | { |
|
139 | 139 | struct stat st; |
|
140 | 140 | int r = lstat(sockdir, &st); |
|
141 | 141 | if (r < 0) /* ex. does not exist */ |
|
142 | 142 | return 0; |
|
143 | 143 | if (!S_ISDIR(st.st_mode)) /* ex. is a file, not a directory */ |
|
144 | 144 | return 0; |
|
145 | 145 | return st.st_uid == geteuid() && (st.st_mode & 0777) == 0700; |
|
146 | 146 | } |
|
147 | 147 | |
|
148 | 148 | static void getdefaultsockdir(char sockdir[], size_t size) |
|
149 | 149 | { |
|
150 | 150 | /* by default, put socket file in secure directory |
|
151 | 151 | * (${XDG_RUNTIME_DIR}/chg, or ${TMPDIR:-/tmp}/chg$UID) |
|
152 | 152 | * (permission of socket file may be ignored on some Unices) */ |
|
153 | 153 | const char *runtimedir = getenv("XDG_RUNTIME_DIR"); |
|
154 | 154 | int r; |
|
155 | 155 | if (runtimedir && checkruntimedir(runtimedir)) { |
|
156 | 156 | r = snprintf(sockdir, size, "%s/chg", runtimedir); |
|
157 | 157 | } else { |
|
158 | 158 | const char *tmpdir = getenv("TMPDIR"); |
|
159 | 159 | if (!tmpdir) |
|
160 | 160 | tmpdir = "/tmp"; |
|
161 | 161 | r = snprintf(sockdir, size, "%s/chg%d", tmpdir, geteuid()); |
|
162 | 162 | } |
|
163 | 163 | if (r < 0 || (size_t)r >= size) |
|
164 | 164 | abortmsg("too long TMPDIR (r = %d)", r); |
|
165 | 165 | } |
|
166 | 166 | |
|
167 | 167 | static void setcmdserveropts(struct cmdserveropts *opts) |
|
168 | 168 | { |
|
169 | 169 | int r; |
|
170 | 170 | char sockdir[PATH_MAX]; |
|
171 | 171 | const char *envsockname = getenv("CHGSOCKNAME"); |
|
172 | 172 | if (!envsockname) { |
|
173 | 173 | getdefaultsockdir(sockdir, sizeof(sockdir)); |
|
174 | 174 | preparesockdir(sockdir); |
|
175 | 175 | } |
|
176 | 176 | |
|
177 | 177 | const char *basename = (envsockname) ? envsockname : sockdir; |
|
178 | 178 | const char *sockfmt = (envsockname) ? "%s" : "%s/server"; |
|
179 | 179 | r = snprintf(opts->sockname, sizeof(opts->sockname), sockfmt, basename); |
|
180 | 180 | if (r < 0 || (size_t)r >= sizeof(opts->sockname)) |
|
181 | 181 | abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); |
|
182 | 182 | r = snprintf(opts->initsockname, sizeof(opts->initsockname), "%s.%u", |
|
183 | 183 | opts->sockname, (unsigned)getpid()); |
|
184 | 184 | if (r < 0 || (size_t)r >= sizeof(opts->initsockname)) |
|
185 | 185 | abortmsg("too long TMPDIR or CHGSOCKNAME (r = %d)", r); |
|
186 | 186 | } |
|
187 | 187 | |
|
188 | 188 | /* If the current program is, say, /a/b/c/chg, returns /a/b/c/hg. */ |
|
189 | 189 | static char *getrelhgcmd(void) |
|
190 | 190 | { |
|
191 | 191 | ssize_t n; |
|
192 | 192 | char *res, *slash; |
|
193 | 193 | int maxsize = 4096; |
|
194 | 194 | res = malloc(maxsize); |
|
195 | 195 | if (res == NULL) |
|
196 | 196 | goto cleanup; |
|
197 | 197 | n = readlink("/proc/self/exe", res, maxsize); |
|
198 | 198 | if (n < 0 || n >= maxsize) |
|
199 | 199 | goto cleanup; |
|
200 | 200 | res[n] = '\0'; |
|
201 | 201 | slash = strrchr(res, '/'); |
|
202 | 202 | if (slash == NULL) |
|
203 | 203 | goto cleanup; |
|
204 | 204 | /* 4 is strlen("/hg") + nul byte */ |
|
205 | 205 | if (slash + 4 >= res + maxsize) |
|
206 | 206 | goto cleanup; |
|
207 | 207 | memcpy(slash, "/hg", 4); |
|
208 | 208 | return res; |
|
209 | 209 | cleanup: |
|
210 | 210 | free(res); |
|
211 | 211 | return NULL; |
|
212 | 212 | } |
|
213 | 213 | |
|
214 | 214 | static const char *gethgcmd(void) |
|
215 | 215 | { |
|
216 | 216 | static const char *hgcmd = NULL; |
|
217 | 217 | #ifdef HGPATHREL |
|
218 | 218 | int tryrelhgcmd = 1; |
|
219 | 219 | #else |
|
220 | 220 | int tryrelhgcmd = 0; |
|
221 | 221 | #endif |
|
222 | 222 | if (!hgcmd) { |
|
223 | 223 | hgcmd = getenv("CHGHG"); |
|
224 | 224 | if (!hgcmd || hgcmd[0] == '\0') |
|
225 | 225 | hgcmd = getenv("HG"); |
|
226 | 226 | if (tryrelhgcmd && (!hgcmd || hgcmd[0] == '\0')) |
|
227 | 227 | hgcmd = getrelhgcmd(); |
|
228 | 228 | if (!hgcmd || hgcmd[0] == '\0') |
|
229 | 229 | #ifdef HGPATH |
|
230 | 230 | hgcmd = (HGPATH); |
|
231 | 231 | #else |
|
232 | 232 | hgcmd = "hg"; |
|
233 | 233 | #endif |
|
234 | 234 | } |
|
235 | 235 | return hgcmd; |
|
236 | 236 | } |
|
237 | 237 | |
|
238 | 238 | static void execcmdserver(const struct cmdserveropts *opts) |
|
239 | 239 | { |
|
240 | 240 | const char *hgcmd = gethgcmd(); |
|
241 | 241 | |
|
242 | 242 | const char *baseargv[] = { |
|
243 | hgcmd, | |
|
244 | "serve", | |
|
245 | "--cmdserver", | |
|
246 | "chgunix", | |
|
247 | "--address", | |
|
248 | opts->initsockname, | |
|
249 | "--daemon-postexec", | |
|
243 | hgcmd, "serve", "--no-profile", "--cmdserver", | |
|
244 | "chgunix", "--address", opts->initsockname, "--daemon-postexec", | |
|
250 | 245 | "chdir:/", |
|
251 | 246 | }; |
|
252 | 247 | size_t baseargvsize = sizeof(baseargv) / sizeof(baseargv[0]); |
|
253 | 248 | size_t argsize = baseargvsize + opts->argsize + 1; |
|
254 | 249 | |
|
255 | 250 | const char **argv = mallocx(sizeof(char *) * argsize); |
|
256 | 251 | memcpy(argv, baseargv, sizeof(baseargv)); |
|
257 | 252 | if (opts->args) { |
|
258 | 253 | size_t size = sizeof(char *) * opts->argsize; |
|
259 | 254 | memcpy(argv + baseargvsize, opts->args, size); |
|
260 | 255 | } |
|
261 | 256 | argv[argsize - 1] = NULL; |
|
262 | 257 | |
|
263 | 258 | const char *lc_ctype_env = getenv("LC_CTYPE"); |
|
264 | 259 | if (lc_ctype_env == NULL) { |
|
265 | 260 | if (putenv("CHG_CLEAR_LC_CTYPE=") != 0) |
|
266 | 261 | abortmsgerrno("failed to putenv CHG_CLEAR_LC_CTYPE"); |
|
267 | 262 | } else { |
|
268 | 263 | if (setenv("CHGORIG_LC_CTYPE", lc_ctype_env, 1) != 0) { |
|
269 | 264 | abortmsgerrno("failed to setenv CHGORIG_LC_CTYPE"); |
|
270 | 265 | } |
|
271 | 266 | } |
|
272 | 267 | |
|
273 | 268 | /* close any open files to avoid hanging locks */ |
|
274 | 269 | DIR *dp = opendir("/proc/self/fd"); |
|
275 | 270 | if (dp != NULL) { |
|
276 | 271 | debugmsg("closing files based on /proc contents"); |
|
277 | 272 | struct dirent *de; |
|
278 | 273 | while ((de = readdir(dp))) { |
|
279 | 274 | errno = 0; |
|
280 | 275 | char *end; |
|
281 | 276 | long fd_value = strtol(de->d_name, &end, 10); |
|
282 | 277 | if (end == de->d_name) { |
|
283 | 278 | /* unable to convert to int (. or ..) */ |
|
284 | 279 | continue; |
|
285 | 280 | } |
|
286 | 281 | if (errno == ERANGE) { |
|
287 | 282 | debugmsg("tried to parse %s, but range error " |
|
288 | 283 | "occurred", |
|
289 | 284 | de->d_name); |
|
290 | 285 | continue; |
|
291 | 286 | } |
|
292 | 287 | if (fd_value > STDERR_FILENO && fd_value != dirfd(dp)) { |
|
293 | 288 | debugmsg("closing fd %ld", fd_value); |
|
294 | 289 | int res = close(fd_value); |
|
295 | 290 | if (res) { |
|
296 | 291 | debugmsg("tried to close fd %ld: %d " |
|
297 | 292 | "(errno: %d)", |
|
298 | 293 | fd_value, res, errno); |
|
299 | 294 | } |
|
300 | 295 | } |
|
301 | 296 | } |
|
302 | 297 | closedir(dp); |
|
303 | 298 | } |
|
304 | 299 | |
|
305 | 300 | if (putenv("CHGINTERNALMARK=") != 0) |
|
306 | 301 | abortmsgerrno("failed to putenv"); |
|
307 | 302 | if (execvp(hgcmd, (char **)argv) < 0) |
|
308 | 303 | abortmsgerrno("failed to exec cmdserver"); |
|
309 | 304 | free(argv); |
|
310 | 305 | } |
|
311 | 306 | |
|
312 | 307 | /* Retry until we can connect to the server. Give up after some time. */ |
|
313 | 308 | static hgclient_t *retryconnectcmdserver(struct cmdserveropts *opts, pid_t pid) |
|
314 | 309 | { |
|
315 | 310 | static const struct timespec sleepreq = {0, 10 * 1000000}; |
|
316 | 311 | int pst = 0; |
|
317 | 312 | |
|
318 | 313 | debugmsg("try connect to %s repeatedly", opts->initsockname); |
|
319 | 314 | |
|
320 | 315 | unsigned int timeoutsec = 60; /* default: 60 seconds */ |
|
321 | 316 | const char *timeoutenv = getenv("CHGTIMEOUT"); |
|
322 | 317 | if (timeoutenv) |
|
323 | 318 | sscanf(timeoutenv, "%u", &timeoutsec); |
|
324 | 319 | |
|
325 | 320 | for (unsigned int i = 0; !timeoutsec || i < timeoutsec * 100; i++) { |
|
326 | 321 | hgclient_t *hgc = hgc_open(opts->initsockname); |
|
327 | 322 | if (hgc) { |
|
328 | 323 | debugmsg("rename %s to %s", opts->initsockname, |
|
329 | 324 | opts->sockname); |
|
330 | 325 | int r = rename(opts->initsockname, opts->sockname); |
|
331 | 326 | if (r != 0) |
|
332 | 327 | abortmsgerrno("cannot rename"); |
|
333 | 328 | return hgc; |
|
334 | 329 | } |
|
335 | 330 | |
|
336 | 331 | if (pid > 0) { |
|
337 | 332 | /* collect zombie if child process fails to start */ |
|
338 | 333 | int r = waitpid(pid, &pst, WNOHANG); |
|
339 | 334 | if (r != 0) |
|
340 | 335 | goto cleanup; |
|
341 | 336 | } |
|
342 | 337 | |
|
343 | 338 | nanosleep(&sleepreq, NULL); |
|
344 | 339 | } |
|
345 | 340 | |
|
346 | 341 | abortmsg("timed out waiting for cmdserver %s", opts->initsockname); |
|
347 | 342 | return NULL; |
|
348 | 343 | |
|
349 | 344 | cleanup: |
|
350 | 345 | if (WIFEXITED(pst)) { |
|
351 | 346 | if (WEXITSTATUS(pst) == 0) |
|
352 | 347 | abortmsg("could not connect to cmdserver " |
|
353 | 348 | "(exited with status 0)"); |
|
354 | 349 | debugmsg("cmdserver exited with status %d", WEXITSTATUS(pst)); |
|
355 | 350 | exit(WEXITSTATUS(pst)); |
|
356 | 351 | } else if (WIFSIGNALED(pst)) { |
|
357 | 352 | abortmsg("cmdserver killed by signal %d", WTERMSIG(pst)); |
|
358 | 353 | } else { |
|
359 | 354 | abortmsg("error while waiting for cmdserver"); |
|
360 | 355 | } |
|
361 | 356 | return NULL; |
|
362 | 357 | } |
|
363 | 358 | |
|
364 | 359 | /* Connect to a cmdserver. Will start a new server on demand. */ |
|
365 | 360 | static hgclient_t *connectcmdserver(struct cmdserveropts *opts) |
|
366 | 361 | { |
|
367 | 362 | const char *sockname = |
|
368 | 363 | opts->redirectsockname[0] ? opts->redirectsockname : opts->sockname; |
|
369 | 364 | debugmsg("try connect to %s", sockname); |
|
370 | 365 | hgclient_t *hgc = hgc_open(sockname); |
|
371 | 366 | if (hgc) |
|
372 | 367 | return hgc; |
|
373 | 368 | |
|
374 | 369 | /* prevent us from being connected to an outdated server: we were |
|
375 | 370 | * told by a server to redirect to opts->redirectsockname and that |
|
376 | 371 | * address does not work. we do not want to connect to the server |
|
377 | 372 | * again because it will probably tell us the same thing. */ |
|
378 | 373 | if (sockname == opts->redirectsockname) |
|
379 | 374 | unlink(opts->sockname); |
|
380 | 375 | |
|
381 | 376 | debugmsg("start cmdserver at %s", opts->initsockname); |
|
382 | 377 | |
|
383 | 378 | pid_t pid = fork(); |
|
384 | 379 | if (pid < 0) |
|
385 | 380 | abortmsg("failed to fork cmdserver process"); |
|
386 | 381 | if (pid == 0) { |
|
387 | 382 | execcmdserver(opts); |
|
388 | 383 | } else { |
|
389 | 384 | hgc = retryconnectcmdserver(opts, pid); |
|
390 | 385 | } |
|
391 | 386 | |
|
392 | 387 | return hgc; |
|
393 | 388 | } |
|
394 | 389 | |
|
395 | 390 | static void killcmdserver(const struct cmdserveropts *opts) |
|
396 | 391 | { |
|
397 | 392 | /* resolve config hash */ |
|
398 | 393 | char *resolvedpath = realpath(opts->sockname, NULL); |
|
399 | 394 | if (resolvedpath) { |
|
400 | 395 | unlink(resolvedpath); |
|
401 | 396 | free(resolvedpath); |
|
402 | 397 | } |
|
403 | 398 | } |
|
404 | 399 | |
|
405 | 400 | /* Run instructions sent from the server like unlink and set redirect path |
|
406 | 401 | * Return 1 if reconnect is needed, otherwise 0 */ |
|
407 | 402 | static int runinstructions(struct cmdserveropts *opts, const char **insts) |
|
408 | 403 | { |
|
409 | 404 | int needreconnect = 0; |
|
410 | 405 | if (!insts) |
|
411 | 406 | return needreconnect; |
|
412 | 407 | |
|
413 | 408 | assert(insts); |
|
414 | 409 | opts->redirectsockname[0] = '\0'; |
|
415 | 410 | const char **pinst; |
|
416 | 411 | for (pinst = insts; *pinst; pinst++) { |
|
417 | 412 | debugmsg("instruction: %s", *pinst); |
|
418 | 413 | if (strncmp(*pinst, "unlink ", 7) == 0) { |
|
419 | 414 | unlink(*pinst + 7); |
|
420 | 415 | } else if (strncmp(*pinst, "redirect ", 9) == 0) { |
|
421 | 416 | int r = snprintf(opts->redirectsockname, |
|
422 | 417 | sizeof(opts->redirectsockname), "%s", |
|
423 | 418 | *pinst + 9); |
|
424 | 419 | if (r < 0 || r >= (int)sizeof(opts->redirectsockname)) |
|
425 | 420 | abortmsg("redirect path is too long (%d)", r); |
|
426 | 421 | needreconnect = 1; |
|
427 | 422 | } else if (strncmp(*pinst, "exit ", 5) == 0) { |
|
428 | 423 | int n = 0; |
|
429 | 424 | if (sscanf(*pinst + 5, "%d", &n) != 1) |
|
430 | 425 | abortmsg("cannot read the exit code"); |
|
431 | 426 | exit(n); |
|
432 | 427 | } else if (strcmp(*pinst, "reconnect") == 0) { |
|
433 | 428 | needreconnect = 1; |
|
434 | 429 | } else { |
|
435 | 430 | abortmsg("unknown instruction: %s", *pinst); |
|
436 | 431 | } |
|
437 | 432 | } |
|
438 | 433 | return needreconnect; |
|
439 | 434 | } |
|
440 | 435 | |
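Concretely, the `insts` array handled above is a NULL-terminated list of strings; for example (socket paths hypothetical):

    unlink /tmp/chg1000/server-0123abcd
    redirect /tmp/chg1000/server-89ab4567
    exit 255
    reconnect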
|
441 | 436 | /* |
|
442 | 437 | * Test whether the command or the environment is unsupported. |
|
443 | 438 | * |
|
444 | 439 | * If any of the stdio file descriptors are not present (rare, but some tools |
|
445 | 440 | * might spawn new processes without stdio instead of redirecting them to the |
|
446 | 441 | * null device), then mark it as not supported because attachio won't work |
|
447 | 442 | * correctly. |
|
448 | 443 | * |
|
449 | 444 | * The command list is not designed to cover all cases. But it's fast, and does |
|
450 | 445 | * not depend on the server. |
|
451 | 446 | */ |
|
452 | 447 | static int isunsupported(int argc, const char *argv[]) |
|
453 | 448 | { |
|
454 | 449 | enum { |
|
455 | 450 | SERVE = 1, |
|
456 | 451 | DAEMON = 2, |
|
457 | 452 | SERVEDAEMON = SERVE | DAEMON, |
|
458 | 453 | }; |
|
459 | 454 | unsigned int state = 0; |
|
460 | 455 | int i; |
|
461 | 456 | /* use fcntl to test missing stdio fds */ |
|
462 | 457 | if (fcntl(STDIN_FILENO, F_GETFD) == -1 || |
|
463 | 458 | fcntl(STDOUT_FILENO, F_GETFD) == -1 || |
|
464 | 459 | fcntl(STDERR_FILENO, F_GETFD) == -1) { |
|
465 | 460 | debugmsg("stdio fds are missing"); |
|
466 | 461 | return 1; |
|
467 | 462 | } |
|
468 | 463 | for (i = 0; i < argc; ++i) { |
|
469 | 464 | if (strcmp(argv[i], "--") == 0) |
|
470 | 465 | break; |
|
471 | 466 | /* |
|
472 | 467 | * there can be false positives but no false negatives |
|
473 | 468 | * we cannot assume `serve` will always be first argument |
|
474 | 469 | * because global options can be passed before the command name |
|
475 | 470 | */ |
|
476 | 471 | if (strcmp("serve", argv[i]) == 0) |
|
477 | 472 | state |= SERVE; |
|
478 | 473 | else if (strcmp("-d", argv[i]) == 0 || |
|
479 | 474 | strcmp("--daemon", argv[i]) == 0) |
|
480 | 475 | state |= DAEMON; |
|
481 | 476 | } |
|
482 | 477 | return (state & SERVEDAEMON) == SERVEDAEMON; |
|
483 | 478 | } |
|
484 | 479 | |
|
485 | 480 | static void execoriginalhg(const char *argv[]) |
|
486 | 481 | { |
|
487 | 482 | debugmsg("execute original hg"); |
|
488 | 483 | if (execvp(gethgcmd(), (char **)argv) < 0) |
|
489 | 484 | abortmsgerrno("failed to exec original hg"); |
|
490 | 485 | } |
|
491 | 486 | |
|
492 | 487 | int main(int argc, const char *argv[], const char *envp[]) |
|
493 | 488 | { |
|
494 | 489 | if (getenv("CHGDEBUG")) |
|
495 | 490 | enabledebugmsg(); |
|
496 | 491 | |
|
497 | 492 | if (!getenv("HGPLAIN") && isatty(fileno(stderr))) |
|
498 | 493 | enablecolor(); |
|
499 | 494 | |
|
500 | 495 | if (getenv("CHGINTERNALMARK")) |
|
501 | 496 | abortmsg("chg started by chg detected.\n" |
|
502 | 497 | "Please make sure ${HG:-hg} is not a symlink or " |
|
503 | 498 | "wrapper to chg. Alternatively, set $CHGHG to the " |
|
504 | 499 | "path of real hg."); |
|
505 | 500 | |
|
506 | 501 | if (isunsupported(argc - 1, argv + 1)) |
|
507 | 502 | execoriginalhg(argv); |
|
508 | 503 | |
|
509 | 504 | struct cmdserveropts opts; |
|
510 | 505 | initcmdserveropts(&opts); |
|
511 | 506 | setcmdserveropts(&opts); |
|
512 | 507 | setcmdserverargs(&opts, argc, argv); |
|
513 | 508 | |
|
514 | 509 | if (argc == 2) { |
|
515 | 510 | if (strcmp(argv[1], "--kill-chg-daemon") == 0) { |
|
516 | 511 | killcmdserver(&opts); |
|
517 | 512 | return 0; |
|
518 | 513 | } |
|
519 | 514 | } |
|
520 | 515 | |
|
521 | 516 | hgclient_t *hgc; |
|
522 | 517 | size_t retry = 0; |
|
523 | 518 | while (1) { |
|
524 | 519 | hgc = connectcmdserver(&opts); |
|
525 | 520 | if (!hgc) |
|
526 | 521 | abortmsg("cannot open hg client"); |
|
527 | 522 | hgc_setenv(hgc, envp); |
|
528 | 523 | const char **insts = hgc_validate(hgc, argv + 1, argc - 1); |
|
529 | 524 | int needreconnect = runinstructions(&opts, insts); |
|
530 | 525 | free(insts); |
|
531 | 526 | if (!needreconnect) |
|
532 | 527 | break; |
|
533 | 528 | hgc_close(hgc); |
|
534 | 529 | if (++retry > 10) |
|
535 | 530 | abortmsg("too many redirections.\n" |
|
536 | 531 | "Please make sure %s is not a wrapper which " |
|
537 | 532 | "changes sensitive environment variables " |
|
538 | 533 | "before executing hg. If you have to use a " |
|
539 | 534 | "wrapper, wrap chg instead of hg.", |
|
540 | 535 | gethgcmd()); |
|
541 | 536 | } |
|
542 | 537 | |
|
543 | 538 | setupsignalhandler(hgc_peerpid(hgc), hgc_peerpgid(hgc)); |
|
544 | 539 | atexit(waitpager); |
|
545 | 540 | int exitcode = hgc_runcommand(hgc, argv + 1, argc - 1); |
|
546 | 541 | restoresignalhandler(); |
|
547 | 542 | hgc_close(hgc); |
|
548 | 543 | freecmdserveropts(&opts); |
|
549 | 544 | |
|
550 | 545 | return exitcode; |
|
551 | 546 | } |
@@ -1,65 +1,69 b'' | |||
|
1 | 1 | # dirstatenonnormalcheck.py - extension to check the consistency of the |
|
2 | 2 | # dirstate's non-normal map |
|
3 | 3 | # |
|
4 | 4 | # For most operations on dirstate, this extension checks that the nonnormalset |
|
5 | 5 | # contains the right entries. |
|
6 | 6 | # It compares the nonnormal file to a nonnormalset built from the map of all |
|
7 | 7 | # the files in the dirstate to check that they contain the same files. |
|
8 | 8 | |
|
9 | 9 | from __future__ import absolute_import |
|
10 | 10 | |
|
11 | 11 | from mercurial import ( |
|
12 | 12 | dirstate, |
|
13 | 13 | extensions, |
|
14 | pycompat, | |
|
14 | 15 | ) |
|
15 | 16 | |
|
16 | 17 | |
|
17 | 18 | def nonnormalentries(dmap): |
|
18 | 19 | """Compute nonnormal entries from dirstate's dmap""" |
|
19 | 20 | res = set() |
|
20 | 21 | for f, e in dmap.iteritems(): |
|
21 | if e |
|
22 | if e.state != b'n' or e.mtime == -1: | |
|
22 | 23 | res.add(f) |
|
23 | 24 | return res |
|
24 | 25 | |
|
25 | 26 | |
|
26 | 27 | def checkconsistency(ui, orig, dmap, _nonnormalset, label): |
|
27 | 28 | """Compute nonnormalset from dmap, check that it matches _nonnormalset""" |
|
28 | 29 | nonnormalcomputedmap = nonnormalentries(dmap) |
|
29 | 30 | if _nonnormalset != nonnormalcomputedmap: |
|
30 | ui.develwarn(b"%s call to %s\n" % (label, orig), config=b'dirstate') | |
|
31 | b_orig = pycompat.sysbytes(repr(orig)) | |
|
32 | ui.develwarn(b"%s call to %s\n" % (label, b_orig), config=b'dirstate') | |
|
31 | 33 | ui.develwarn(b"inconsistency in nonnormalset\n", config=b'dirstate') |
|
32 | ui.develwarn(b"[nonnormalset] %s\n" % _nonnormalset, config=b'dirstate') | |
|
33 | ui.develwarn(b"[ |
|
34 | b_nonnormal = pycompat.sysbytes(repr(_nonnormalset)) | |
|
35 | ui.develwarn(b"[nonnormalset] %s\n" % b_nonnormal, config=b'dirstate') | |
|
36 | b_nonnormalcomputed = pycompat.sysbytes(repr(nonnormalcomputedmap)) | |
|
37 | ui.develwarn(b"[map] %s\n" % b_nonnormalcomputed, config=b'dirstate') | |
|
34 | 38 | |
|
35 | 39 | |
|
36 | def _checkdirstate(orig, self, arg): | |
|
40 | def _checkdirstate(orig, self, *args, **kwargs): | |
|
37 | 41 | """Check nonnormal set consistency before and after the call to orig""" |
|
38 | 42 | checkconsistency( |
|
39 | 43 | self._ui, orig, self._map, self._map.nonnormalset, b"before" |
|
40 | 44 | ) |
|
41 | r = orig(self, arg) | |
|
45 | r = orig(self, *args, **kwargs) | |
|
42 | 46 | checkconsistency( |
|
43 | 47 | self._ui, orig, self._map, self._map.nonnormalset, b"after" |
|
44 | 48 | ) |
|
45 | 49 | return r |
|
46 | 50 | |
|
47 | 51 | |
|
48 | 52 | def extsetup(ui): |
|
49 | 53 | """Wrap functions modifying dirstate to check nonnormalset consistency""" |
|
50 | 54 | dirstatecl = dirstate.dirstate |
|
51 | 55 | devel = ui.configbool(b'devel', b'all-warnings') |
|
52 | 56 | paranoid = ui.configbool(b'experimental', b'nonnormalparanoidcheck') |
|
53 | 57 | if devel: |
|
54 | 58 | extensions.wrapfunction(dirstatecl, '_writedirstate', _checkdirstate) |
|
55 | 59 | if paranoid: |
|
56 | 60 | # We don't do all these checks when paranoid is disabled as it would |
|
57 | 61 | # make the extension run very slowly on large repos |
|
58 | 62 | extensions.wrapfunction(dirstatecl, 'normallookup', _checkdirstate) |
|
59 | 63 | extensions.wrapfunction(dirstatecl, 'otherparent', _checkdirstate) |
|
60 | 64 | extensions.wrapfunction(dirstatecl, 'normal', _checkdirstate) |
|
61 | 65 | extensions.wrapfunction(dirstatecl, 'write', _checkdirstate) |
|
62 | 66 | extensions.wrapfunction(dirstatecl, 'add', _checkdirstate) |
|
63 | 67 | extensions.wrapfunction(dirstatecl, 'remove', _checkdirstate) |
|
64 | 68 | extensions.wrapfunction(dirstatecl, 'merge', _checkdirstate) |
|
65 | 69 | extensions.wrapfunction(dirstatecl, 'drop', _checkdirstate) |
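To exercise these wrappers, the extension would be enabled together with the config bits read above, e.g. (a hypothetical hgrc):

    [extensions]
    dirstatenonnormalcheck = contrib/dirstatenonnormalcheck.py
    [devel]
    all-warnings = yes
    [experimental]
    nonnormalparanoidcheck = yes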
@@ -1,47 +1,60 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | # Dump revlogs as raw data stream |
|
3 | 3 | # $ find .hg/store/ -name "*.i" | xargs dumprevlog > repo.dump |
|
4 | 4 | |
|
5 | 5 | from __future__ import absolute_import, print_function |
|
6 | 6 | |
|
7 | 7 | import sys |
|
8 | 8 | from mercurial.node import hex |
|
9 | 9 | from mercurial import ( |
|
10 | 10 | encoding, |
|
11 | 11 | pycompat, |
|
12 | 12 | revlog, |
|
13 | 13 | ) |
|
14 | 14 | from mercurial.utils import procutil |
|
15 | 15 | |
|
16 | from mercurial.revlogutils import ( | |
|
17 | constants as revlog_constants, | |
|
18 | ) | |
|
19 | ||
|
16 | 20 | for fp in (sys.stdin, sys.stdout, sys.stderr): |
|
17 | 21 | procutil.setbinary(fp) |
|
18 | 22 | |
|
19 | 23 | |
|
20 | 24 | def binopen(path, mode=b'rb'): |
|
21 | 25 | if b'b' not in mode: |
|
22 | 26 | mode = mode + b'b' |
|
23 | 27 | return open(path, pycompat.sysstr(mode)) |
|
24 | 28 | |
|
25 | 29 | |
|
26 | 30 | binopen.options = {} |
|
27 | 31 | |
|
28 | 32 | |
|
29 | 33 | def printb(data, end=b'\n'): |
|
30 | 34 | sys.stdout.flush() |
|
31 | 35 | procutil.stdout.write(data + end) |
|
32 | 36 | |
|
33 | 37 | |
|
34 | 38 | for f in sys.argv[1:]: |
|
35 | |
|
39 | localf = encoding.strtolocal(f) | |
|
40 | if not localf.endswith(b'.i'): | |
|
41 | print("file:", f, file=sys.stderr) | |
|
42 | print(" invalid filename", file=sys.stderr) | |
|
43 | ||
|
44 | r = revlog.revlog( | |
|
45 | binopen, | |
|
46 | target=(revlog_constants.KIND_OTHER, b'dump-revlog'), | |
|
47 | radix=localf[:-2], | |
|
48 | ) | |
|
36 | 49 | print("file:", f) |
|
37 | 50 | for i in r: |
|
38 | 51 | n = r.node(i) |
|
39 | 52 | p = r.parents(n) |
|
40 | 53 | d = r.revision(n) |
|
41 | 54 | printb(b"node: %s" % hex(n)) |
|
42 | 55 | printb(b"linkrev: %d" % r.linkrev(i)) |
|
43 | 56 | printb(b"parents: %s %s" % (hex(p[0]), hex(p[1]))) |
|
44 | 57 | printb(b"length: %d" % len(d)) |
|
45 | 58 | printb(b"-start-") |
|
46 | 59 | printb(d) |
|
47 | 60 | printb(b"-end-") |
@@ -1,366 +1,371 b'' | |||
|
1 | 1 | from __future__ import absolute_import, print_function |
|
2 | 2 | |
|
3 | 3 | import argparse |
|
4 | import os | |
|
4 | 5 | import struct |
|
5 | 6 | import sys |
|
6 | 7 | import zipfile |
|
7 | 8 | |
|
9 | # Add ../.. to sys.path as an absolute path so we can import hg modules | |
|
10 | hgloc = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) | |
|
11 | sys.path[0:0] = [hgloc] | |
|
12 | ||
|
8 | 13 | from mercurial import ( |
|
9 | 14 | hg, |
|
10 | 15 | ui as uimod, |
|
11 | 16 | ) |
|
12 | 17 | |
|
13 | 18 | ap = argparse.ArgumentParser() |
|
14 | 19 | ap.add_argument("out", metavar="some.zip", type=str, nargs=1) |
|
15 | 20 | args = ap.parse_args() |
|
16 | 21 | |
|
17 | 22 | |
|
18 | 23 | if sys.version_info[0] < 3: |
|
19 | 24 | |
|
20 | 25 | class py2reprhack(object): |
|
21 | 26 | def __repr__(self): |
|
22 | 27 | """Py2 calls __repr__ for `bytes(foo)`, forward to __bytes__""" |
|
23 | 28 | return self.__bytes__() |
|
24 | 29 | |
|
25 | 30 | |
|
26 | 31 | else: |
|
27 | 32 | |
|
28 | 33 | class py2reprhack(object): |
|
29 | 34 | """Not needed on py3.""" |
|
30 | 35 | |
|
31 | 36 | |
|
32 | 37 | class deltafrag(py2reprhack): |
|
33 | 38 | def __init__(self, start, end, data): |
|
34 | 39 | self.start = start |
|
35 | 40 | self.end = end |
|
36 | 41 | self.data = data |
|
37 | 42 | |
|
38 | 43 | def __bytes__(self): |
|
39 | 44 | return ( |
|
40 | 45 | struct.pack(">lll", self.start, self.end, len(self.data)) |
|
41 | 46 | + self.data |
|
42 | 47 | ) |
|
43 | 48 | |
|
44 | 49 | |
|
45 | 50 | class delta(py2reprhack): |
|
46 | 51 | def __init__(self, frags): |
|
47 | 52 | self.frags = frags |
|
48 | 53 | |
|
49 | 54 | def __bytes__(self): |
|
50 | 55 | return b''.join(bytes(f) for f in self.frags) |
|
51 | 56 | |
|
52 | 57 | |
|
53 | 58 | class corpus(py2reprhack): |
|
54 | 59 | def __init__(self, base, deltas): |
|
55 | 60 | self.base = base |
|
56 | 61 | self.deltas = deltas |
|
57 | 62 | |
|
58 | 63 | def __bytes__(self): |
|
59 | 64 | deltas = [bytes(d) for d in self.deltas] |
|
60 | 65 | parts = ( |
|
61 | 66 | [ |
|
62 | 67 | struct.pack(">B", len(deltas) + 1), |
|
63 | 68 | struct.pack(">H", len(self.base)), |
|
64 | 69 | ] |
|
65 | 70 | + [struct.pack(">H", len(d)) for d in deltas] |
|
66 | 71 | + [self.base] |
|
67 | 72 | + deltas |
|
68 | 73 | ) |
|
69 | 74 | return b''.join(parts) |
|
70 | 75 | |
|
71 | 76 | |
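A worked example of the container layout produced above, under Python 3 (the sizes follow from the struct formats in deltafrag and corpus):

    import struct

    blob = bytes(corpus(b'a', [delta([deltafrag(0, 1, b'b')])]))
    assert blob[0] == 2               # chunk count: the base plus one delta
    assert blob[1:3] == b'\x00\x01'   # big-endian length of the base
    assert blob[3:5] == b'\x00\x0d'   # delta: 12-byte >lll header + 1 data byte
    assert blob[5:6] == b'a'          # the base text
    assert blob[6:] == struct.pack('>lll', 0, 1, 1) + b'b'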
|
72 | 77 | with zipfile.ZipFile(args.out[0], "w", zipfile.ZIP_STORED) as zf: |
|
73 | 78 | # Manually constructed entries |
|
74 | 79 | zf.writestr( |
|
75 | 80 | "one_delta_applies", |
|
76 | 81 | bytes(corpus(b'a', [delta([deltafrag(0, 1, b'b')])])), |
|
77 | 82 | ) |
|
78 | 83 | zf.writestr( |
|
79 | 84 | "one_delta_starts_late", |
|
80 | 85 | bytes(corpus(b'a', [delta([deltafrag(3, 1, b'b')])])), |
|
81 | 86 | ) |
|
82 | 87 | zf.writestr( |
|
83 | 88 | "one_delta_ends_late", |
|
84 | 89 | bytes(corpus(b'a', [delta([deltafrag(0, 20, b'b')])])), |
|
85 | 90 | ) |
|
86 | 91 | |
|
87 | 92 | try: |
|
88 | 93 | # Generated from repo data |
|
89 | 94 | r = hg.repository(uimod.ui(), b'../..') |
|
90 | 95 | fl = r.file(b'mercurial/manifest.py') |
|
91 | 96 | rl = getattr(fl, '_revlog', fl) |
|
92 | 97 | bins = rl._chunks(rl._deltachain(10)[0]) |
|
93 | 98 | zf.writestr('manifest_py_rev_10', bytes(corpus(bins[0], bins[1:]))) |
|
94 | 99 | except: # skip this, so no re-raises |
|
95 | 100 | print('skipping seed file from repo data') |
|
96 | 101 | # Automatically discovered by running the fuzzer |
|
97 | 102 | zf.writestr( |
|
98 | 103 | "mpatch_decode_old_overread", b"\x02\x00\x00\x00\x02\x00\x00\x00" |
|
99 | 104 | ) |
|
100 | 105 | # https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=8876 |
|
101 | 106 | zf.writestr( |
|
102 | 107 | "mpatch_ossfuzz_getbe32_ubsan", |
|
103 | 108 | b"\x02\x00\x00\x00\x0c \xff\xff\xff\xff ", |
|
104 | 109 | ) |
|
105 | 110 | zf.writestr( |
|
106 | 111 | "mpatch_apply_over_memcpy", |
|
107 | 112 | b'\x13\x01\x00\x05\xd0\x00\x00\x00\x00\x00\x00\x00\x00\n \x00\x00\x00' |
|
108 | 113 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
109 | 114 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00' |
|
110 | 115 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
111 | 116 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
112 | 117 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
113 | 118 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
114 | 119 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
115 | 120 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
116 | 121 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
117 | 122 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
118 | 123 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
119 | 124 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
120 | 125 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
121 | 126 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
122 | 127 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
123 | 128 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
124 | 129 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
125 | 130 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
126 | 131 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
127 | 132 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
128 | 133 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
129 | 134 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
130 | 135 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
131 | 136 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
132 | 137 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
133 | 138 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
134 | 139 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
135 | 140 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
136 | 141 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
137 | 142 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x8c\x00\x00\x00\x00' |
|
138 | 143 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
139 | 144 | b'\x00\x00\x00\x00\x00\x00\x00\x00)\x00\x00\x00\x00\x00\x00\x00\x00' |
|
140 | 145 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
141 | 146 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
142 | 147 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
143 | 148 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
144 | 149 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
145 | 150 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
146 | 151 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
147 | 152 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
148 | 153 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
149 | 154 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
150 | 155 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
151 | 156 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
152 | 157 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
153 | 158 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
154 | 159 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
155 | 160 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
156 | 161 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
157 | 162 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
158 | 163 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
159 | 164 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
160 | 165 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
161 | 166 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
162 | 167 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
163 | 168 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
164 | 169 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
165 | 170 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
166 | 171 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
167 | 172 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
168 | 173 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
169 | 174 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
170 | 175 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
171 | 176 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
172 | 177 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00A\x00\x00\x00\x00' |
|
173 | 178 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
174 | 179 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
175 | 180 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
176 | 181 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
177 | 182 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
178 | 183 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
179 | 184 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
180 | 185 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
181 | 186 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
182 | 187 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
183 | 188 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
184 | 189 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
185 | 190 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
186 | 191 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
187 | 192 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
188 | 193 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
189 | 194 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
190 | 195 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
191 | 196 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
192 | 197 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
193 | 198 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
194 | 199 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
195 | 200 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
196 | 201 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
197 | 202 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x94\x18' |
|
198 | 203 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
199 | 204 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
200 | 205 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
201 | 206 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
202 | 207 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
203 | 208 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
204 | 209 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
205 | 210 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
206 | 211 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
207 | 212 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
208 | 213 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
209 | 214 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
210 | 215 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
211 | 216 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
212 | 217 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
213 | 218 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
214 | 219 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
215 | 220 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
216 | 221 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
217 | 222 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
218 | 223 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
219 | 224 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
220 | 225 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
221 | 226 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
222 | 227 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
223 | 228 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
224 | 229 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
225 | 230 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
226 | 231 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
227 | 232 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
228 | 233 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
229 | 234 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
230 | 235 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
231 | 236 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
232 | 237 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
233 | 238 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
234 | 239 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
235 | 240 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
236 | 241 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
237 | 242 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
238 | 243 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
239 | 244 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
240 | 245 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
241 | 246 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
242 | 247 | b'\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
243 | 248 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
244 | 249 | b'\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00\x00\x00\x00\x00' |
|
245 | 250 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
246 | 251 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
247 | 252 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
248 | 253 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
249 | 254 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
250 | 255 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
251 | 256 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
252 | 257 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
253 | 258 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
254 | 259 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
255 | 260 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
256 | 261 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
257 | 262 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
258 | 263 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
259 | 264 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
260 | 265 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
261 | 266 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
262 | 267 | b'\x00\x00\x94\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
263 | 268 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
264 | 269 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
265 | 270 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
266 | 271 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
267 | 272 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
268 | 273 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
269 | 274 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
270 | 275 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
271 | 276 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
272 | 277 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
273 | 278 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
274 | 279 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
275 | 280 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
276 | 281 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
277 | 282 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
278 | 283 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
279 | 284 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
280 | 285 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
281 | 286 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
282 | 287 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
283 | 288 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
284 | 289 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
285 | 290 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
286 | 291 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
287 | 292 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
288 | 293 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
289 | 294 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
290 | 295 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
291 | 296 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
292 | 297 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
293 | 298 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
294 | 299 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
295 | 300 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
296 | 301 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
297 | 302 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
298 | 303 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
299 | 304 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
300 | 305 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
301 | 306 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
302 | 307 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
303 | 308 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
304 | 309 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
305 | 310 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
306 | 311 | b'\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
307 | 312 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
308 | 313 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\x00\x00\x00' |
|
309 | 314 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
310 | 315 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
311 | 316 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
312 | 317 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
313 | 318 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
314 | 319 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
315 | 320 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
316 | 321 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
317 | 322 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
318 | 323 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
319 | 324 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
320 | 325 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
321 | 326 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
322 | 327 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
323 | 328 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
324 | 329 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
325 | 330 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
326 | 331 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
327 | 332 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00' |
|
328 | 333 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
329 | 334 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
330 | 335 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
331 | 336 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
332 | 337 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
333 | 338 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
334 | 339 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
335 | 340 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
336 | 341 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
337 | 342 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
338 | 343 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
339 | 344 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
340 | 345 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
341 | 346 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
342 | 347 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00' |
|
343 | 348 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
344 | 349 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
345 | 350 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
346 | 351 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
347 | 352 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00]\x00\x00' |
|
348 | 353 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
349 | 354 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
350 | 355 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
351 | 356 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
352 | 357 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
353 | 358 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
354 | 359 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
355 | 360 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
356 | 361 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
357 | 362 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
358 | 363 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
359 | 364 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
360 | 365 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
361 | 366 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
362 | 367 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
363 | 368 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00se\x00\x00' |
|
364 | 369 | b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' |
|
365 | 370 | b'\x00\x00\x00\x00', |
|
366 | 371 | ) |
@@ -1,141 +1,174 b'' | |||
|
1 | 1 | stages: |
|
2 | 2 | - tests |
|
3 | 3 | - phabricator |
|
4 | 4 | |
|
5 | 5 | image: registry.heptapod.net/mercurial/ci-images/mercurial-core:$HG_CI_IMAGE_TAG |
|
6 | 6 | |
|
7 | 7 | variables: |
|
8 | 8 | PYTHON: python |
|
9 | 9 | TEST_HGMODULEPOLICY: "allow" |
|
10 | 10 | HG_CI_IMAGE_TAG: "latest" |
|
11 | 11 | TEST_HGTESTS_ALLOW_NETIO: "0" |
|
12 | 12 | |
|
13 | 13 | .runtests_template: &runtests |
|
14 | 14 | stage: tests |
|
15 | 15 | # The runner made a clone as root. |
|
16 | 16 | # We make a new clone owned by the user used to run the step. |
|
17 | 17 | before_script: |
|
18 | 18 | - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no |
|
19 | 19 | - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` |
|
20 | 20 | - cd /tmp/mercurial-ci/ |
|
21 | 21 | - ls -1 tests/test-check-*.* > /tmp/check-tests.txt |
|
22 | 22 | - black --version |
|
23 | 23 | - clang-format --version |
|
24 | 24 | script: |
|
25 | 25 | - echo "python used, $PYTHON" |
|
26 | 26 | - echo "$RUNTEST_ARGS" |
|
27 | 27 | - HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" "$PYTHON" tests/run-tests.py --color=always $RUNTEST_ARGS |
|
28 | 28 | |
|
29 | 29 | checks-py2: |
|
30 | 30 | <<: *runtests |
|
31 | 31 | variables: |
|
32 | 32 | RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt" |
|
33 | 33 | |
|
34 | 34 | checks-py3: |
|
35 | 35 | <<: *runtests |
|
36 | 36 | variables: |
|
37 | 37 | RUNTEST_ARGS: "--time --test-list /tmp/check-tests.txt" |
|
38 | 38 | PYTHON: python3 |
|
39 | 39 | |
|
40 | 40 | rust-cargo-test-py2: &rust_cargo_test |
|
41 | 41 | stage: tests |
|
42 | 42 | script: |
|
43 | 43 | - echo "python used, $PYTHON" |
|
44 | 44 | - make rust-tests |
|
45 | 45 | |
|
46 | 46 | rust-cargo-test-py3: |
|
47 | 47 | stage: tests |
|
48 | 48 | <<: *rust_cargo_test |
|
49 | 49 | variables: |
|
50 | 50 | PYTHON: python3 |
|
51 | 51 | |
|
52 | 52 | phabricator-refresh: |
|
53 | 53 | stage: phabricator |
|
54 | 54 | variables: |
|
55 | 55 | DEFAULT_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)" |
|
56 | 56 | STABLE_COMMENT: ":white_check_mark: refresh by Heptapod after a successful CI run (:octopus: :green_heart:)\n⚠ This patch is intended for stable ⚠\n{image https://media.giphy.com/media/nYI8SmmChYXK0/source.gif}" |
|
57 | 57 | script: |
|
58 | 58 | - | |
|
59 | 59 | if [ `hg branch` == "stable" ]; then |
|
60 | 60 | ./contrib/phab-refresh-stack.sh --comment "$STABLE_COMMENT"; |
|
61 | 61 | else |
|
62 | 62 | ./contrib/phab-refresh-stack.sh --comment "$DEFAULT_COMMENT"; |
|
63 | 63 | fi |
|
64 | 64 | |
|
65 | 65 | test-py2: |
|
66 | 66 | <<: *runtests |
|
67 | 67 | variables: |
|
68 | 68 | RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" |
|
69 | 69 | TEST_HGMODULEPOLICY: "c" |
|
70 | 70 | TEST_HGTESTS_ALLOW_NETIO: "1" |
|
71 | 71 | |
|
72 | 72 | test-py3: |
|
73 | 73 | <<: *runtests |
|
74 | 74 | variables: |
|
75 | 75 | RUNTEST_ARGS: " --no-rust --blacklist /tmp/check-tests.txt" |
|
76 | 76 | PYTHON: python3 |
|
77 | 77 | TEST_HGMODULEPOLICY: "c" |
|
78 | 78 | TEST_HGTESTS_ALLOW_NETIO: "1" |
|
79 | 79 | |
|
80 | 80 | test-py2-pure: |
|
81 | 81 | <<: *runtests |
|
82 | 82 | variables: |
|
83 | 83 | RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt" |
|
84 | 84 | TEST_HGMODULEPOLICY: "py" |
|
85 | 85 | |
|
86 | 86 | test-py3-pure: |
|
87 | 87 | <<: *runtests |
|
88 | 88 | variables: |
|
89 | 89 | RUNTEST_ARGS: "--pure --blacklist /tmp/check-tests.txt" |
|
90 | 90 | PYTHON: python3 |
|
91 | 91 | TEST_HGMODULEPOLICY: "py" |
|
92 | 92 | |
|
93 | 93 | test-py2-rust: |
|
94 | 94 | <<: *runtests |
|
95 | 95 | variables: |
|
96 | 96 | HGWITHRUSTEXT: cpython |
|
97 | 97 | RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" |
|
98 | 98 | TEST_HGMODULEPOLICY: "rust+c" |
|
99 | 99 | |
|
100 | 100 | test-py3-rust: |
|
101 | 101 | <<: *runtests |
|
102 | 102 | variables: |
|
103 | 103 | HGWITHRUSTEXT: cpython |
|
104 | 104 | RUNTEST_ARGS: "--rust --blacklist /tmp/check-tests.txt" |
|
105 | 105 | PYTHON: python3 |
|
106 | 106 | TEST_HGMODULEPOLICY: "rust+c" |
|
107 | 107 | |
|
108 | 108 | test-py3-rhg: |
|
109 | 109 | <<: *runtests |
|
110 | 110 | variables: |
|
111 | 111 | HGWITHRUSTEXT: cpython |
|
112 | 112 | RUNTEST_ARGS: "--rust --rhg --blacklist /tmp/check-tests.txt" |
|
113 | 113 | PYTHON: python3 |
|
114 | 114 | TEST_HGMODULEPOLICY: "rust+c" |
|
115 | 115 | |
|
116 | 116 | test-py2-chg: |
|
117 | 117 | <<: *runtests |
|
118 | 118 | variables: |
|
119 | 119 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg" |
|
120 | 120 | TEST_HGMODULEPOLICY: "c" |
|
121 | 121 | |
|
122 | 122 | test-py3-chg: |
|
123 | 123 | <<: *runtests |
|
124 | 124 | variables: |
|
125 | 125 | PYTHON: python3 |
|
126 | 126 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt --chg" |
|
127 | 127 | TEST_HGMODULEPOLICY: "c" |
|
128 | 128 | |
|
129 | 129 | check-pytype-py3: |
|
130 | 130 | extends: .runtests_template |
|
131 | 131 | when: manual |
|
132 | 132 | before_script: |
|
133 | 133 | - hg clone . /tmp/mercurial-ci/ --noupdate --config phases.publish=no |
|
134 | 134 | - hg -R /tmp/mercurial-ci/ update `hg log --rev '.' --template '{node}'` |
|
135 | 135 | - cd /tmp/mercurial-ci/ |
|
136 | 136 | - make local PYTHON=$PYTHON |
|
137 | 137 | - $PYTHON -m pip install --user -U pytype==2021.04.15 |
|
138 | 138 | variables: |
|
139 | 139 | RUNTEST_ARGS: " --allow-slow-tests tests/test-check-pytype.t" |
|
140 | 140 | PYTHON: python3 |
|
141 | 141 | TEST_HGMODULEPOLICY: "c" |
|
142 | ||
|
143 | # `sh.exe --login` sets a couple of extra environment variables that are defined | |
|
144 | # in the MinGW shell, but switches CWD to /home/$username. The previous value | |
|
145 | # is stored in OLDPWD. Of the added variables, MSYSTEM is crucial to running | |
|
146 | # run-tests.py; it is needed to make run-tests.py generate a `python3` script |
|
147 | # that satisfies the various shebang lines and delegates to `py -3`. | |
|
148 | .window_runtests_template: &windows_runtests | |
|
149 | stage: tests | |
|
150 | before_script: | |
|
151 | # Temporary until this is adjusted in the environment | |
|
152 | - $Env:TEMP="C:/Temp" | |
|
153 | - $Env:TMP="C:/Temp" | |
|
154 | # TODO: find/install cvs, bzr, perforce, gpg, sqlite3 | |
|
155 | ||
|
156 | script: | |
|
157 | - echo "Entering script section" | |
|
158 | - echo "python used, $Env:PYTHON" | |
|
159 | - Invoke-Expression "$Env:PYTHON -V" | |
|
160 | - Invoke-Expression "$Env:PYTHON -m black --version" | |
|
161 | - echo "$Env:RUNTEST_ARGS" | |
|
162 | ||
|
163 | - C:/MinGW/msys/1.0/bin/sh.exe --login -c 'cd "$OLDPWD" && HGTESTS_ALLOW_NETIO="$TEST_HGTESTS_ALLOW_NETIO" HGMODULEPOLICY="$TEST_HGMODULEPOLICY" $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS' | |
|
164 | ||
|
165 | windows-py3: | |
|
166 | <<: *windows_runtests | |
|
167 | when: manual | |
|
168 | tags: | |
|
169 | - windows | |
|
170 | timeout: 2h | |
|
171 | variables: | |
|
172 | TEST_HGMODULEPOLICY: "c" | |
|
173 | RUNTEST_ARGS: "--blacklist /tmp/check-tests.txt" | |
|
174 | PYTHON: py -3 |
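
Each Linux job above expands the same `&runtests` template and differs only in its variables: `PYTHON`, `RUNTEST_ARGS`, and the `TEST_*` values that the script step copies into `HGMODULEPOLICY` and `HGTESTS_ALLOW_NETIO`. A minimal sketch of reproducing the `test-py3` job's script step locally, assuming a Mercurial checkout as the working directory (the values simply mirror the job definition above):

    import os
    import subprocess

    # Mirror the test-py3 job: export the TEST_* values under the names
    # that run-tests.py actually reads.
    env = dict(os.environ)
    env["HGTESTS_ALLOW_NETIO"] = "1"
    env["HGMODULEPOLICY"] = "c"

    # Equivalent of: $PYTHON tests/run-tests.py --color=always $RUNTEST_ARGS
    subprocess.run(
        ["python3", "tests/run-tests.py", "--color=always",
         "--no-rust", "--blacklist", "/tmp/check-tests.txt"],
        env=env,
        check=True,
    )
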
@@ -1,111 +1,118 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2007 by Intevation GmbH <intevation@intevation.de> |
|
4 | 4 | # |
|
5 | 5 | # Author(s): |
|
6 | 6 | # Thomas Arendsen Hein <thomas@intevation.de> |
|
7 | 7 | # |
|
8 | 8 | # This software may be used and distributed according to the terms of the |
|
9 | 9 | # GNU General Public License version 2 or any later version. |
|
10 | 10 | |
|
11 | 11 | """ |
|
12 | 12 | hg-ssh - a wrapper for ssh access to a limited set of mercurial repos |
|
13 | 13 | |
|
14 | 14 | To be used in ~/.ssh/authorized_keys with the "command" option, see sshd(8): |
|
15 | 15 | command="hg-ssh path/to/repo1 /path/to/repo2 ~/repo3 ~user/repo4" ssh-dss ... |
|
16 | 16 | (probably together with these other useful options: |
|
17 | 17 | no-port-forwarding,no-X11-forwarding,no-agent-forwarding) |
|
18 | 18 | |
|
19 | 19 | This allows pull/push over ssh from/to the repositories given as arguments. |
|
20 | 20 | |
|
21 | 21 | If all your repositories are subdirectories of a common directory, you can |
|
22 | 22 | allow shorter paths with: |
|
23 | 23 | command="cd path/to/my/repositories && hg-ssh repo1 subdir/repo2" |
|
24 | 24 | |
|
25 | 25 | You can use pattern matching of your normal shell, e.g.: |
|
26 | 26 | command="cd repos && hg-ssh user/thomas/* projects/{mercurial,foo}" |
|
27 | 27 | |
|
28 | 28 | You can also add a --read-only flag to allow read-only access to a key, e.g.: |
|
29 | 29 | command="hg-ssh --read-only repos/*" |
|
30 | 30 | """ |
|
31 | 31 | from __future__ import absolute_import |
|
32 | 32 | |
|
33 | 33 | import os |
|
34 | import re | |
|
34 | 35 | import shlex |
|
35 | 36 | import sys |
|
36 | 37 | |
|
37 | 38 | # enable importing on demand to reduce startup time |
|
38 | 39 | import hgdemandimport |
|
39 | 40 | |
|
40 | 41 | hgdemandimport.enable() |
|
41 | 42 | |
|
42 | 43 | from mercurial import ( |
|
43 | 44 | dispatch, |
|
44 | 45 | pycompat, |
|
45 | 46 | ui as uimod, |
|
46 | 47 | ) |
|
47 | 48 | |
|
48 | 49 | |
|
49 | 50 | def main(): |
|
50 | 51 | # Prevent insertion/deletion of CRs |
|
51 | 52 | dispatch.initstdio() |
|
52 | 53 | |
|
53 | 54 | cwd = os.getcwd() |
|
55 | if os.name == 'nt': | |
|
56 | # os.getcwd() is inconsistent on the capitalization of the drive | |
|
57 | # letter, so adjust it. see https://bugs.python.org/issue40368 | |
|
58 | if re.match('^[a-z]:', cwd): | |
|
59 | cwd = cwd[0:1].upper() + cwd[1:] | |
|
60 | ||
|
54 | 61 | readonly = False |
|
55 | 62 | args = sys.argv[1:] |
|
56 | 63 | while len(args): |
|
57 | 64 | if args[0] == '--read-only': |
|
58 | 65 | readonly = True |
|
59 | 66 | args.pop(0) |
|
60 | 67 | else: |
|
61 | 68 | break |
|
62 | 69 | allowed_paths = [ |
|
63 | 70 | os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) |
|
64 | 71 | for path in args |
|
65 | 72 | ] |
|
66 | 73 | orig_cmd = os.getenv('SSH_ORIGINAL_COMMAND', '?') |
|
67 | 74 | try: |
|
68 | 75 | cmdargv = shlex.split(orig_cmd) |
|
69 | 76 | except ValueError as e: |
|
70 | 77 | sys.stderr.write('Illegal command "%s": %s\n' % (orig_cmd, e)) |
|
71 | 78 | sys.exit(255) |
|
72 | 79 | |
|
73 | 80 | if cmdargv[:2] == ['hg', '-R'] and cmdargv[3:] == ['serve', '--stdio']: |
|
74 | 81 | path = cmdargv[2] |
|
75 | 82 | repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(path))) |
|
76 | 83 | if repo in allowed_paths: |
|
77 | 84 | cmd = [b'-R', pycompat.fsencode(repo), b'serve', b'--stdio'] |
|
78 | 85 | req = dispatch.request(cmd) |
|
79 | 86 | if readonly: |
|
80 | 87 | if not req.ui: |
|
81 | 88 | req.ui = uimod.ui.load() |
|
82 | 89 | req.ui.setconfig( |
|
83 | 90 | b'hooks', |
|
84 | 91 | b'pretxnopen.hg-ssh', |
|
85 | 92 | b'python:__main__.rejectpush', |
|
86 | 93 | b'hg-ssh', |
|
87 | 94 | ) |
|
88 | 95 | req.ui.setconfig( |
|
89 | 96 | b'hooks', |
|
90 | 97 | b'prepushkey.hg-ssh', |
|
91 | 98 | b'python:__main__.rejectpush', |
|
92 | 99 | b'hg-ssh', |
|
93 | 100 | ) |
|
94 | 101 | dispatch.dispatch(req) |
|
95 | 102 | else: |
|
96 | 103 | sys.stderr.write('Illegal repository "%s"\n' % repo) |
|
97 | 104 | sys.exit(255) |
|
98 | 105 | else: |
|
99 | 106 | sys.stderr.write('Illegal command "%s"\n' % orig_cmd) |
|
100 | 107 | sys.exit(255) |
|
101 | 108 | |
|
102 | 109 | |
|
103 | 110 | def rejectpush(ui, **kwargs): |
|
104 | 111 | ui.warn((b"Permission denied\n")) |
|
105 | 112 | # mercurial hooks use unix process conventions for hook return values |
|
106 | 113 | # so a truthy return means failure |
|
107 | 114 | return True |
|
108 | 115 | |
|
109 | 116 | |
|
110 | 117 | if __name__ == '__main__': |
|
111 | 118 | main() |
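
Note that hg-ssh never executes `SSH_ORIGINAL_COMMAND`: it only pattern-matches the command and then dispatches a fixed `hg -R <repo> serve --stdio` for a path that normalizes into the whitelist (the `--read-only` variant additionally installs the `rejectpush` hooks before dispatching). A standalone sketch of the validation step, with hypothetical repository paths and a hypothetical command string:

    import os
    import shlex

    cwd = os.getcwd()
    # Paths the key was authorized for, normalized the way hg-ssh does it.
    allowed_paths = {
        os.path.normpath(os.path.join(cwd, os.path.expanduser(p)))
        for p in ["repos/repo1", "repos/repo2"]
    }

    # What sshd would place in SSH_ORIGINAL_COMMAND for a pull or push.
    orig_cmd = "hg -R repos/repo1 serve --stdio"
    cmdargv = shlex.split(orig_cmd)
    if cmdargv[:2] == ["hg", "-R"] and cmdargv[3:] == ["serve", "--stdio"]:
        repo = os.path.normpath(os.path.join(cwd, os.path.expanduser(cmdargv[2])))
        print("allowed" if repo in allowed_paths else "rejected")
    else:
        print("rejected")
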
@@ -1,821 +1,821 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | |
|
3 | 3 | from __future__ import absolute_import, print_function |
|
4 | 4 | |
|
5 | 5 | import ast |
|
6 | 6 | import collections |
|
7 | 7 | import io |
|
8 | 8 | import os |
|
9 | 9 | import sys |
|
10 | 10 | |
|
11 | 11 | # Import a minimal set of stdlib modules needed for list_stdlib_modules() |
|
12 | 12 | # to work when run from a virtualenv. The modules were chosen empirically |
|
13 | 13 | # so that the return value matches the return value without virtualenv. |
|
14 | 14 | if True: # disable lexical sorting checks |
|
15 | 15 | try: |
|
16 | 16 | import BaseHTTPServer as basehttpserver |
|
17 | 17 | except ImportError: |
|
18 | 18 | basehttpserver = None |
|
19 | 19 | import zlib |
|
20 | 20 | |
|
21 | 21 | import testparseutil |
|
22 | 22 | |
|
23 | 23 | # Whitelist of modules that symbols can be directly imported from. |
|
24 | 24 | allowsymbolimports = ( |
|
25 | 25 | '__future__', |
|
26 | 'bzrlib', |

26 | 'breezy', | |
|
27 | 27 | 'hgclient', |
|
28 | 28 | 'mercurial', |
|
29 | 29 | 'mercurial.hgweb.common', |
|
30 | 30 | 'mercurial.hgweb.request', |
|
31 | 31 | 'mercurial.i18n', |
|
32 | 32 | 'mercurial.interfaces', |
|
33 | 33 | 'mercurial.node', |
|
34 | 34 | 'mercurial.pycompat', |
|
35 | 35 | # for revlog to re-export constants to extensions |
|
36 | 36 | 'mercurial.revlogutils.constants', |
|
37 | 37 | 'mercurial.revlogutils.flagutil', |
|
38 | 38 | # for cffi modules to re-export pure functions |
|
39 | 39 | 'mercurial.pure.base85', |
|
40 | 40 | 'mercurial.pure.bdiff', |
|
41 | 41 | 'mercurial.pure.mpatch', |
|
42 | 42 | 'mercurial.pure.osutil', |
|
43 | 43 | 'mercurial.pure.parsers', |
|
44 | 44 | # third-party imports should be directly imported |
|
45 | 45 | 'mercurial.thirdparty', |
|
46 | 46 | 'mercurial.thirdparty.attr', |
|
47 | 47 | 'mercurial.thirdparty.zope', |
|
48 | 48 | 'mercurial.thirdparty.zope.interface', |
|
49 | 49 | ) |
|
50 | 50 | |
|
51 | 51 | # Whitelist of symbols that can be directly imported. |
|
52 | 52 | directsymbols = ('demandimport',) |
|
53 | 53 | |
|
54 | 54 | # Modules that must be aliased because they are commonly confused with |
|
55 | 55 | # common variables and can create aliasing and readability issues. |
|
56 | 56 | requirealias = { |
|
57 | 57 | 'ui': 'uimod', |
|
58 | 58 | } |
|
59 | 59 | |
|
60 | 60 | |
|
61 | 61 | def usingabsolute(root): |
|
62 | 62 | """Whether absolute imports are being used.""" |
|
63 | 63 | if sys.version_info[0] >= 3: |
|
64 | 64 | return True |
|
65 | 65 | |
|
66 | 66 | for node in ast.walk(root): |
|
67 | 67 | if isinstance(node, ast.ImportFrom): |
|
68 | 68 | if node.module == '__future__': |
|
69 | 69 | for n in node.names: |
|
70 | 70 | if n.name == 'absolute_import': |
|
71 | 71 | return True |
|
72 | 72 | |
|
73 | 73 | return False |
|
74 | 74 | |
|
75 | 75 | |
|
76 | 76 | def walklocal(root): |
|
77 | 77 | """Recursively yield all descendant nodes but not in a different scope""" |
|
78 | 78 | todo = collections.deque(ast.iter_child_nodes(root)) |
|
79 | 79 | yield root, False |
|
80 | 80 | while todo: |
|
81 | 81 | node = todo.popleft() |
|
82 | 82 | newscope = isinstance(node, ast.FunctionDef) |
|
83 | 83 | if not newscope: |
|
84 | 84 | todo.extend(ast.iter_child_nodes(node)) |
|
85 | 85 | yield node, newscope |
|
86 | 86 | |
|
87 | 87 | |
|
88 | 88 | def dotted_name_of_path(path): |
|
89 | 89 | """Given a relative path to a source file, return its dotted module name. |
|
90 | 90 | |
|
91 | 91 | >>> dotted_name_of_path('mercurial/error.py') |
|
92 | 92 | 'mercurial.error' |
|
93 | 93 | >>> dotted_name_of_path('zlibmodule.so') |
|
94 | 94 | 'zlib' |
|
95 | 95 | """ |
|
96 | 96 | parts = path.replace(os.sep, '/').split('/') |
|
97 | 97 | parts[-1] = parts[-1].split('.', 1)[0] # remove .py and .so and .ARCH.so |
|
98 | 98 | if parts[-1].endswith('module'): |
|
99 | 99 | parts[-1] = parts[-1][:-6] |
|
100 | 100 | return '.'.join(parts) |
|
101 | 101 | |
|
102 | 102 | |
|
103 | 103 | def fromlocalfunc(modulename, localmods): |
|
104 | 104 | """Get a function to examine which locally defined module the |
|
105 | 105 | target source imports via a specified name. |
|
106 | 106 | |
|
107 | 107 | `modulename` is a `dotted_name_of_path()`-ed source file path, |
|
108 | 108 | which may have `.__init__` at the end of it, of the target source. |
|
109 | 109 | |
|
110 | 110 | `localmods` is a set of absolute `dotted_name_of_path()`-ed source file |
|
111 | 111 | paths of locally defined (= Mercurial specific) modules. |
|
112 | 112 | |
|
113 | 113 | This function assumes that module names not existing in |
|
114 | 114 | `localmods` are from the Python standard library. |
|
115 | 115 | |
|
116 | 116 | This function returns the function, which takes `name` argument, |
|
117 | 117 | and returns `(absname, dottedpath, hassubmod)` tuple if `name` |
|
118 | 118 | matches against locally defined module. Otherwise, it returns |
|
119 | 119 | False. |
|
120 | 120 | |
|
121 | 121 | It is assumed that `name` doesn't have `.__init__`. |
|
122 | 122 | |
|
123 | 123 | `absname` is an absolute module name of the specified `name` |

124 | 124 | (e.g. "hgext.convert"). This can be used to compose a prefix for |

125 | 125 | submodules and the like. |
|
126 | 126 | |
|
127 | 127 | `dottedpath` is a `dotted_name_of_path()`-ed source file path |
|
128 | 128 | (e.g. "hgext.convert.__init__") of `name`. This is used to look |

129 | 129 | the module up in `localmods` again. |
|
130 | 130 | |
|
131 | 131 | `hassubmod` is whether it may have submodules under it (for |

132 | 132 | convenience, even though this is also equivalent to "absname != |

133 | 133 | dottedpath") |
|
134 | 134 | |
|
135 | 135 | >>> localmods = {'foo.__init__', 'foo.foo1', |
|
136 | 136 | ... 'foo.bar.__init__', 'foo.bar.bar1', |
|
137 | 137 | ... 'baz.__init__', 'baz.baz1'} |
|
138 | 138 | >>> fromlocal = fromlocalfunc('foo.xxx', localmods) |
|
139 | 139 | >>> # relative |
|
140 | 140 | >>> fromlocal('foo1') |
|
141 | 141 | ('foo.foo1', 'foo.foo1', False) |
|
142 | 142 | >>> fromlocal('bar') |
|
143 | 143 | ('foo.bar', 'foo.bar.__init__', True) |
|
144 | 144 | >>> fromlocal('bar.bar1') |
|
145 | 145 | ('foo.bar.bar1', 'foo.bar.bar1', False) |
|
146 | 146 | >>> # absolute |
|
147 | 147 | >>> fromlocal('baz') |
|
148 | 148 | ('baz', 'baz.__init__', True) |
|
149 | 149 | >>> fromlocal('baz.baz1') |
|
150 | 150 | ('baz.baz1', 'baz.baz1', False) |
|
151 | 151 | >>> # unknown = maybe standard library |
|
152 | 152 | >>> fromlocal('os') |
|
153 | 153 | False |
|
154 | 154 | >>> fromlocal(None, 1) |
|
155 | 155 | ('foo', 'foo.__init__', True) |
|
156 | 156 | >>> fromlocal('foo1', 1) |
|
157 | 157 | ('foo.foo1', 'foo.foo1', False) |
|
158 | 158 | >>> fromlocal2 = fromlocalfunc('foo.xxx.yyy', localmods) |
|
159 | 159 | >>> fromlocal2(None, 2) |
|
160 | 160 | ('foo', 'foo.__init__', True) |
|
161 | 161 | >>> fromlocal2('bar2', 1) |
|
162 | 162 | False |
|
163 | 163 | >>> fromlocal2('bar', 2) |
|
164 | 164 | ('foo.bar', 'foo.bar.__init__', True) |
|
165 | 165 | """ |
|
166 | 166 | if not isinstance(modulename, str): |
|
167 | 167 | modulename = modulename.decode('ascii') |
|
168 | 168 | prefix = '.'.join(modulename.split('.')[:-1]) |
|
169 | 169 | if prefix: |
|
170 | 170 | prefix += '.' |
|
171 | 171 | |
|
172 | 172 | def fromlocal(name, level=0): |
|
173 | 173 | # name is a false value when relative imports are used. |
|
174 | 174 | if not name: |
|
175 | 175 | # If relative imports are used, level must be greater than 0. |
|
176 | 176 | assert level > 0 |
|
177 | 177 | candidates = ['.'.join(modulename.split('.')[:-level])] |
|
178 | 178 | else: |
|
179 | 179 | if not level: |
|
180 | 180 | # Check relative name first. |
|
181 | 181 | candidates = [prefix + name, name] |
|
182 | 182 | else: |
|
183 | 183 | candidates = [ |
|
184 | 184 | '.'.join(modulename.split('.')[:-level]) + '.' + name |
|
185 | 185 | ] |
|
186 | 186 | |
|
187 | 187 | for n in candidates: |
|
188 | 188 | if n in localmods: |
|
189 | 189 | return (n, n, False) |
|
190 | 190 | dottedpath = n + '.__init__' |
|
191 | 191 | if dottedpath in localmods: |
|
192 | 192 | return (n, dottedpath, True) |
|
193 | 193 | return False |
|
194 | 194 | |
|
195 | 195 | return fromlocal |
|
196 | 196 | |
|
197 | 197 | |
|
198 | 198 | def populateextmods(localmods): |
|
199 | 199 | """Populate C extension modules based on pure modules""" |
|
200 | 200 | newlocalmods = set(localmods) |
|
201 | 201 | for n in localmods: |
|
202 | 202 | if n.startswith('mercurial.pure.'): |
|
203 | 203 | m = n[len('mercurial.pure.') :] |
|
204 | 204 | newlocalmods.add('mercurial.cext.' + m) |
|
205 | 205 | newlocalmods.add('mercurial.cffi._' + m) |
|
206 | 206 | return newlocalmods |
|
207 | 207 | |
|
208 | 208 | |
|
209 | 209 | def list_stdlib_modules(): |
|
210 | 210 | """List the modules present in the stdlib. |
|
211 | 211 | |
|
212 | 212 | >>> py3 = sys.version_info[0] >= 3 |
|
213 | 213 | >>> mods = set(list_stdlib_modules()) |
|
214 | 214 | >>> 'BaseHTTPServer' in mods or py3 |
|
215 | 215 | True |
|
216 | 216 | |
|
217 | 217 | os.path isn't really a module, so it's missing: |
|
218 | 218 | |
|
219 | 219 | >>> 'os.path' in mods |
|
220 | 220 | False |
|
221 | 221 | |
|
222 | 222 | sys requires special treatment, because it's baked into the |
|
223 | 223 | interpreter, but it should still appear: |
|
224 | 224 | |
|
225 | 225 | >>> 'sys' in mods |
|
226 | 226 | True |
|
227 | 227 | |
|
228 | 228 | >>> 'collections' in mods |
|
229 | 229 | True |
|
230 | 230 | |
|
231 | 231 | >>> 'cStringIO' in mods or py3 |
|
232 | 232 | True |
|
233 | 233 | |
|
234 | 234 | >>> 'cffi' in mods |
|
235 | 235 | True |
|
236 | 236 | """ |
|
237 | 237 | for m in sys.builtin_module_names: |
|
238 | 238 | yield m |
|
239 | 239 | # These modules only exist on windows, but we should always |
|
240 | 240 | # consider them stdlib. |
|
241 | 241 | for m in ['msvcrt', '_winreg']: |
|
242 | 242 | yield m |
|
243 | 243 | yield '__builtin__' |
|
244 | 244 | yield 'builtins' # python3 only |
|
245 | 245 | yield 'importlib.abc' # python3 only |
|
246 | 246 | yield 'importlib.machinery' # python3 only |
|
247 | 247 | yield 'importlib.util' # python3 only |
|
248 | 248 | for m in 'fcntl', 'grp', 'pwd', 'termios': # Unix only |
|
249 | 249 | yield m |
|
250 | 250 | for m in 'cPickle', 'datetime': # in Python (not C) on PyPy |
|
251 | 251 | yield m |
|
252 | 252 | for m in ['cffi']: |
|
253 | 253 | yield m |
|
254 | 254 | stdlib_prefixes = {sys.prefix, sys.exec_prefix} |
|
255 | 255 | # We need to supplement the list of prefixes for the search to work |
|
256 | 256 | # when run from within a virtualenv. |
|
257 | 257 | for mod in (basehttpserver, zlib): |
|
258 | 258 | if mod is None: |
|
259 | 259 | continue |
|
260 | 260 | try: |
|
261 | 261 | # Not all module objects have a __file__ attribute. |
|
262 | 262 | filename = mod.__file__ |
|
263 | 263 | except AttributeError: |
|
264 | 264 | continue |
|
265 | 265 | dirname = os.path.dirname(filename) |
|
266 | 266 | for prefix in stdlib_prefixes: |
|
267 | 267 | if dirname.startswith(prefix): |
|
268 | 268 | # Then this directory is redundant. |
|
269 | 269 | break |
|
270 | 270 | else: |
|
271 | 271 | stdlib_prefixes.add(dirname) |
|
272 | 272 | sourceroot = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) |
|
273 | 273 | for libpath in sys.path: |
|
274 | 274 | # We want to walk everything in sys.path that starts with something in |
|
275 | 275 | # stdlib_prefixes, but not directories from the hg sources. |
|
276 | 276 | if os.path.abspath(libpath).startswith(sourceroot) or not any( |
|
277 | 277 | libpath.startswith(p) for p in stdlib_prefixes |
|
278 | 278 | ): |
|
279 | 279 | continue |
|
280 | 280 | for top, dirs, files in os.walk(libpath): |
|
281 | 281 | for i, d in reversed(list(enumerate(dirs))): |
|
282 | 282 | if ( |
|
283 | 283 | not os.path.exists(os.path.join(top, d, '__init__.py')) |
|
284 | 284 | or top == libpath |
|
285 | 285 | and d in ('hgdemandimport', 'hgext', 'mercurial') |
|
286 | 286 | ): |
|
287 | 287 | del dirs[i] |
|
288 | 288 | for name in files: |
|
289 | 289 | if not name.endswith(('.py', '.so', '.pyc', '.pyo', '.pyd')): |
|
290 | 290 | continue |
|
291 | 291 | if name.startswith('__init__.py'): |
|
292 | 292 | full_path = top |
|
293 | 293 | else: |
|
294 | 294 | full_path = os.path.join(top, name) |
|
295 | 295 | rel_path = full_path[len(libpath) + 1 :] |
|
296 | 296 | mod = dotted_name_of_path(rel_path) |
|
297 | 297 | yield mod |
|
298 | 298 | |
|
299 | 299 | |
|
300 | 300 | stdlib_modules = set(list_stdlib_modules()) |
|
301 | 301 | |
|
302 | 302 | |
|
303 | 303 | def imported_modules(source, modulename, f, localmods, ignore_nested=False): |
|
304 | 304 | """Given the source of a file as a string, yield the names |
|
305 | 305 | imported by that file. |
|
306 | 306 | |
|
307 | 307 | Args: |
|
308 | 308 | source: The python source to examine as a string. |
|
309 | 309 | modulename: of specified python source (may have `__init__`) |
|
310 | 310 | localmods: set of locally defined module names (may have `__init__`) |
|
311 | 311 | ignore_nested: If true, import statements that do not start in |
|
312 | 312 | column zero will be ignored. |
|
313 | 313 | |
|
314 | 314 | Yields: |

315 | 315 | The absolute module names imported by the given source. |
|
316 | 316 | |
|
317 | 317 | >>> f = 'foo/xxx.py' |
|
318 | 318 | >>> modulename = 'foo.xxx' |
|
319 | 319 | >>> localmods = {'foo.__init__': True, |
|
320 | 320 | ... 'foo.foo1': True, 'foo.foo2': True, |
|
321 | 321 | ... 'foo.bar.__init__': True, 'foo.bar.bar1': True, |
|
322 | 322 | ... 'baz.__init__': True, 'baz.baz1': True } |
|
323 | 323 | >>> # standard library (= not locally defined ones) |
|
324 | 324 | >>> sorted(imported_modules( |
|
325 | 325 | ... 'from stdlib1 import foo, bar; import stdlib2', |
|
326 | 326 | ... modulename, f, localmods)) |
|
327 | 327 | [] |
|
328 | 328 | >>> # relative importing |
|
329 | 329 | >>> sorted(imported_modules( |
|
330 | 330 | ... 'import foo1; from bar import bar1', |
|
331 | 331 | ... modulename, f, localmods)) |
|
332 | 332 | ['foo.bar.bar1', 'foo.foo1'] |
|
333 | 333 | >>> sorted(imported_modules( |
|
334 | 334 | ... 'from bar.bar1 import name1, name2, name3', |
|
335 | 335 | ... modulename, f, localmods)) |
|
336 | 336 | ['foo.bar.bar1'] |
|
337 | 337 | >>> # absolute importing |
|
338 | 338 | >>> sorted(imported_modules( |
|
339 | 339 | ... 'from baz import baz1, name1', |
|
340 | 340 | ... modulename, f, localmods)) |
|
341 | 341 | ['baz.__init__', 'baz.baz1'] |
|
342 | 342 | >>> # mixed importing, even though it is not recommended |
|
343 | 343 | >>> sorted(imported_modules( |
|
344 | 344 | ... 'import stdlib, foo1, baz', |
|
345 | 345 | ... modulename, f, localmods)) |
|
346 | 346 | ['baz.__init__', 'foo.foo1'] |
|
347 | 347 | >>> # ignore_nested |
|
348 | 348 | >>> sorted(imported_modules( |
|
349 | 349 | ... '''import foo |
|
350 | 350 | ... def wat(): |
|
351 | 351 | ... import bar |
|
352 | 352 | ... ''', modulename, f, localmods)) |
|
353 | 353 | ['foo.__init__', 'foo.bar.__init__'] |
|
354 | 354 | >>> sorted(imported_modules( |
|
355 | 355 | ... '''import foo |
|
356 | 356 | ... def wat(): |
|
357 | 357 | ... import bar |
|
358 | 358 | ... ''', modulename, f, localmods, ignore_nested=True)) |
|
359 | 359 | ['foo.__init__'] |
|
360 | 360 | """ |
|
361 | 361 | fromlocal = fromlocalfunc(modulename, localmods) |
|
362 | 362 | for node in ast.walk(ast.parse(source, f)): |
|
363 | 363 | if ignore_nested and getattr(node, 'col_offset', 0) > 0: |
|
364 | 364 | continue |
|
365 | 365 | if isinstance(node, ast.Import): |
|
366 | 366 | for n in node.names: |
|
367 | 367 | found = fromlocal(n.name) |
|
368 | 368 | if not found: |
|
369 | 369 | # this should import standard library |
|
370 | 370 | continue |
|
371 | 371 | yield found[1] |
|
372 | 372 | elif isinstance(node, ast.ImportFrom): |
|
373 | 373 | found = fromlocal(node.module, node.level) |
|
374 | 374 | if not found: |
|
375 | 375 | # this should import standard library |
|
376 | 376 | continue |
|
377 | 377 | |
|
378 | 378 | absname, dottedpath, hassubmod = found |
|
379 | 379 | if not hassubmod: |
|
380 | 380 | # "dottedpath" is not a package; must be imported |
|
381 | 381 | yield dottedpath |
|
382 | 382 | # examination of "node.names" should be redundant |
|
383 | 383 | # e.g.: from mercurial.node import nullid, nullrev |
|
384 | 384 | continue |
|
385 | 385 | |
|
386 | 386 | modnotfound = False |
|
387 | 387 | prefix = absname + '.' |
|
388 | 388 | for n in node.names: |
|
389 | 389 | found = fromlocal(prefix + n.name) |
|
390 | 390 | if not found: |
|
391 | 391 | # this should be a function or a property of "node.module" |
|
392 | 392 | modnotfound = True |
|
393 | 393 | continue |
|
394 | 394 | yield found[1] |
|
395 | 395 | if modnotfound and dottedpath != modulename: |
|
396 | 396 | # "dottedpath" is a package, but imported because of non-module |
|
397 | 397 | # lookup |
|
398 | 398 | # specifically allow "from . import foo" from __init__.py |
|
399 | 399 | yield dottedpath |
|
400 | 400 | |
|
401 | 401 | |
|
402 | 402 | def verify_import_convention(module, source, localmods): |
|
403 | 403 | """Verify imports match our established coding convention. |
|
404 | 404 | |
|
405 | 405 | We have 2 conventions: legacy and modern. The modern convention is in |
|
406 | 406 | effect when using absolute imports. |
|
407 | 407 | |
|
408 | 408 | The legacy convention only looks for mixed imports. The modern convention |
|
409 | 409 | is much more thorough. |
|
410 | 410 | """ |
|
411 | 411 | root = ast.parse(source) |
|
412 | 412 | absolute = usingabsolute(root) |
|
413 | 413 | |
|
414 | 414 | if absolute: |
|
415 | 415 | return verify_modern_convention(module, root, localmods) |
|
416 | 416 | else: |
|
417 | 417 | return verify_stdlib_on_own_line(root) |
|
418 | 418 | |
|
419 | 419 | |
|
420 | 420 | def verify_modern_convention(module, root, localmods, root_col_offset=0): |
|
421 | 421 | """Verify a file conforms to the modern import convention rules. |
|
422 | 422 | |
|
423 | 423 | The rules of the modern convention are: |
|
424 | 424 | |
|
425 | 425 | * Ordering is stdlib followed by local imports. Each group is lexically |
|
426 | 426 | sorted. |
|
427 | 427 | * Importing multiple modules via "import X, Y" is not allowed: use |
|
428 | 428 | separate import statements. |
|
429 | 429 | * Importing multiple modules via "from X import ..." is allowed if using |
|
430 | 430 | parenthesis and one entry per line. |
|
431 | 431 | * Only 1 relative import statement per import level ("from .", "from ..") |
|
432 | 432 | is allowed. |
|
433 | 433 | * Relative imports from higher levels must occur before lower levels, e.g. |
|
434 | 434 | "from .." must be before "from .". |
|
435 | 435 | * Imports from peer packages should use relative import (e.g. do not |
|
436 | 436 | "import mercurial.foo" from a "mercurial.*" module). |
|
437 | 437 | * Symbols can only be imported from specific modules (see |
|
438 | 438 | `allowsymbolimports`). For other modules, first import the module then |
|
439 | 439 | assign the symbol to a module-level variable. In addition, these imports |
|
440 | 440 | must be performed before other local imports. This rule only |
|
441 | 441 | applies to import statements outside of any blocks. |
|
442 | 442 | * Relative imports from the standard library are not allowed, unless that |
|
443 | 443 | library is also a local module. |
|
444 | 444 | * Certain modules must be aliased to alternate names to avoid aliasing |
|
445 | 445 | and readability problems. See `requirealias`. |
|
446 | 446 | """ |
|
447 | 447 | if not isinstance(module, str): |
|
448 | 448 | module = module.decode('ascii') |
|
449 | 449 | topmodule = module.split('.')[0] |
|
450 | 450 | fromlocal = fromlocalfunc(module, localmods) |
|
451 | 451 | |
|
452 | 452 | # Whether a local/non-stdlib import has been performed. |
|
453 | 453 | seenlocal = None |
|
454 | 454 | # Whether a local/non-stdlib, non-symbol import has been seen. |
|
455 | 455 | seennonsymbollocal = False |
|
456 | 456 | # The last name to be imported (for sorting). |
|
457 | 457 | lastname = None |
|
458 | 458 | laststdlib = None |
|
459 | 459 | # Relative import levels encountered so far. |
|
460 | 460 | seenlevels = set() |
|
461 | 461 | |
|
462 | 462 | for node, newscope in walklocal(root): |
|
463 | 463 | |
|
464 | 464 | def msg(fmt, *args): |
|
465 | 465 | return (fmt % args, node.lineno) |
|
466 | 466 | |
|
467 | 467 | if newscope: |
|
468 | 468 | # Check for local imports in function |
|
469 | 469 | for r in verify_modern_convention( |
|
470 | 470 | module, node, localmods, node.col_offset + 4 |
|
471 | 471 | ): |
|
472 | 472 | yield r |
|
473 | 473 | elif isinstance(node, ast.Import): |
|
474 | 474 | # Disallow "import foo, bar" and require separate imports |
|
475 | 475 | # for each module. |
|
476 | 476 | if len(node.names) > 1: |
|
477 | 477 | yield msg( |
|
478 | 478 | 'multiple imported names: %s', |
|
479 | 479 | ', '.join(n.name for n in node.names), |
|
480 | 480 | ) |
|
481 | 481 | |
|
482 | 482 | name = node.names[0].name |
|
483 | 483 | asname = node.names[0].asname |
|
484 | 484 | |
|
485 | 485 | stdlib = name in stdlib_modules |
|
486 | 486 | |
|
487 | 487 | # Ignore sorting rules on imports inside blocks. |
|
488 | 488 | if node.col_offset == root_col_offset: |
|
489 | 489 | if lastname and name < lastname and laststdlib == stdlib: |
|
490 | 490 | yield msg( |
|
491 | 491 | 'imports not lexically sorted: %s < %s', name, lastname |
|
492 | 492 | ) |
|
493 | 493 | |
|
494 | 494 | lastname = name |
|
495 | 495 | laststdlib = stdlib |
|
496 | 496 | |
|
497 | 497 | # stdlib imports should be before local imports. |
|
498 | 498 | if stdlib and seenlocal and node.col_offset == root_col_offset: |
|
499 | 499 | yield msg( |
|
500 | 500 | 'stdlib import "%s" follows local import: %s', |
|
501 | 501 | name, |
|
502 | 502 | seenlocal, |
|
503 | 503 | ) |
|
504 | 504 | |
|
505 | 505 | if not stdlib: |
|
506 | 506 | seenlocal = name |
|
507 | 507 | |
|
508 | 508 | # Import of sibling modules should use relative imports. |
|
509 | 509 | topname = name.split('.')[0] |
|
510 | 510 | if topname == topmodule: |
|
511 | 511 | yield msg('import should be relative: %s', name) |
|
512 | 512 | |
|
513 | 513 | if name in requirealias and asname != requirealias[name]: |
|
514 | 514 | yield msg( |
|
515 | 515 | '%s module must be "as" aliased to %s', |
|
516 | 516 | name, |
|
517 | 517 | requirealias[name], |
|
518 | 518 | ) |
|
519 | 519 | |
|
520 | 520 | elif isinstance(node, ast.ImportFrom): |
|
521 | 521 | # Resolve the full imported module name. |
|
522 | 522 | if node.level > 0: |
|
523 | 523 | fullname = '.'.join(module.split('.')[: -node.level]) |
|
524 | 524 | if node.module: |
|
525 | 525 | fullname += '.%s' % node.module |
|
526 | 526 | else: |
|
527 | 527 | assert node.module |
|
528 | 528 | fullname = node.module |
|
529 | 529 | |
|
530 | 530 | topname = fullname.split('.')[0] |
|
531 | 531 | if topname == topmodule: |
|
532 | 532 | yield msg('import should be relative: %s', fullname) |
|
533 | 533 | |
|
534 | 534 | # __future__ is special since it needs to come first and use |
|
535 | 535 | # symbol import. |
|
536 | 536 | if fullname != '__future__': |
|
537 | 537 | if not fullname or ( |
|
538 | 538 | fullname in stdlib_modules |
|
539 | 539 | # allow standard 'from typing import ...' style |
|
540 | 540 | and fullname.startswith('.') |
|
541 | 541 | and fullname not in localmods |
|
542 | 542 | and fullname + '.__init__' not in localmods |
|
543 | 543 | ): |
|
544 | 544 | yield msg('relative import of stdlib module') |
|
545 | 545 | else: |
|
546 | 546 | seenlocal = fullname |
|
547 | 547 | |
|
548 | 548 | # Direct symbol import is only allowed from certain modules and |
|
549 | 549 | # must occur before non-symbol imports. |
|
550 | 550 | found = fromlocal(node.module, node.level) |
|
551 | 551 | if found and found[2]: # node.module is a package |
|
552 | 552 | prefix = found[0] + '.' |
|
553 | 553 | symbols = ( |
|
554 | 554 | n.name for n in node.names if not fromlocal(prefix + n.name) |
|
555 | 555 | ) |
|
556 | 556 | else: |
|
557 | 557 | symbols = (n.name for n in node.names) |
|
558 | 558 | symbols = [sym for sym in symbols if sym not in directsymbols] |
|
559 | 559 | if node.module and node.col_offset == root_col_offset: |
|
560 | 560 | if symbols and fullname not in allowsymbolimports: |
|
561 | 561 | yield msg( |
|
562 | 562 | 'direct symbol import %s from %s', |
|
563 | 563 | ', '.join(symbols), |
|
564 | 564 | fullname, |
|
565 | 565 | ) |
|
566 | 566 | |
|
567 | 567 | if symbols and seennonsymbollocal: |
|
568 | 568 | yield msg( |
|
569 | 569 | 'symbol import follows non-symbol import: %s', fullname |
|
570 | 570 | ) |
|
571 | 571 | if not symbols and fullname not in stdlib_modules: |
|
572 | 572 | seennonsymbollocal = True |
|
573 | 573 | |
|
574 | 574 | if not node.module: |
|
575 | 575 | assert node.level |
|
576 | 576 | |
|
577 | 577 | # Only allow 1 group per level. |
|
578 | 578 | if ( |
|
579 | 579 | node.level in seenlevels |
|
580 | 580 | and node.col_offset == root_col_offset |
|
581 | 581 | ): |
|
582 | 582 | yield msg( |
|
583 | 583 | 'multiple "from %s import" statements', '.' * node.level |
|
584 | 584 | ) |
|
585 | 585 | |
|
586 | 586 | # Higher-level groups come before lower-level groups. |
|
587 | 587 | if any(node.level > l for l in seenlevels): |
|
588 | 588 | yield msg( |
|
589 | 589 | 'higher-level import should come first: %s', fullname |
|
590 | 590 | ) |
|
591 | 591 | |
|
592 | 592 | seenlevels.add(node.level) |
|
593 | 593 | |
|
594 | 594 | # Entries in "from .X import ( ... )" lists must be lexically |
|
595 | 595 | # sorted. |
|
596 | 596 | lastentryname = None |
|
597 | 597 | |
|
598 | 598 | for n in node.names: |
|
599 | 599 | if lastentryname and n.name < lastentryname: |
|
600 | 600 | yield msg( |
|
601 | 601 | 'imports from %s not lexically sorted: %s < %s', |
|
602 | 602 | fullname, |
|
603 | 603 | n.name, |
|
604 | 604 | lastentryname, |
|
605 | 605 | ) |
|
606 | 606 | |
|
607 | 607 | lastentryname = n.name |
|
608 | 608 | |
|
609 | 609 | if n.name in requirealias and n.asname != requirealias[n.name]: |
|
610 | 610 | yield msg( |
|
611 | 611 | '%s from %s must be "as" aliased to %s', |
|
612 | 612 | n.name, |
|
613 | 613 | fullname, |
|
614 | 614 | requirealias[n.name], |
|
615 | 615 | ) |
|
616 | 616 | |
|
617 | 617 | |
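
For reference alongside the rule list in the docstring above, here is a sketch of an import block that satisfies verify_modern_convention. The containing module, say a hypothetical mercurial/something.py, is invented; `mercurial.i18n` is one of the `allowsymbolimports` entries and `ui` must be aliased per `requirealias`:

    from __future__ import absolute_import

    # stdlib imports first, lexically sorted, one module per statement
    import collections
    import os

    # symbol import from a whitelisted module, before non-symbol imports
    from .i18n import _

    # a single "from ." group, entries sorted, ui aliased as required
    from . import (
        error,
        pycompat,
        ui as uimod,
        util,
    )
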
|
618 | 618 | def verify_stdlib_on_own_line(root): |
|
619 | 619 | """Given some python source, verify that stdlib imports are done |
|
620 | 620 | in separate statements from relative local module imports. |
|
621 | 621 | |
|
622 | 622 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, foo'))) |
|
623 | 623 | [('mixed imports\\n stdlib: sys\\n relative: foo', 1)] |
|
624 | 624 | >>> list(verify_stdlib_on_own_line(ast.parse('import sys, os'))) |
|
625 | 625 | [] |
|
626 | 626 | >>> list(verify_stdlib_on_own_line(ast.parse('import foo, bar'))) |
|
627 | 627 | [] |
|
628 | 628 | """ |
|
629 | 629 | for node in ast.walk(root): |
|
630 | 630 | if isinstance(node, ast.Import): |
|
631 | 631 | from_stdlib = {False: [], True: []} |
|
632 | 632 | for n in node.names: |
|
633 | 633 | from_stdlib[n.name in stdlib_modules].append(n.name) |
|
634 | 634 | if from_stdlib[True] and from_stdlib[False]: |
|
635 | 635 | yield ( |
|
636 | 636 | 'mixed imports\n stdlib: %s\n relative: %s' |
|
637 | 637 | % ( |
|
638 | 638 | ', '.join(sorted(from_stdlib[True])), |
|
639 | 639 | ', '.join(sorted(from_stdlib[False])), |
|
640 | 640 | ), |
|
641 | 641 | node.lineno, |
|
642 | 642 | ) |
|
643 | 643 | |
|
644 | 644 | |
|
645 | 645 | class CircularImport(Exception): |
|
646 | 646 | pass |
|
647 | 647 | |
|
648 | 648 | |
|
649 | 649 | def checkmod(mod, imports): |
|
650 | 650 | shortest = {} |
|
651 | 651 | visit = [[mod]] |
|
652 | 652 | while visit: |
|
653 | 653 | path = visit.pop(0) |
|
654 | 654 | for i in sorted(imports.get(path[-1], [])): |
|
655 | 655 | if len(path) < shortest.get(i, 1000): |
|
656 | 656 | shortest[i] = len(path) |
|
657 | 657 | if i in path: |
|
658 | 658 | if i == path[0]: |
|
659 | 659 | raise CircularImport(path) |
|
660 | 660 | continue |
|
661 | 661 | visit.append(path + [i]) |
|
662 | 662 | |
|
663 | 663 | |
|
664 | 664 | def rotatecycle(cycle): |
|
665 | 665 | """arrange a cycle so that the lexicographically first module listed first |
|
666 | 666 | |
|
667 | 667 | >>> rotatecycle(['foo', 'bar']) |
|
668 | 668 | ['bar', 'foo', 'bar'] |
|
669 | 669 | """ |
|
670 | 670 | lowest = min(cycle) |
|
671 | 671 | idx = cycle.index(lowest) |
|
672 | 672 | return cycle[idx:] + cycle[:idx] + [lowest] |
|
673 | 673 | |
|
674 | 674 | |
|
675 | 675 | def find_cycles(imports): |
|
676 | 676 | """Find cycles in an already-loaded import graph. |
|
677 | 677 | |
|
678 | 678 | All module names recorded in `imports` should be absolute ones. |
|
679 | 679 | |
|
680 | 680 | >>> from __future__ import print_function |
|
681 | 681 | >>> imports = {'top.foo': ['top.bar', 'os.path', 'top.qux'], |
|
682 | 682 | ... 'top.bar': ['top.baz', 'sys'], |
|
683 | 683 | ... 'top.baz': ['top.foo'], |
|
684 | 684 | ... 'top.qux': ['top.foo']} |
|
685 | 685 | >>> print('\\n'.join(sorted(find_cycles(imports)))) |
|
686 | 686 | top.bar -> top.baz -> top.foo -> top.bar |
|
687 | 687 | top.foo -> top.qux -> top.foo |
|
688 | 688 | """ |
|
689 | 689 | cycles = set() |
|
690 | 690 | for mod in sorted(imports.keys()): |
|
691 | 691 | try: |
|
692 | 692 | checkmod(mod, imports) |
|
693 | 693 | except CircularImport as e: |
|
694 | 694 | cycle = e.args[0] |
|
695 | 695 | cycles.add(" -> ".join(rotatecycle(cycle))) |
|
696 | 696 | return cycles |
|
697 | 697 | |
|
698 | 698 | |
|
699 | 699 | def _cycle_sortkey(c): |
|
700 | 700 | return len(c), c |
|
701 | 701 | |
|
702 | 702 | |
|
703 | 703 | def embedded(f, modname, src): |
|
704 | 704 | """Extract embedded python code |
|
705 | 705 | |
|
706 | 706 | >>> def _forcestr(thing): |
|
707 | 707 | ... if not isinstance(thing, str): |
|
708 | 708 | ... return thing.decode('ascii') |
|
709 | 709 | ... return thing |
|
710 | 710 | >>> def test(fn, lines): |
|
711 | 711 | ... for s, m, f, l in embedded(fn, b"example", lines): |
|
712 | 712 | ... print("%s %s %d" % (_forcestr(m), _forcestr(f), l)) |
|
713 | 713 | ... print(repr(_forcestr(s))) |
|
714 | 714 | >>> lines = [ |
|
715 | 715 | ... 'comment', |
|
716 | 716 | ... ' >>> from __future__ import print_function', |
|
717 | 717 | ... " >>> ' multiline", |
|
718 | 718 | ... " ... string'", |
|
719 | 719 | ... ' ', |
|
720 | 720 | ... 'comment', |
|
721 | 721 | ... ' $ cat > foo.py <<EOF', |
|
722 | 722 | ... ' > from __future__ import print_function', |
|
723 | 723 | ... ' > EOF', |
|
724 | 724 | ... ] |
|
725 | 725 | >>> test(b"example.t", lines) |
|
726 | 726 | example[2] doctest.py 1 |
|
727 | 727 | "from __future__ import print_function\\n' multiline\\nstring'\\n\\n" |
|
728 | 728 | example[8] foo.py 7 |
|
729 | 729 | 'from __future__ import print_function\\n' |
|
730 | 730 | """ |
|
731 | 731 | errors = [] |
|
732 | 732 | for name, starts, ends, code in testparseutil.pyembedded(f, src, errors): |
|
733 | 733 | if not name: |
|
734 | 734 | # use 'doctest.py', so that the already existing |

735 | 735 | # doctest above passes unchanged |
|
736 | 736 | name = 'doctest.py' |
|
737 | 737 | # "starts" is "line number" (1-origin), but embedded() is |
|
738 | 738 | # expected to return "line offset" (0-origin). Therefore, this |
|
739 | 739 | # yields "starts - 1". |
|
740 | 740 | if not isinstance(modname, str): |
|
741 | 741 | modname = modname.decode('utf8') |
|
742 | 742 | yield code, "%s[%d]" % (modname, starts), name, starts - 1 |
|
743 | 743 | |
|
744 | 744 | |
|
745 | 745 | def sources(f, modname): |
|
746 | 746 | """Yields possibly multiple sources from a filepath |
|
747 | 747 | |
|
748 | 748 | input: filepath, modulename |
|
749 | 749 | yields: script(string), modulename, filepath, linenumber |
|
750 | 750 | |
|
751 | 751 | For embedded scripts, the modulename and filepath will be different |
|
752 | 752 | from the function arguments. linenumber is an offset relative to |
|
753 | 753 | the input file. |
|
754 | 754 | """ |
|
755 | 755 | py = False |
|
756 | 756 | if not f.endswith('.t'): |
|
757 | 757 | with open(f, 'rb') as src: |
|
758 | 758 | yield src.read(), modname, f, 0 |
|
759 | 759 | py = True |
|
760 | 760 | if py or f.endswith('.t'): |
|
761 | 761 | # Strictly speaking we should sniff for the magic header that denotes |
|
762 | 762 | # Python source file encoding. But in reality we don't use anything |
|
763 | 763 | # other than ASCII (mainly) and UTF-8 (in a few exceptions), so |
|
764 | 764 | # simplicity is fine. |
|
765 | 765 | with io.open(f, 'r', encoding='utf-8') as src: |
|
766 | 766 | for script, modname, t, line in embedded(f, modname, src): |
|
767 | 767 | yield script, modname.encode('utf8'), t, line |
|
768 | 768 | |
|
769 | 769 | |
|
770 | 770 | def main(argv): |
|
771 | 771 | if len(argv) < 2 or (argv[1] == '-' and len(argv) > 2): |
|
772 | 772 | print('Usage: %s {-|file [file] [file] ...}' % argv[0]) |
|
773 | 773 | return 1 |
|
774 | 774 | if argv[1] == '-': |
|
775 | 775 | argv = argv[:1] |
|
776 | 776 | argv.extend(l.rstrip() for l in sys.stdin.readlines()) |
|
777 | 777 | localmodpaths = {} |
|
778 | 778 | used_imports = {} |
|
779 | 779 | any_errors = False |
|
780 | 780 | for source_path in argv[1:]: |
|
781 | 781 | modname = dotted_name_of_path(source_path) |
|
782 | 782 | localmodpaths[modname] = source_path |
|
783 | 783 | localmods = populateextmods(localmodpaths) |
|
784 | 784 | for localmodname, source_path in sorted(localmodpaths.items()): |
|
785 | 785 | if not isinstance(localmodname, bytes): |
|
786 | 786 | # This is only safe because all hg's files are ascii |
|
787 | 787 | localmodname = localmodname.encode('ascii') |
|
788 | 788 | for src, modname, name, line in sources(source_path, localmodname): |
|
789 | 789 | try: |
|
790 | 790 | used_imports[modname] = sorted( |
|
791 | 791 | imported_modules( |
|
792 | 792 | src, modname, name, localmods, ignore_nested=True |
|
793 | 793 | ) |
|
794 | 794 | ) |
|
795 | 795 | for error, lineno in verify_import_convention( |
|
796 | 796 | modname, src, localmods |
|
797 | 797 | ): |
|
798 | 798 | any_errors = True |
|
799 | 799 | print('%s:%d: %s' % (source_path, lineno + line, error)) |
|
800 | 800 | except SyntaxError as e: |
|
801 | 801 | print( |
|
802 | 802 | '%s:%d: SyntaxError: %s' % (source_path, e.lineno + line, e) |
|
803 | 803 | ) |
|
804 | 804 | cycles = find_cycles(used_imports) |
|
805 | 805 | if cycles: |
|
806 | 806 | firstmods = set() |
|
807 | 807 | for c in sorted(cycles, key=_cycle_sortkey): |
|
808 | 808 | first = c.split()[0] |
|
809 | 809 | # As a rough cut, ignore any cycle that starts with the |
|
810 | 810 | # same module as some other cycle. Otherwise we see lots |
|
811 | 811 | # of cycles that are effectively duplicates. |
|
812 | 812 | if first in firstmods: |
|
813 | 813 | continue |
|
814 | 814 | print('Import cycle:', c) |
|
815 | 815 | firstmods.add(first) |
|
816 | 816 | any_errors = True |
|
817 | 817 | return any_errors != 0 |
|
818 | 818 | |
|
819 | 819 | |
|
820 | 820 | if __name__ == '__main__': |
|
821 | 821 | sys.exit(int(main(sys.argv))) |
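
With the '-' argument, main() reads the file list from stdin, which suits piping the output of another command into the checker. A hedged sketch of driving it that way (the contrib/import-checker.py path reflects where this script lives in the Mercurial tree; the file names are illustrative):

    import subprocess

    # Feed candidate files on stdin; main() returns non-zero when any
    # import convention violation or import cycle is reported.
    files = b"mercurial/util.py\nmercurial/node.py\n"
    subprocess.run(
        ["python3", "contrib/import-checker.py", "-"],
        input=files,
        check=False,  # non-zero exit simply means errors were printed
    )
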
@@ -1,211 +1,215 b''
|
1 | 1 | # install-dependencies.ps1 - Install Windows dependencies for building Mercurial |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | # This script can be used to bootstrap a Mercurial build environment on |
|
9 | 9 | # Windows. |
|
10 | 10 | # |
|
11 | 11 | # The script makes a lot of assumptions about how things should work. |
|
12 | 12 | # For example, the install location of Python is hardcoded to c:\hgdev\*. |
|
13 | 13 | # |
|
14 | 14 | # The script should be executed from a PowerShell with elevated privileges |
|
15 | 15 | # if you don't want to see a UAC prompt for various installers. |
|
16 | 16 | # |
|
17 | 17 | # The script is tested on Windows 10 and Windows Server 2019 (in EC2). |
|
18 | 18 | |
|
19 | 19 | $VS_BUILD_TOOLS_URL = "https://download.visualstudio.microsoft.com/download/pr/a1603c02-8a66-4b83-b821-811e3610a7c4/aa2db8bb39e0cbd23e9940d8951e0bc3/vs_buildtools.exe" |
|
20 | 20 | $VS_BUILD_TOOLS_SHA256 = "911E292B8E6E5F46CBC17003BDCD2D27A70E616E8D5E6E69D5D489A605CAA139" |
|
21 | 21 | |
|
22 | 22 | $VC9_PYTHON_URL = "https://download.microsoft.com/download/7/9/6/796EF2E4-801B-4FC4-AB28-B59FBF6D907B/VCForPython27.msi" |
|
23 | 23 | $VC9_PYTHON_SHA256 = "070474db76a2e625513a5835df4595df9324d820f9cc97eab2a596dcbc2f5cbf" |
|
24 | 24 | |
|
25 | 25 | $PYTHON27_x64_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.amd64.msi" |
|
26 | 26 | $PYTHON27_x64_SHA256 = "b74a3afa1e0bf2a6fc566a7b70d15c9bfabba3756fb077797d16fffa27800c05" |
|
27 | 27 | $PYTHON27_X86_URL = "https://www.python.org/ftp/python/2.7.18/python-2.7.18.msi" |
|
28 | 28 | $PYTHON27_X86_SHA256 = "d901802e90026e9bad76b8a81f8dd7e43c7d7e8269d9281c9e9df7a9c40480a9" |
|
29 | 29 | |
|
30 | 30 | $PYTHON37_x86_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9.exe" |
|
31 | 31 | $PYTHON37_x86_SHA256 = "769bb7c74ad1df6d7d74071cc16a984ff6182e4016e11b8949b93db487977220" |
|
32 | 32 | $PYTHON37_X64_URL = "https://www.python.org/ftp/python/3.7.9/python-3.7.9-amd64.exe" |
|
33 | 33 | $PYTHON37_x64_SHA256 = "e69ed52afb5a722e5c56f6c21d594e85c17cb29f12f18bb69751cf1714e0f987" |
|
34 | 34 | |
|
35 | $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.
|
|
36 | $PYTHON38_x86_SHA256 = "287d5df01ff22ff09e6a487ae018603ee19eade71d462ec703850c96f1d5e8a0" | |
|
37 | $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.
|
|
38 | $PYTHON38_x64_SHA256 = "328a257f189cb500606bb26ab0fbdd298ed0e05d8c36540a322a1744f489a0a0" | |
|
35 | $PYTHON38_x86_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10.exe" | |
|
36 | $PYTHON38_x86_SHA256 = "ad07633a1f0cd795f3bf9da33729f662281df196b4567fa795829f3bb38a30ac" | |
|
37 | $PYTHON38_x64_URL = "https://www.python.org/ftp/python/3.8.10/python-3.8.10-amd64.exe" | |
|
38 | $PYTHON38_x64_SHA256 = "7628244cb53408b50639d2c1287c659f4e29d3dfdb9084b11aed5870c0c6a48a" | |
|
39 | 39 | |
|
40 | $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.
|
|
41 | $PYTHON39_x86_SHA256 = "a4c65917f4225d1543959342f0615c813a4e9e7ff1137c4394ff6a5290ac1913" | |
|
42 | $PYTHON39_x64_URL = "https://www.python.org/ftp/python/3.9.
|
|
43 | $PYTHON39_x64_SHA256 = "fd2e2c6612d43bb6b213b72fc53f07d73d99059fa72c96e44bde12e7815073ae" | |
|
40 | $PYTHON39_x86_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5.exe" | |
|
41 | $PYTHON39_x86_SHA256 = "505129081a839b699a6ab9064b441ad922ef03767b5dd4241fd0c2166baf64de" | |
|
42 | $PYTHON39_x64_URL = "https://www.python.org/ftp/python/3.9.5/python-3.9.5-amd64.exe" | |
|
43 | $PYTHON39_x64_SHA256 = "84d5243088ba00c11e51905c704dbe041040dfff044f4e1ce5476844ee2e6eac" | |
|
44 | 44 | |
|
45 | 45 | # PIP 19.2.3. |
|
46 | 46 | $PIP_URL = "https://github.com/pypa/get-pip/raw/309a56c5fd94bd1134053a541cb4657a4e47e09d/get-pip.py" |
|
47 | 47 | $PIP_SHA256 = "57e3643ff19f018f8a00dfaa6b7e4620e3c1a7a2171fd218425366ec006b3bfe" |
|
48 | 48 | |
|
49 | 49 | $VIRTUALENV_URL = "https://files.pythonhosted.org/packages/66/f0/6867af06d2e2f511e4e1d7094ff663acdebc4f15d4a0cb0fed1007395124/virtualenv-16.7.5.tar.gz" |
|
50 | 50 | $VIRTUALENV_SHA256 = "f78d81b62d3147396ac33fc9d77579ddc42cc2a98dd9ea38886f616b33bc7fb2" |
|
51 | 51 | |
|
52 | 52 | $INNO_SETUP_URL = "http://files.jrsoftware.org/is/5/innosetup-5.6.1-unicode.exe" |
|
53 | 53 | $INNO_SETUP_SHA256 = "27D49E9BC769E9D1B214C153011978DB90DC01C2ACD1DDCD9ED7B3FE3B96B538" |
|
54 | 54 | |
|
55 | 55 | $MINGW_BIN_URL = "https://osdn.net/frs/redir.php?m=constant&f=mingw%2F68260%2Fmingw-get-0.6.3-mingw32-pre-20170905-1-bin.zip" |
|
56 | 56 | $MINGW_BIN_SHA256 = "2AB8EFD7C7D1FC8EAF8B2FA4DA4EEF8F3E47768284C021599BC7435839A046DF" |
|
57 | 57 | |
|
58 | 58 | $MERCURIAL_WHEEL_FILENAME = "mercurial-5.1.2-cp27-cp27m-win_amd64.whl" |
|
59 | 59 | $MERCURIAL_WHEEL_URL = "https://files.pythonhosted.org/packages/6d/47/e031e47f7fe9b16e4e3383da47e2b0a7eae6e603996bc67a03ec4fa1b3f4/$MERCURIAL_WHEEL_FILENAME" |
|
60 | 60 | $MERCURIAL_WHEEL_SHA256 = "1d18c7f6ca1456f0f62ee65c9a50c14cbba48ce6e924930cdb10537f5c9eaf5f" |
|
61 | 61 | |
|
62 | 62 | $RUSTUP_INIT_URL = "https://static.rust-lang.org/rustup/archive/1.21.1/x86_64-pc-windows-gnu/rustup-init.exe" |
|
63 | 63 | $RUSTUP_INIT_SHA256 = "d17df34ba974b9b19cf5c75883a95475aa22ddc364591d75d174090d55711c72" |
|
64 | 64 | |
|
65 | $PYOXIDIZER_URL = "https://github.com/indygreg/PyOxidizer/releases/download/pyoxidizer%2F0.16.0/PyOxidizer-0.16.0-x64.msi" | |
|
66 | $PYOXIDIZER_SHA256 = "2a9c58add9161c272c418d5e6dec13fbe648f624b5d26770190357e4d664f24e" | |
|
67 | ||
|
65 | 68 | # Writing progress slows down downloads substantially. So disable it. |
|
66 | 69 | $progressPreference = 'silentlyContinue' |
|
67 | 70 | |
|
68 | 71 | function Secure-Download($url, $path, $sha256) { |
|
69 | 72 | if (Test-Path -Path $path) { |
|
70 | 73 | Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash |
|
71 | 74 | |
|
72 | 75 | if ($hash.Hash -eq $sha256) { |
|
73 | 76 | Write-Output "SHA256 of $path verified as $sha256" |
|
74 | 77 | return |
|
75 | 78 | } |
|
76 | 79 | |
|
77 | 80 | Write-Output "hash mismatch on $path; downloading again" |
|
78 | 81 | } |
|
79 | 82 | |
|
80 | 83 | Write-Output "downloading $url to $path" |
|
81 | 84 | Invoke-WebRequest -Uri $url -OutFile $path |
|
82 | 85 | Get-FileHash -Path $path -Algorithm SHA256 -OutVariable hash |
|
83 | 86 | |
|
84 | 87 | if ($hash.Hash -ne $sha256) { |
|
85 | 88 | Remove-Item -Path $path |
|
86 | 89 | throw "hash mismatch when downloading $url; got $($hash.Hash), expected $sha256" |
|
87 | 90 | } |
|
88 | 91 | } |
|
89 | 92 | |
|
90 | 93 | function Invoke-Process($path, $arguments) { |
|
91 | 94 | $p = Start-Process -FilePath $path -ArgumentList $arguments -Wait -PassThru -WindowStyle Hidden |
|
92 | 95 | |
|
93 | 96 | if ($p.ExitCode -ne 0) { |
|
94 | 97 | throw "process exited non-0: $($p.ExitCode)" |
|
95 | 98 | } |
|
96 | 99 | } |
|
97 | 100 | |
|
98 | 101 | function Install-Python3($name, $installer, $dest, $pip) { |
|
99 | 102 | Write-Output "installing $name" |
|
100 | 103 | |
|
101 | 104 | # We hit this when running the script as part of Simple Systems Manager in |
|
102 | 105 | # EC2. The Python 3 installer doesn't seem to like per-user installs |
|
103 | 106 | # when running as the SYSTEM user. So enable global installs if executed in |
|
104 | 107 | # this mode. |
|
105 | 108 | if ($env:USERPROFILE -eq "C:\Windows\system32\config\systemprofile") { |
|
106 | 109 | Write-Output "running with SYSTEM account; installing for all users" |
|
107 | 110 | $allusers = "1" |
|
108 | 111 | } |
|
109 | 112 | else { |
|
110 | 113 | $allusers = "0" |
|
111 | 114 | } |
|
112 | 115 | |
|
113 | 116 | Invoke-Process $installer "/quiet TargetDir=${dest} InstallAllUsers=${allusers} AssociateFiles=0 CompileAll=0 PrependPath=0 Include_doc=0 Include_launcher=0 InstallLauncherAllUsers=0 Include_pip=0 Include_test=0" |
|
114 | 117 | Invoke-Process ${dest}\python.exe $pip |
|
115 | 118 | } |
|
116 | 119 | |
|
117 | 120 | function Install-Rust($prefix) { |
|
118 | 121 | Write-Output "installing Rust" |
|
119 | 122 | $Env:RUSTUP_HOME = "${prefix}\rustup" |
|
120 | 123 | $Env:CARGO_HOME = "${prefix}\cargo" |
|
121 | 124 | |
|
122 | 125 | Invoke-Process "${prefix}\assets\rustup-init.exe" "-y --default-host x86_64-pc-windows-msvc" |
|
123 | 126 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "target add i686-pc-windows-msvc" |
|
124 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1.
|
|
127 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "install 1.52.0" | |
|
125 | 128 | Invoke-Process "${prefix}\cargo\bin\rustup.exe" "component add clippy" |
|
126 | ||
|
127 | # Install PyOxidizer for packaging. | |
|
128 | Invoke-Process "${prefix}\cargo\bin\cargo.exe" "install --version 0.10.3 pyoxidizer" | |
|
129 | 129 | } |
|
130 | 130 | |
|
131 | 131 | function Install-Dependencies($prefix) { |
|
132 | 132 | if (!(Test-Path -Path $prefix\assets)) { |
|
133 | 133 | New-Item -Path $prefix\assets -ItemType Directory |
|
134 | 134 | } |
|
135 | 135 | |
|
136 | 136 | $pip = "${prefix}\assets\get-pip.py" |
|
137 | 137 | |
|
138 | 138 | Secure-Download $VC9_PYTHON_URL ${prefix}\assets\VCForPython27.msi $VC9_PYTHON_SHA256 |
|
139 | 139 | Secure-Download $PYTHON27_x86_URL ${prefix}\assets\python27-x86.msi $PYTHON27_x86_SHA256 |
|
140 | 140 | Secure-Download $PYTHON27_x64_URL ${prefix}\assets\python27-x64.msi $PYTHON27_x64_SHA256 |
|
141 | 141 | Secure-Download $PYTHON37_x86_URL ${prefix}\assets\python37-x86.exe $PYTHON37_x86_SHA256 |
|
142 | 142 | Secure-Download $PYTHON37_x64_URL ${prefix}\assets\python37-x64.exe $PYTHON37_x64_SHA256 |
|
143 | 143 | Secure-Download $PYTHON38_x86_URL ${prefix}\assets\python38-x86.exe $PYTHON38_x86_SHA256 |
|
144 | 144 | Secure-Download $PYTHON38_x64_URL ${prefix}\assets\python38-x64.exe $PYTHON38_x64_SHA256 |
|
145 | 145 | Secure-Download $PYTHON39_x86_URL ${prefix}\assets\python39-x86.exe $PYTHON39_x86_SHA256 |
|
146 | 146 | Secure-Download $PYTHON39_x64_URL ${prefix}\assets\python39-x64.exe $PYTHON39_x64_SHA256 |
|
147 | 147 | Secure-Download $PIP_URL ${pip} $PIP_SHA256 |
|
148 | 148 | Secure-Download $VIRTUALENV_URL ${prefix}\assets\virtualenv.tar.gz $VIRTUALENV_SHA256 |
|
149 | 149 | Secure-Download $VS_BUILD_TOOLS_URL ${prefix}\assets\vs_buildtools.exe $VS_BUILD_TOOLS_SHA256 |
|
150 | 150 | Secure-Download $INNO_SETUP_URL ${prefix}\assets\InnoSetup.exe $INNO_SETUP_SHA256 |
|
151 | 151 | Secure-Download $MINGW_BIN_URL ${prefix}\assets\mingw-get-bin.zip $MINGW_BIN_SHA256 |
|
152 | 152 | Secure-Download $MERCURIAL_WHEEL_URL ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME} $MERCURIAL_WHEEL_SHA256 |
|
153 | 153 | Secure-Download $RUSTUP_INIT_URL ${prefix}\assets\rustup-init.exe $RUSTUP_INIT_SHA256 |
|
154 | Secure-Download $PYOXIDIZER_URL ${prefix}\assets\PyOxidizer.msi $PYOXIDIZER_SHA256 | |
|
154 | 155 | |
|
155 | 156 | Write-Output "installing Python 2.7 32-bit" |
|
156 | 157 | Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x86.msi /l* ${prefix}\assets\python27-x86.log /q TARGETDIR=${prefix}\python27-x86 ALLUSERS=" |
|
157 | 158 | Invoke-Process ${prefix}\python27-x86\python.exe ${prefix}\assets\get-pip.py |
|
158 | 159 | Invoke-Process ${prefix}\python27-x86\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" |
|
159 | 160 | |
|
160 | 161 | Write-Output "installing Python 2.7 64-bit" |
|
161 | 162 | Invoke-Process msiexec.exe "/i ${prefix}\assets\python27-x64.msi /l* ${prefix}\assets\python27-x64.log /q TARGETDIR=${prefix}\python27-x64 ALLUSERS=" |
|
162 | 163 | Invoke-Process ${prefix}\python27-x64\python.exe ${prefix}\assets\get-pip.py |
|
163 | 164 | Invoke-Process ${prefix}\python27-x64\Scripts\pip.exe "install ${prefix}\assets\virtualenv.tar.gz" |
|
164 | 165 | |
|
165 | 166 | Install-Python3 "Python 3.7 32-bit" ${prefix}\assets\python37-x86.exe ${prefix}\python37-x86 ${pip} |
|
166 | 167 | Install-Python3 "Python 3.7 64-bit" ${prefix}\assets\python37-x64.exe ${prefix}\python37-x64 ${pip} |
|
167 | 168 | Install-Python3 "Python 3.8 32-bit" ${prefix}\assets\python38-x86.exe ${prefix}\python38-x86 ${pip} |
|
168 | 169 | Install-Python3 "Python 3.8 64-bit" ${prefix}\assets\python38-x64.exe ${prefix}\python38-x64 ${pip} |
|
169 | 170 | Install-Python3 "Python 3.9 32-bit" ${prefix}\assets\python39-x86.exe ${prefix}\python39-x86 ${pip} |
|
170 | 171 | Install-Python3 "Python 3.9 64-bit" ${prefix}\assets\python39-x64.exe ${prefix}\python39-x64 ${pip} |
|
171 | 172 | |
|
172 | 173 | Write-Output "installing Visual Studio 2017 Build Tools and SDKs" |
|
173 | 174 | Invoke-Process ${prefix}\assets\vs_buildtools.exe "--quiet --wait --norestart --nocache --channelUri https://aka.ms/vs/15/release/channel --add Microsoft.VisualStudio.Workload.MSBuildTools --add Microsoft.VisualStudio.Component.Windows10SDK.17763 --add Microsoft.VisualStudio.Workload.VCTools --add Microsoft.VisualStudio.Component.Windows10SDK --add Microsoft.VisualStudio.Component.VC.140" |
|
174 | 175 | |
|
176 | Write-Output "installing PyOxidizer" | |
|
177 | Invoke-Process msiexec.exe "/i ${prefix}\assets\PyOxidizer.msi /l* ${prefix}\assets\PyOxidizer.log /quiet" | |
|
178 | ||
|
175 | 179 | Install-Rust ${prefix} |
|
176 | 180 | |
|
177 | 181 | Write-Output "installing Visual C++ 9.0 for Python 2.7" |
|
178 | 182 | Invoke-Process msiexec.exe "/i ${prefix}\assets\VCForPython27.msi /l* ${prefix}\assets\VCForPython27.log /q" |
|
179 | 183 | |
|
180 | 184 | Write-Output "installing Inno Setup" |
|
181 | 185 | Invoke-Process ${prefix}\assets\InnoSetup.exe "/SP- /VERYSILENT /SUPPRESSMSGBOXES" |
|
182 | 186 | |
|
183 | 187 | Write-Output "extracting MinGW base archive" |
|
184 | 188 | Expand-Archive -Path ${prefix}\assets\mingw-get-bin.zip -DestinationPath "${prefix}\MinGW" -Force |
|
185 | 189 | |
|
186 | 190 | Write-Output "updating MinGW package catalogs" |
|
187 | 191 | Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "update" |
|
188 | 192 | |
|
189 | 193 | Write-Output "installing MinGW packages" |
|
190 | 194 | Invoke-Process ${prefix}\MinGW\bin\mingw-get.exe "install msys-base msys-coreutils msys-diffutils msys-unzip" |
|
191 | 195 | |
|
192 | 196 | # Construct a virtualenv useful for bootstrapping. It conveniently contains a |
|
193 | 197 | # Mercurial install. |
|
194 | 198 | Write-Output "creating bootstrap virtualenv with Mercurial" |
|
195 | 199 | Invoke-Process "$prefix\python27-x64\Scripts\virtualenv.exe" "${prefix}\venv-bootstrap" |
|
196 | 200 | Invoke-Process "${prefix}\venv-bootstrap\Scripts\pip.exe" "install ${prefix}\assets\${MERCURIAL_WHEEL_FILENAME}" |
|
197 | 201 | } |
|
198 | 202 | |
|
199 | 203 | function Clone-Mercurial-Repo($prefix, $repo_url, $dest) { |
|
200 | 204 | Write-Output "cloning $repo_url to $dest" |
|
201 | 205 | # TODO Figure out why CA verification isn't working in EC2 and remove |
|
202 | 206 | # --insecure. |
|
203 | 207 | Invoke-Process "${prefix}\venv-bootstrap\Scripts\hg.exe" "clone --insecure $repo_url $dest" |
|
204 | 208 | |
|
205 | 209 | # Mark repo as non-publishing by default for convenience. |
|
206 | 210 | Add-Content -Path "$dest\.hg\hgrc" -Value "`n[phases]`npublish = false" |
|
207 | 211 | } |
|
208 | 212 | |
|
209 | 213 | $prefix = "c:\hgdev" |
|
210 | 214 | Install-Dependencies $prefix |
|
211 | 215 | Clone-Mercurial-Repo $prefix "https://www.mercurial-scm.org/repo/hg" $prefix\src |
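
Secure-Download above implements a download-then-verify loop: reuse a cached file when its SHA-256 still matches, otherwise fetch it and fail hard on a digest mismatch. A minimal Python sketch of the same pattern (helper name and arguments are illustrative):

    import hashlib
    import pathlib
    import urllib.request

    def secure_download(url: str, path: pathlib.Path, sha256: str):
        # Reuse a previously downloaded file only if its digest matches.
        if path.exists():
            if hashlib.sha256(path.read_bytes()).hexdigest() == sha256:
                return
            print("hash mismatch on %s; downloading again" % path)
        urllib.request.urlretrieve(url, str(path))
        digest = hashlib.sha256(path.read_bytes()).hexdigest()
        if digest != sha256:
            # Remove the bad artifact so the next run retries cleanly.
            path.unlink()
            raise Exception("got %s, expected %s from %s" % (digest, sha256, url))
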
@@ -1,187 +1,194 b''
|
1 | 1 | # cli.py - Command line interface for automation |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | # no-check-code because Python 3 native. |
|
9 | 9 | |
|
10 | 10 | import argparse |
|
11 | 11 | import os |
|
12 | 12 | import pathlib |
|
13 | 13 | |
|
14 | 14 | from . import ( |
|
15 | 15 | inno, |
|
16 | 16 | wix, |
|
17 | 17 | ) |
|
18 | 18 | |
|
19 | 19 | HERE = pathlib.Path(os.path.abspath(os.path.dirname(__file__))) |
|
20 | 20 | SOURCE_DIR = HERE.parent.parent.parent |
|
21 | 21 | |
|
22 | 22 | |
|
23 | 23 | def build_inno(pyoxidizer_target=None, python=None, iscc=None, version=None): |
|
24 | 24 | if not pyoxidizer_target and not python: |
|
25 | 25 | raise Exception("--python required unless building with PyOxidizer") |
|
26 | 26 | |
|
27 | 27 | if python and not os.path.isabs(python): |
|
28 | 28 | raise Exception("--python arg must be an absolute path") |
|
29 | 29 | |
|
30 | 30 | if iscc: |
|
31 | 31 | iscc = pathlib.Path(iscc) |
|
32 | 32 | else: |
|
33 | 33 | iscc = ( |
|
34 | 34 | pathlib.Path(os.environ["ProgramFiles(x86)"]) |
|
35 | 35 | / "Inno Setup 5" |
|
36 | 36 | / "ISCC.exe" |
|
37 | 37 | ) |
|
38 | 38 | |
|
39 | 39 | build_dir = SOURCE_DIR / "build" |
|
40 | 40 | |
|
41 | 41 | if pyoxidizer_target: |
|
42 | 42 | inno.build_with_pyoxidizer( |
|
43 | 43 | SOURCE_DIR, build_dir, pyoxidizer_target, iscc, version=version |
|
44 | 44 | ) |
|
45 | 45 | else: |
|
46 | 46 | inno.build_with_py2exe( |
|
47 | 47 | SOURCE_DIR, |
|
48 | 48 | build_dir, |
|
49 | 49 | pathlib.Path(python), |
|
50 | 50 | iscc, |
|
51 | 51 | version=version, |
|
52 | 52 | ) |
|
53 | 53 | |
|
54 | 54 | |
|
55 | 55 | def build_wix( |
|
56 | 56 | name=None, |
|
57 | 57 | pyoxidizer_target=None, |
|
58 | 58 | python=None, |
|
59 | 59 | version=None, |
|
60 | 60 | sign_sn=None, |
|
61 | 61 | sign_cert=None, |
|
62 | 62 | sign_password=None, |
|
63 | 63 | sign_timestamp_url=None, |
|
64 | 64 | extra_packages_script=None, |
|
65 | 65 | extra_wxs=None, |
|
66 | 66 | extra_features=None, |
|
67 | extra_pyoxidizer_vars=None, | |
|
67 | 68 | ): |
|
68 | 69 | if not pyoxidizer_target and not python: |
|
69 | 70 | raise Exception("--python required unless building with PyOxidizer") |
|
70 | 71 | |
|
71 | 72 | if python and not os.path.isabs(python): |
|
72 | 73 | raise Exception("--python arg must be an absolute path") |
|
73 | 74 | |
|
74 | 75 | kwargs = { |
|
75 | 76 | "source_dir": SOURCE_DIR, |
|
76 | 77 | "version": version, |
|
77 | 78 | } |
|
78 | 79 | |
|
79 | 80 | if pyoxidizer_target: |
|
80 | 81 | fn = wix.build_installer_pyoxidizer |
|
81 | 82 | kwargs["target_triple"] = pyoxidizer_target |
|
82 | 83 | else: |
|
83 | 84 | fn = wix.build_installer_py2exe |
|
84 | 85 | kwargs["python_exe"] = pathlib.Path(python) |
|
85 | 86 | |
|
86 | 87 | if extra_packages_script: |
|
87 | 88 | if pyoxidizer_target: |
|
88 | 89 | raise Exception( |
|
89 | 90 | "pyoxidizer does not support --extra-packages-script" |
|
90 | 91 | ) |
|
91 | 92 | kwargs["extra_packages_script"] = extra_packages_script |
|
92 | 93 | if extra_wxs: |
|
93 | 94 | kwargs["extra_wxs"] = dict( |
|
94 | 95 | thing.split("=") for thing in extra_wxs.split(",") |
|
95 | 96 | ) |
|
96 | 97 | if extra_features: |
|
97 | 98 | kwargs["extra_features"] = extra_features.split(",") |
|
98 | 99 | |
|
99 | 100 | if sign_sn or sign_cert: |
|
100 | 101 | kwargs["signing_info"] = { |
|
101 | 102 | "name": name, |
|
102 | 103 | "subject_name": sign_sn, |
|
103 | 104 | "cert_path": sign_cert, |
|
104 | 105 | "cert_password": sign_password, |
|
105 | 106 | "timestamp_url": sign_timestamp_url, |
|
106 | 107 | } |
|
107 | 108 | |
|
108 | fn(**kwargs) | |
|
109 | fn(**kwargs, extra_pyoxidizer_vars=extra_pyoxidizer_vars) | |
|
109 | 110 | |
|
110 | 111 | |
|
111 | 112 | def get_parser(): |
|
112 | 113 | parser = argparse.ArgumentParser() |
|
113 | 114 | |
|
114 | 115 | subparsers = parser.add_subparsers() |
|
115 | 116 | |
|
116 | 117 | sp = subparsers.add_parser("inno", help="Build Inno Setup installer") |
|
117 | 118 | sp.add_argument( |
|
118 | 119 | "--pyoxidizer-target", |
|
119 | 120 | choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"}, |
|
120 | 121 | help="Build with PyOxidizer targeting this host triple", |
|
121 | 122 | ) |
|
122 | 123 | sp.add_argument("--python", help="path to python.exe to use") |
|
123 | 124 | sp.add_argument("--iscc", help="path to iscc.exe to use") |
|
124 | 125 | sp.add_argument( |
|
125 | 126 | "--version", |
|
126 | 127 | help="Mercurial version string to use " |
|
127 | 128 | "(detected from __version__.py if not defined", |
|
128 | 129 | ) |
|
129 | 130 | sp.set_defaults(func=build_inno) |
|
130 | 131 | |
|
131 | 132 | sp = subparsers.add_parser( |
|
132 | 133 | "wix", help="Build Windows installer with WiX Toolset" |
|
133 | 134 | ) |
|
134 | 135 | sp.add_argument("--name", help="Application name", default="Mercurial") |
|
135 | 136 | sp.add_argument( |
|
136 | 137 | "--pyoxidizer-target", |
|
137 | 138 | choices={"i686-pc-windows-msvc", "x86_64-pc-windows-msvc"}, |
|
138 | 139 | help="Build with PyOxidizer targeting this host triple", |
|
139 | 140 | ) |
|
140 | 141 | sp.add_argument("--python", help="Path to Python executable to use") |
|
141 | 142 | sp.add_argument( |
|
142 | 143 | "--sign-sn", |
|
143 | 144 | help="Subject name (or fragment thereof) of certificate " |
|
144 | 145 | "to use for signing", |
|
145 | 146 | ) |
|
146 | 147 | sp.add_argument( |
|
147 | 148 | "--sign-cert", help="Path to certificate to use for signing" |
|
148 | 149 | ) |
|
149 | 150 | sp.add_argument("--sign-password", help="Password for signing certificate") |
|
150 | 151 | sp.add_argument( |
|
151 | 152 | "--sign-timestamp-url", |
|
152 | 153 | help="URL of timestamp server to use for signing", |
|
153 | 154 | ) |
|
154 | 155 | sp.add_argument("--version", help="Version string to use") |
|
155 | 156 | sp.add_argument( |
|
156 | 157 | "--extra-packages-script", |
|
157 | 158 | help=( |
|
158 | 159 | "Script to execute to include extra packages in " "py2exe binary." |
|
159 | 160 | ), |
|
160 | 161 | ) |
|
161 | 162 | sp.add_argument( |
|
162 | 163 | "--extra-wxs", help="CSV of path_to_wxs_file=working_dir_for_wxs_file" |
|
163 | 164 | ) |
|
164 | 165 | sp.add_argument( |
|
165 | 166 | "--extra-features", |
|
166 | 167 | help=( |
|
167 | 168 | "CSV of extra feature names to include " |
|
168 | 169 | "in the installer from the extra wxs files" |
|
169 | 170 | ), |
|
170 | 171 | ) |
|
172 | ||
|
173 | sp.add_argument( | |
|
174 | "--extra-pyoxidizer-vars", | |
|
175 | help="json map of extra variables to pass to pyoxidizer", | |
|
176 | ) | |
|
177 | ||
|
171 | 178 | sp.set_defaults(func=build_wix) |
|
172 | 179 | |
|
173 | 180 | return parser |
|
174 | 181 | |
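
The new --extra-pyoxidizer-vars option accepts a JSON object that build_wix() forwards into the PyOxidizer variable map. A hypothetical invocation sketch (contrib/packaging/packaging.py is assumed to be the CLI entry point for this package; the variable name and URL are illustrative):

    import json
    import subprocess

    # Pass one extra variable through to pyoxidizer's --var machinery.
    extra = json.dumps({"TIME_STAMP_SERVER_URL": "http://timestamp.example"})
    subprocess.run(
        [
            "py", "-3", r"contrib\packaging\packaging.py", "wix",
            "--pyoxidizer-target", "x86_64-pc-windows-msvc",
            "--extra-pyoxidizer-vars", extra,
        ],
        check=True,
    )
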
|
175 | 182 | |
|
176 | 183 | def main(): |
|
177 | 184 | parser = get_parser() |
|
178 | 185 | args = parser.parse_args() |
|
179 | 186 | |
|
180 | 187 | if not hasattr(args, "func"): |
|
181 | 188 | parser.print_help() |
|
182 | 189 | return |
|
183 | 190 | |
|
184 | 191 | kwargs = dict(vars(args)) |
|
185 | 192 | del kwargs["func"] |
|
186 | 193 | |
|
187 | 194 | args.func(**kwargs) |
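
main() relies on the standard argparse set_defaults(func=...) dispatch idiom: every parsed option except func is forwarded to the selected subcommand as a keyword argument. A self-contained sketch of the pattern (names are illustrative):

    import argparse

    def build_demo(name=None):
        print("building", name)

    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers()
    sp = subparsers.add_parser("demo")
    sp.add_argument("--name")
    sp.set_defaults(func=build_demo)

    args = parser.parse_args(["demo", "--name", "hg"])
    kwargs = dict(vars(args))  # {'name': 'hg', 'func': build_demo}
    del kwargs["func"]         # strip the callable before forwarding
    args.func(**kwargs)        # prints: building hg
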
@@ -1,242 +1,244 b''
|
1 | 1 | # inno.py - Inno Setup functionality. |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | # no-check-code because Python 3 native. |
|
9 | 9 | |
|
10 | 10 | import os |
|
11 | 11 | import pathlib |
|
12 | 12 | import shutil |
|
13 | 13 | import subprocess |
|
14 | 14 | |
|
15 | 15 | import jinja2 |
|
16 | 16 | |
|
17 | 17 | from .py2exe import ( |
|
18 | 18 | build_py2exe, |
|
19 | 19 | stage_install, |
|
20 | 20 | ) |
|
21 | from .pyoxidizer import run_pyoxidizer |
|
|
21 | from .pyoxidizer import create_pyoxidizer_install_layout | |
|
22 | 22 | from .util import ( |
|
23 | 23 | find_legacy_vc_runtime_files, |
|
24 | 24 | normalize_windows_version, |
|
25 | 25 | process_install_rules, |
|
26 | 26 | read_version_py, |
|
27 | 27 | ) |
|
28 | 28 | |
|
29 | 29 | EXTRA_PACKAGES = { |
|
30 | 30 | 'dulwich', |
|
31 | 31 | 'keyring', |
|
32 | 32 | 'pygments', |
|
33 | 33 | 'win32ctypes', |
|
34 | 34 | } |
|
35 | 35 | |
|
36 | 36 | EXTRA_INCLUDES = { |
|
37 | 37 | '_curses', |
|
38 | 38 | '_curses_panel', |
|
39 | 39 | } |
|
40 | 40 | |
|
41 | 41 | EXTRA_INSTALL_RULES = [ |
|
42 | 42 | ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'), |
|
43 | 43 | ] |
|
44 | 44 | |
|
45 | 45 | PACKAGE_FILES_METADATA = { |
|
46 | 46 | 'ReadMe.html': 'Flags: isreadme', |
|
47 | 47 | } |
|
48 | 48 | |
|
49 | 49 | |
|
50 | 50 | def build_with_py2exe( |
|
51 | 51 | source_dir: pathlib.Path, |
|
52 | 52 | build_dir: pathlib.Path, |
|
53 | 53 | python_exe: pathlib.Path, |
|
54 | 54 | iscc_exe: pathlib.Path, |
|
55 | 55 | version=None, |
|
56 | 56 | ): |
|
57 | 57 | """Build the Inno installer using py2exe. |
|
58 | 58 | |
|
59 | 59 | Build files will be placed in ``build_dir``. |
|
60 | 60 | |
|
61 | 61 | py2exe's setup.py doesn't use setuptools. It doesn't have modern logic |
|
62 | 62 | for finding the Python 2.7 toolchain. So, we require the environment |
|
63 | 63 | to already be configured with an active toolchain. |
|
64 | 64 | """ |
|
65 | 65 | if not iscc_exe.exists(): |
|
66 | 66 | raise Exception('%s does not exist' % iscc_exe) |
|
67 | 67 | |
|
68 | 68 | vc_x64 = r'\x64' in os.environ.get('LIB', '') |
|
69 | 69 | arch = 'x64' if vc_x64 else 'x86' |
|
70 | 70 | inno_build_dir = build_dir / ('inno-py2exe-%s' % arch) |
|
71 | 71 | staging_dir = inno_build_dir / 'stage' |
|
72 | 72 | |
|
73 | 73 | requirements_txt = ( |
|
74 | 74 | source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt' |
|
75 | 75 | ) |
|
76 | 76 | |
|
77 | 77 | inno_build_dir.mkdir(parents=True, exist_ok=True) |
|
78 | 78 | |
|
79 | 79 | build_py2exe( |
|
80 | 80 | source_dir, |
|
81 | 81 | build_dir, |
|
82 | 82 | python_exe, |
|
83 | 83 | 'inno', |
|
84 | 84 | requirements_txt, |
|
85 | 85 | extra_packages=EXTRA_PACKAGES, |
|
86 | 86 | extra_includes=EXTRA_INCLUDES, |
|
87 | 87 | ) |
|
88 | 88 | |
|
89 | 89 | # Purge the staging directory for every build so packaging is |
|
90 | 90 | # pristine. |
|
91 | 91 | if staging_dir.exists(): |
|
92 | 92 | print('purging %s' % staging_dir) |
|
93 | 93 | shutil.rmtree(staging_dir) |
|
94 | 94 | |
|
95 | 95 | # Now assemble all the packaged files into the staging directory. |
|
96 | 96 | stage_install(source_dir, staging_dir) |
|
97 | 97 | |
|
98 | 98 | # We also install some extra files. |
|
99 | 99 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) |
|
100 | 100 | |
|
101 | 101 | # hg.exe depends on VC9 runtime DLLs. Copy those into place. |
|
102 | 102 | for f in find_legacy_vc_runtime_files(vc_x64): |
|
103 | 103 | if f.name.endswith('.manifest'): |
|
104 | 104 | basename = 'Microsoft.VC90.CRT.manifest' |
|
105 | 105 | else: |
|
106 | 106 | basename = f.name |
|
107 | 107 | |
|
108 | 108 | dest_path = staging_dir / basename |
|
109 | 109 | |
|
110 | 110 | print('copying %s to %s' % (f, dest_path)) |
|
111 | 111 | shutil.copyfile(f, dest_path) |
|
112 | 112 | |
|
113 | 113 | build_installer( |
|
114 | 114 | source_dir, |
|
115 | 115 | inno_build_dir, |
|
116 | 116 | staging_dir, |
|
117 | 117 | iscc_exe, |
|
118 | 118 | version, |
|
119 | 119 | arch="x64" if vc_x64 else None, |
|
120 | 120 | suffix="-python2", |
|
121 | 121 | ) |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def build_with_pyoxidizer( |
|
125 | 125 | source_dir: pathlib.Path, |
|
126 | 126 | build_dir: pathlib.Path, |
|
127 | 127 | target_triple: str, |
|
128 | 128 | iscc_exe: pathlib.Path, |
|
129 | 129 | version=None, |
|
130 | 130 | ): |
|
131 | 131 | """Build the Inno installer using PyOxidizer.""" |
|
132 | 132 | if not iscc_exe.exists(): |
|
133 | 133 | raise Exception("%s does not exist" % iscc_exe) |
|
134 | 134 | |
|
135 | 135 | inno_build_dir = build_dir / ("inno-pyoxidizer-%s" % target_triple) |
|
136 | 136 | staging_dir = inno_build_dir / "stage" |
|
137 | 137 | |
|
138 | 138 | inno_build_dir.mkdir(parents=True, exist_ok=True) |
|
139 | run_pyoxidizer(source_dir, inno_build_dir, staging_dir, target_triple) | |
|
139 | create_pyoxidizer_install_layout( | |
|
140 | source_dir, inno_build_dir, staging_dir, target_triple | |
|
141 | ) | |
|
140 | 142 | |
|
141 | 143 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) |
|
142 | 144 | |
|
143 | 145 | build_installer( |
|
144 | 146 | source_dir, |
|
145 | 147 | inno_build_dir, |
|
146 | 148 | staging_dir, |
|
147 | 149 | iscc_exe, |
|
148 | 150 | version, |
|
149 | 151 | arch="x64" if "x86_64" in target_triple else None, |
|
150 | 152 | ) |
|
151 | 153 | |
|
152 | 154 | |
|
153 | 155 | def build_installer( |
|
154 | 156 | source_dir: pathlib.Path, |
|
155 | 157 | inno_build_dir: pathlib.Path, |
|
156 | 158 | staging_dir: pathlib.Path, |
|
157 | 159 | iscc_exe: pathlib.Path, |
|
158 | 160 | version, |
|
159 | 161 | arch=None, |
|
160 | 162 | suffix="", |
|
161 | 163 | ): |
|
162 | 164 | """Build an Inno installer from staged Mercurial files. |
|
163 | 165 | |
|
164 | 166 | This function is agnostic about how to build Mercurial. It just |
|
165 | 167 | cares that Mercurial files are in ``staging_dir``. |
|
166 | 168 | """ |
|
167 | 169 | inno_source_dir = source_dir / "contrib" / "packaging" / "inno" |
|
168 | 170 | |
|
169 | 171 | # The final package layout is simply a mirror of the staging directory. |
|
170 | 172 | package_files = [] |
|
171 | 173 | for root, dirs, files in os.walk(staging_dir): |
|
172 | 174 | dirs.sort() |
|
173 | 175 | |
|
174 | 176 | root = pathlib.Path(root) |
|
175 | 177 | |
|
176 | 178 | for f in sorted(files): |
|
177 | 179 | full = root / f |
|
178 | 180 | rel = full.relative_to(staging_dir) |
|
179 | 181 | if str(rel.parent) == '.': |
|
180 | 182 | dest_dir = '{app}' |
|
181 | 183 | else: |
|
182 | 184 | dest_dir = '{app}\\%s' % rel.parent |
|
183 | 185 | |
|
184 | 186 | package_files.append( |
|
185 | 187 | { |
|
186 | 188 | 'source': rel, |
|
187 | 189 | 'dest_dir': dest_dir, |
|
188 | 190 | 'metadata': PACKAGE_FILES_METADATA.get(str(rel), None), |
|
189 | 191 | } |
|
190 | 192 | ) |
|
191 | 193 | |
|
192 | 194 | print('creating installer') |
|
193 | 195 | |
|
194 | 196 | # Install Inno files by rendering a template. |
|
195 | 197 | jinja_env = jinja2.Environment( |
|
196 | 198 | loader=jinja2.FileSystemLoader(str(inno_source_dir)), |
|
197 | 199 | # Need to change these to prevent conflict with Inno Setup. |
|
198 | 200 | comment_start_string='{##', |
|
199 | 201 | comment_end_string='##}', |
|
200 | 202 | ) |
|
201 | 203 | |
|
202 | 204 | try: |
|
203 | 205 | template = jinja_env.get_template('mercurial.iss') |
|
204 | 206 | except jinja2.TemplateSyntaxError as e: |
|
205 | 207 | raise Exception( |
|
206 | 208 | 'template syntax error at %s:%d: %s' |
|
207 | 209 | % ( |
|
208 | 210 | e.name, |
|
209 | 211 | e.lineno, |
|
210 | 212 | e.message, |
|
211 | 213 | ) |
|
212 | 214 | ) |
|
213 | 215 | |
|
214 | 216 | content = template.render(package_files=package_files) |
|
215 | 217 | |
|
216 | 218 | with (inno_build_dir / 'mercurial.iss').open('w', encoding='utf-8') as fh: |
|
217 | 219 | fh.write(content) |
|
218 | 220 | |
|
219 | 221 | # Copy additional files used by Inno. |
|
220 | 222 | for p in ('mercurial.ico', 'postinstall.txt'): |
|
221 | 223 | shutil.copyfile( |
|
222 | 224 | source_dir / 'contrib' / 'win32' / p, inno_build_dir / p |
|
223 | 225 | ) |
|
224 | 226 | |
|
225 | 227 | args = [str(iscc_exe)] |
|
226 | 228 | |
|
227 | 229 | if arch: |
|
228 | 230 | args.append('/dARCH=%s' % arch) |
|
229 | 231 | args.append('/dSUFFIX=-%s%s' % (arch, suffix)) |
|
230 | 232 | else: |
|
231 | 233 | args.append('/dSUFFIX=-x86%s' % suffix) |
|
232 | 234 | |
|
233 | 235 | if not version: |
|
234 | 236 | version = read_version_py(source_dir) |
|
235 | 237 | |
|
236 | 238 | args.append('/dVERSION=%s' % version) |
|
237 | 239 | args.append('/dQUAD_VERSION=%s' % normalize_windows_version(version)) |
|
238 | 240 | |
|
239 | 241 | args.append('/Odist') |
|
240 | 242 | args.append(str(inno_build_dir / 'mercurial.iss')) |
|
241 | 243 | |
|
242 | 244 | subprocess.run(args, cwd=str(source_dir), check=True) |
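
As the comment in build_installer() notes, the Jinja2 comment delimiters are overridden because Inno Setup's preprocessor uses {#...} inline directives, which collide with Jinja2's default {# ... #} comment syntax. A minimal sketch of the effect (template content is illustrative):

    import jinja2

    env = jinja2.Environment(
        loader=jinja2.DictLoader(
            {"demo.iss": "{## note for maintainers ##}{#emit 1 + 1}"}
        ),
        comment_start_string="{##",
        comment_end_string="##}",
    )
    # The Jinja comment is stripped; the Inno preprocessor directive survives.
    print(env.get_template("demo.iss").render())  # -> {#emit 1 + 1}
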
@@ -1,144 +1,180 b''
|
1 | 1 | # pyoxidizer.py - Packaging support for PyOxidizer |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2020 Gregory Szorc <gregory.szorc@gmail.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | # no-check-code because Python 3 native. |
|
9 | 9 | |
|
10 | 10 | import os |
|
11 | 11 | import pathlib |
|
12 | 12 | import shutil |
|
13 | 13 | import subprocess |
|
14 | 14 | import sys |
|
15 | import typing | |
|
15 | 16 | |
|
16 | 17 | from .downloads import download_entry |
|
17 | 18 | from .util import ( |
|
18 | 19 | extract_zip_to_directory, |
|
19 | 20 | process_install_rules, |
|
20 | 21 | find_vc_runtime_dll, |
|
21 | 22 | ) |
|
22 | 23 | |
|
23 | 24 | |
|
24 | 25 | STAGING_RULES_WINDOWS = [ |
|
25 | 26 | ('contrib/bash_completion', 'contrib/'), |
|
26 | 27 | ('contrib/hgk', 'contrib/hgk.tcl'), |
|
27 | 28 | ('contrib/hgweb.fcgi', 'contrib/'), |
|
28 | 29 | ('contrib/hgweb.wsgi', 'contrib/'), |
|
29 | 30 | ('contrib/logo-droplets.svg', 'contrib/'), |
|
30 | 31 | ('contrib/mercurial.el', 'contrib/'), |
|
31 | 32 | ('contrib/mq.el', 'contrib/'), |
|
32 | 33 | ('contrib/tcsh_completion', 'contrib/'), |
|
33 | 34 | ('contrib/tcsh_completion_build.sh', 'contrib/'), |
|
34 | 35 | ('contrib/vim/*', 'contrib/vim/'), |
|
35 | 36 | ('contrib/win32/postinstall.txt', 'ReleaseNotes.txt'), |
|
36 | 37 | ('contrib/win32/ReadMe.html', 'ReadMe.html'), |
|
37 | 38 | ('contrib/xml.rnc', 'contrib/'), |
|
38 | 39 | ('contrib/zsh_completion', 'contrib/'), |
|
39 | 40 | ('doc/*.html', 'doc/'), |
|
40 | 41 | ('doc/style.css', 'doc/'), |
|
41 | 42 | ('COPYING', 'Copying.txt'), |
|
42 | 43 | ] |
|
43 | 44 | |
|
44 | 45 | STAGING_RULES_APP = [ |
|
45 | 46 | ('lib/mercurial/helptext/**/*.txt', 'helptext/'), |
|
46 | 47 | ('lib/mercurial/defaultrc/*.rc', 'defaultrc/'), |
|
47 | 48 | ('lib/mercurial/locale/**/*', 'locale/'), |
|
48 | 49 | ('lib/mercurial/templates/**/*', 'templates/'), |
|
49 | 50 | ] |
|
50 | 51 | |
|
51 | 52 | STAGING_EXCLUDES_WINDOWS = [ |
|
52 | 53 | "doc/hg-ssh.8.html", |
|
53 | 54 | ] |
|
54 | 55 | |
|
55 | 56 | |
|
57 | def build_docs_html(source_dir: pathlib.Path): | |
|
58 | """Ensures HTML documentation is built. | |
|
59 | ||
|
60 | This will fail if docutils isn't available. | |
|
61 | ||
|
62 | (The HTML docs aren't built as part of `pip install` so we need to build them | |
|
63 | out of band.) | |
|
64 | """ | |
|
65 | subprocess.run( | |
|
66 | [sys.executable, str(source_dir / "setup.py"), "build_doc", "--html"], | |
|
67 | cwd=str(source_dir), | |
|
68 | check=True, | |
|
69 | ) | |
|
70 | ||
|
71 | ||
|
56 | 72 | def run_pyoxidizer( |
|
57 | 73 | source_dir: pathlib.Path, |
|
58 | 74 | build_dir: pathlib.Path, |
|
59 | out_dir: pathlib.Path, | |
|
60 | 75 | target_triple: str, |
|
61 | ): | |
|
62 | """Build Mercurial with PyOxidizer and copy additional files into place. | |
|
76 | build_vars: typing.Optional[typing.Dict[str, str]] = None, | |
|
77 | target: typing.Optional[str] = None, | |
|
78 | ) -> pathlib.Path: | |
|
79 | """Run `pyoxidizer` in an environment with access to build dependencies. | |
|
63 | 80 | |
|
64 | After successful completion, ``out_dir`` contains files constituting a | |
|
65 | Mercurial install. | |
|
81 | Returns the output directory that pyoxidizer would have used for build | |
|
82 | artifacts. Actual build artifacts are likely in a sub-directory with the | |
|
83 | name of the pyoxidizer build target that was built. | |
|
66 | 84 | """ |
|
85 | build_vars = build_vars or {} | |
|
86 | ||
|
67 | 87 | # We need to make gettext binaries available for compiling i18n files. |
|
68 | 88 | gettext_pkg, gettext_entry = download_entry('gettext', build_dir) |
|
69 | 89 | gettext_dep_pkg = download_entry('gettext-dep', build_dir)[0] |
|
70 | 90 | |
|
71 | 91 | gettext_root = build_dir / ('gettext-win-%s' % gettext_entry['version']) |
|
72 | 92 | |
|
73 | 93 | if not gettext_root.exists(): |
|
74 | 94 | extract_zip_to_directory(gettext_pkg, gettext_root) |
|
75 | 95 | extract_zip_to_directory(gettext_dep_pkg, gettext_root) |
|
76 | 96 | |
|
77 | 97 | env = dict(os.environ) |
|
78 | 98 | env["PATH"] = "%s%s%s" % ( |
|
79 | 99 | env["PATH"], |
|
80 | 100 | os.pathsep, |
|
81 | 101 | str(gettext_root / "bin"), |
|
82 | 102 | ) |
|
83 | 103 | |
|
84 | 104 | args = [ |
|
85 | 105 | "pyoxidizer", |
|
86 | 106 | "build", |
|
87 | 107 | "--path", |
|
88 | 108 | str(source_dir / "rust" / "hgcli"), |
|
89 | 109 | "--release", |
|
90 | 110 | "--target-triple", |
|
91 | 111 | target_triple, |
|
92 | 112 | ] |
|
93 | 113 | |
|
114 | for k, v in sorted(build_vars.items()): | |
|
115 | args.extend(["--var", k, v]) | |
|
116 | ||
|
117 | if target: | |
|
118 | args.append(target) | |
|
119 | ||
|
94 | 120 | subprocess.run(args, env=env, check=True) |
|
95 | 121 | |
|
122 | return source_dir / "build" / "pyoxidizer" / target_triple / "release" | |
|
123 | ||
|
124 | ||
|
125 | def create_pyoxidizer_install_layout( | |
|
126 | source_dir: pathlib.Path, | |
|
127 | build_dir: pathlib.Path, | |
|
128 | out_dir: pathlib.Path, | |
|
129 | target_triple: str, | |
|
130 | ): | |
|
131 | """Build Mercurial with PyOxidizer and copy additional files into place. | |
|
132 | ||
|
133 | After successful completion, ``out_dir`` contains files constituting a | |
|
134 | Mercurial install. | |
|
135 | """ | |
|
136 | ||
|
137 | run_pyoxidizer(source_dir, build_dir, target_triple) | |
|
138 | ||
|
96 | 139 | if "windows" in target_triple: |
|
97 | 140 | target = "app_windows" |
|
98 | 141 | else: |
|
99 | 142 | target = "app_posix" |
|
100 | 143 | |
|
101 | 144 | build_dir = ( |
|
102 | 145 | source_dir / "build" / "pyoxidizer" / target_triple / "release" / target |
|
103 | 146 | ) |
|
104 | 147 | |
|
105 | 148 | if out_dir.exists(): |
|
106 | 149 | print("purging %s" % out_dir) |
|
107 | 150 | shutil.rmtree(out_dir) |
|
108 | 151 | |
|
109 | 152 | # Now assemble all the files from PyOxidizer into the staging directory. |
|
110 | 153 | shutil.copytree(build_dir, out_dir) |
|
111 | 154 | |
|
112 | 155 | # Move some of those files around. We can get rid of this once Mercurial |
|
113 | 156 | # is taught to use the importlib APIs for reading resources. |
|
114 | 157 | process_install_rules(STAGING_RULES_APP, build_dir, out_dir) |
|
115 | 158 | |
|
116 | # We also need to run setup.py build_doc to produce html files, | |
|
117 | # as they aren't built as part of ``pip install``. | |
|
118 | # This will fail if docutils isn't installed. | |
|
119 | subprocess.run( | |
|
120 | [sys.executable, str(source_dir / "setup.py"), "build_doc", "--html"], | |
|
121 | cwd=str(source_dir), | |
|
122 | check=True, | |
|
123 | ) | |
|
159 | build_docs_html(source_dir) | |
|
124 | 160 | |
|
125 | 161 | if "windows" in target_triple: |
|
126 | 162 | process_install_rules(STAGING_RULES_WINDOWS, source_dir, out_dir) |
|
127 | 163 | |
|
128 | 164 | # Write out a default editor.rc file to configure notepad as the |
|
129 | 165 | # default editor. |
|
130 | 166 | os.makedirs(out_dir / "defaultrc", exist_ok=True) |
|
131 | 167 | with (out_dir / "defaultrc" / "editor.rc").open( |
|
132 | 168 | "w", encoding="utf-8" |
|
133 | 169 | ) as fh: |
|
134 | 170 | fh.write("[ui]\neditor = notepad\n") |
|
135 | 171 | |
|
136 | 172 | for f in STAGING_EXCLUDES_WINDOWS: |
|
137 | 173 | p = out_dir / f |
|
138 | 174 | if p.exists(): |
|
139 | 175 | print("removing %s" % p) |
|
140 | 176 | p.unlink() |
|
141 | 177 | |
|
142 | 178 | # Add vcruntimeXXX.dll next to executable. |
|
143 | 179 | vc_runtime_dll = find_vc_runtime_dll(x64="x86_64" in target_triple) |
|
144 | 180 | shutil.copy(vc_runtime_dll, out_dir / vc_runtime_dll.name) |
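
run_pyoxidizer() now threads an optional build_vars mapping through to pyoxidizer's --var flag, one key/value pair per variable. A minimal sketch of the expansion it performs, with illustrative values:

    # Mirrors the --var loop in run_pyoxidizer(); the values are made up.
    build_vars = {"VERSION": "5.8.1", "MSI_NAME": "mercurial"}

    args = ["pyoxidizer", "build", "--release"]
    for k, v in sorted(build_vars.items()):
        args.extend(["--var", k, v])

    print(args)
    # ['pyoxidizer', 'build', '--release',
    #  '--var', 'MSI_NAME', 'mercurial', '--var', 'VERSION', '5.8.1']
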
@@ -1,547 +1,586 b''
|
1 | 1 | # wix.py - WiX installer functionality |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2019 Gregory Szorc <gregory.szorc@gmail.com> |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | # no-check-code because Python 3 native. |
|
9 | 9 | |
|
10 | 10 | import collections |
|
11 | import json | |
|
11 | 12 | import os |
|
12 | 13 | import pathlib |
|
13 | 14 | import re |
|
14 | 15 | import shutil |
|
15 | 16 | import subprocess |
|
16 | 17 | import typing |
|
17 | 18 | import uuid |
|
18 | 19 | import xml.dom.minidom |
|
19 | 20 | |
|
20 | 21 | from .downloads import download_entry |
|
21 | 22 | from .py2exe import ( |
|
22 | 23 | build_py2exe, |
|
23 | 24 | stage_install, |
|
24 | 25 | ) |
|
25 | from .pyoxidizer import run_pyoxidizer |
|
|
26 | from .pyoxidizer import ( | |
|
27 | build_docs_html, | |
|
28 | create_pyoxidizer_install_layout, | |
|
29 | run_pyoxidizer, | |
|
30 | ) | |
|
26 | 31 | from .util import ( |
|
27 | 32 | extract_zip_to_directory, |
|
28 | 33 | normalize_windows_version, |
|
29 | 34 | process_install_rules, |
|
30 | 35 | sign_with_signtool, |
|
31 | 36 | ) |
|
32 | 37 | |
|
33 | 38 | |
|
34 | 39 | EXTRA_PACKAGES = { |
|
35 | 40 | 'dulwich', |
|
36 | 41 | 'distutils', |
|
37 | 42 | 'keyring', |
|
38 | 43 | 'pygments', |
|
39 | 44 | 'win32ctypes', |
|
40 | 45 | } |
|
41 | 46 | |
|
42 | 47 | EXTRA_INCLUDES = { |
|
43 | 48 | '_curses', |
|
44 | 49 | '_curses_panel', |
|
45 | 50 | } |
|
46 | 51 | |
|
47 | 52 | EXTRA_INSTALL_RULES = [ |
|
48 | 53 | ('contrib/packaging/wix/COPYING.rtf', 'COPYING.rtf'), |
|
49 | 54 | ('contrib/win32/mercurial.ini', 'defaultrc/mercurial.rc'), |
|
50 | 55 | ] |
|
51 | 56 | |
|
52 | 57 | STAGING_REMOVE_FILES = [ |
|
53 | 58 | # We use the RTF variant. |
|
54 | 59 | 'copying.txt', |
|
55 | 60 | ] |
|
56 | 61 | |
|
57 | 62 | SHORTCUTS = { |
|
58 | 63 | # hg.1.html' |
|
59 | 64 | 'hg.file.5d3e441c_28d9_5542_afd0_cdd4234f12d5': { |
|
60 | 65 | 'Name': 'Mercurial Command Reference', |
|
61 | 66 | }, |
|
62 | 67 | # hgignore.5.html |
|
63 | 68 | 'hg.file.5757d8e0_f207_5e10_a2ec_3ba0a062f431': { |
|
64 | 69 | 'Name': 'Mercurial Ignore Files', |
|
65 | 70 | }, |
|
66 | 71 | # hgrc.5.html |
|
67 | 72 | 'hg.file.92e605fd_1d1a_5dc6_9fc0_5d2998eb8f5e': { |
|
68 | 73 | 'Name': 'Mercurial Configuration Files', |
|
69 | 74 | }, |
|
70 | 75 | } |
|
71 | 76 | |
|
72 | 77 | |
|
73 | 78 | def find_version(source_dir: pathlib.Path): |
|
74 | 79 | version_py = source_dir / 'mercurial' / '__version__.py' |
|
75 | 80 | |
|
76 | 81 | with version_py.open('r', encoding='utf-8') as fh: |
|
77 | 82 | source = fh.read().strip() |
|
78 | 83 | |
|
79 | 84 | m = re.search('version = b"(.*)"', source) |
|
80 | 85 | return m.group(1) |
|
81 | 86 | |
|
82 | 87 | |
|
83 | 88 | def ensure_vc90_merge_modules(build_dir): |
|
84 | 89 | x86 = ( |
|
85 | 90 | download_entry( |
|
86 | 91 | 'vc9-crt-x86-msm', |
|
87 | 92 | build_dir, |
|
88 | 93 | local_name='microsoft.vcxx.crt.x86_msm.msm', |
|
89 | 94 | )[0], |
|
90 | 95 | download_entry( |
|
91 | 96 | 'vc9-crt-x86-msm-policy', |
|
92 | 97 | build_dir, |
|
93 | 98 | local_name='policy.x.xx.microsoft.vcxx.crt.x86_msm.msm', |
|
94 | 99 | )[0], |
|
95 | 100 | ) |
|
96 | 101 | |
|
97 | 102 | x64 = ( |
|
98 | 103 | download_entry( |
|
99 | 104 | 'vc9-crt-x64-msm', |
|
100 | 105 | build_dir, |
|
101 | 106 | local_name='microsoft.vcxx.crt.x64_msm.msm', |
|
102 | 107 | )[0], |
|
103 | 108 | download_entry( |
|
104 | 109 | 'vc9-crt-x64-msm-policy', |
|
105 | 110 | build_dir, |
|
106 | 111 | local_name='policy.x.xx.microsoft.vcxx.crt.x64_msm.msm', |
|
107 | 112 | )[0], |
|
108 | 113 | ) |
|
109 | 114 | return { |
|
110 | 115 | 'x86': x86, |
|
111 | 116 | 'x64': x64, |
|
112 | 117 | } |
|
113 | 118 | |
|
114 | 119 | |
|
115 | 120 | def run_candle(wix, cwd, wxs, source_dir, defines=None): |
|
116 | 121 | args = [ |
|
117 | 122 | str(wix / 'candle.exe'), |
|
118 | 123 | '-nologo', |
|
119 | 124 | str(wxs), |
|
120 | 125 | '-dSourceDir=%s' % source_dir, |
|
121 | 126 | ] |
|
122 | 127 | |
|
123 | 128 | if defines: |
|
124 | 129 | args.extend('-d%s=%s' % define for define in sorted(defines.items())) |
|
125 | 130 | |
|
126 | 131 | subprocess.run(args, cwd=str(cwd), check=True) |
|
127 | 132 | |
|
128 | 133 | |
|
129 | 134 | def make_files_xml(staging_dir: pathlib.Path, is_x64) -> str: |
|
130 | 135 | """Create XML string listing every file to be installed.""" |
|
131 | 136 | |
|
132 | 137 | # We derive GUIDs from a deterministic file path identifier. |
|
133 | 138 | # We shoehorn the name into something that looks like a URL because |
|
134 | 139 | # the UUID namespaces are supposed to work that way (even though |
|
135 | 140 | # the input data probably is never validated). |
|
136 | 141 | |
|
137 | 142 | doc = xml.dom.minidom.parseString( |
|
138 | 143 | '<?xml version="1.0" encoding="utf-8"?>' |
|
139 | 144 | '<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">' |
|
140 | 145 | '</Wix>' |
|
141 | 146 | ) |
|
142 | 147 | |
|
143 | 148 | # Assemble the install layout by directory. This makes it easier to |
|
144 | 149 | # emit XML, since each directory has separate entities. |
|
145 | 150 | manifest = collections.defaultdict(dict) |
|
146 | 151 | |
|
147 | 152 | for root, dirs, files in os.walk(staging_dir): |
|
148 | 153 | dirs.sort() |
|
149 | 154 | |
|
150 | 155 | root = pathlib.Path(root) |
|
151 | 156 | rel_dir = root.relative_to(staging_dir) |
|
152 | 157 | |
|
153 | 158 | for i in range(len(rel_dir.parts)): |
|
154 | 159 | parent = '/'.join(rel_dir.parts[0 : i + 1]) |
|
155 | 160 | manifest.setdefault(parent, {}) |
|
156 | 161 | |
|
157 | 162 | for f in sorted(files): |
|
158 | 163 | full = root / f |
|
159 | 164 | manifest[str(rel_dir).replace('\\', '/')][full.name] = full |
|
160 | 165 | |
|
161 | 166 | component_groups = collections.defaultdict(list) |
|
162 | 167 | |
|
163 | 168 | # Now emit a <Fragment> for each directory. |
|
164 | 169 | # Each directory is composed of a <DirectoryRef> pointing to its parent |
|
165 | 170 | # and defines child <Directory>'s and a <Component> with all the files. |
|
166 | 171 | for dir_name, entries in sorted(manifest.items()): |
|
167 | 172 | # The directory id is derived from the path. But the root directory |
|
168 | 173 | # is special. |
|
169 | 174 | if dir_name == '.': |
|
170 | 175 | parent_directory_id = 'INSTALLDIR' |
|
171 | 176 | else: |
|
172 | 177 | parent_directory_id = 'hg.dir.%s' % dir_name.replace( |
|
173 | 178 | '/', '.' |
|
174 | 179 | ).replace('-', '_') |
|
175 | 180 | |
|
176 | 181 | fragment = doc.createElement('Fragment') |
|
177 | 182 | directory_ref = doc.createElement('DirectoryRef') |
|
178 | 183 | directory_ref.setAttribute('Id', parent_directory_id) |
|
179 | 184 | |
|
180 | 185 | # Add <Directory> entries for immediate children directories. |
|
181 | 186 | for possible_child in sorted(manifest.keys()): |
|
182 | 187 | if ( |
|
183 | 188 | dir_name == '.' |
|
184 | 189 | and '/' not in possible_child |
|
185 | 190 | and possible_child != '.' |
|
186 | 191 | ): |
|
187 | 192 | child_directory_id = ('hg.dir.%s' % possible_child).replace( |
|
188 | 193 | '-', '_' |
|
189 | 194 | ) |
|
190 | 195 | name = possible_child |
|
191 | 196 | else: |
|
192 | 197 | if not possible_child.startswith('%s/' % dir_name): |
|
193 | 198 | continue |
|
194 | 199 | name = possible_child[len(dir_name) + 1 :] |
|
195 | 200 | if '/' in name: |
|
196 | 201 | continue |
|
197 | 202 | |
|
198 | 203 | child_directory_id = 'hg.dir.%s' % possible_child.replace( |
|
199 | 204 | '/', '.' |
|
200 | 205 | ).replace('-', '_') |
|
201 | 206 | |
|
202 | 207 | directory = doc.createElement('Directory') |
|
203 | 208 | directory.setAttribute('Id', child_directory_id) |
|
204 | 209 | directory.setAttribute('Name', name) |
|
205 | 210 | directory_ref.appendChild(directory) |
|
206 | 211 | |
|
207 | 212 | # Add <Component>s for files in this directory. |
|
208 | 213 | for rel, source_path in sorted(entries.items()): |
|
209 | 214 | if dir_name == '.': |
|
210 | 215 | full_rel = rel |
|
211 | 216 | else: |
|
212 | 217 | full_rel = '%s/%s' % (dir_name, rel) |
|
213 | 218 | |
|
214 | 219 | component_unique_id = ( |
|
215 | 220 | 'https://www.mercurial-scm.org/wix-installer/0/component/%s' |
|
216 | 221 | % full_rel |
|
217 | 222 | ) |
|
218 | 223 | component_guid = uuid.uuid5(uuid.NAMESPACE_URL, component_unique_id) |
|
219 | 224 | component_id = 'hg.component.%s' % str(component_guid).replace( |
|
220 | 225 | '-', '_' |
|
221 | 226 | ) |
|
222 | 227 | |
|
223 | 228 | component = doc.createElement('Component') |
|
224 | 229 | |
|
225 | 230 | component.setAttribute('Id', component_id) |
|
226 | 231 | component.setAttribute('Guid', str(component_guid).upper()) |
|
227 | 232 | component.setAttribute('Win64', 'yes' if is_x64 else 'no') |
|
228 | 233 | |
|
229 | 234 | # Assign this component to a top-level group. |
|
230 | 235 | if dir_name == '.': |
|
231 | 236 | component_groups['ROOT'].append(component_id) |
|
232 | 237 | elif '/' in dir_name: |
|
233 | 238 | component_groups[dir_name[0 : dir_name.index('/')]].append( |
|
234 | 239 | component_id |
|
235 | 240 | ) |
|
236 | 241 | else: |
|
237 | 242 | component_groups[dir_name].append(component_id) |
|
238 | 243 | |
|
239 | 244 | unique_id = ( |
|
240 | 245 | 'https://www.mercurial-scm.org/wix-installer/0/%s' % full_rel |
|
241 | 246 | ) |
|
242 | 247 | file_guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id) |
|
243 | 248 | |
|
244 | 249 | # IDs have length limits. So use GUID to derive them. |
|
245 | 250 | file_guid_normalized = str(file_guid).replace('-', '_') |
|
246 | 251 | file_id = 'hg.file.%s' % file_guid_normalized |
|
247 | 252 | |
|
248 | 253 | file_element = doc.createElement('File') |
|
249 | 254 | file_element.setAttribute('Id', file_id) |
|
250 | 255 | file_element.setAttribute('Source', str(source_path)) |
|
251 | 256 | file_element.setAttribute('KeyPath', 'yes') |
|
252 | 257 | file_element.setAttribute('ReadOnly', 'yes') |
|
253 | 258 | |
|
254 | 259 | component.appendChild(file_element) |
|
255 | 260 | directory_ref.appendChild(component) |
|
256 | 261 | |
|
257 | 262 | fragment.appendChild(directory_ref) |
|
258 | 263 | doc.documentElement.appendChild(fragment) |
|
259 | 264 | |
|
260 | 265 | for group, component_ids in sorted(component_groups.items()): |
|
261 | 266 | fragment = doc.createElement('Fragment') |
|
262 | 267 | component_group = doc.createElement('ComponentGroup') |
|
263 | 268 | component_group.setAttribute('Id', 'hg.group.%s' % group) |
|
264 | 269 | |
|
265 | 270 | for component_id in component_ids: |
|
266 | 271 | component_ref = doc.createElement('ComponentRef') |
|
267 | 272 | component_ref.setAttribute('Id', component_id) |
|
268 | 273 | component_group.appendChild(component_ref) |
|
269 | 274 | |
|
270 | 275 | fragment.appendChild(component_group) |
|
271 | 276 | doc.documentElement.appendChild(fragment) |
|
272 | 277 | |
|
273 | 278 | # Add <Shortcut> to files that have it defined. |
|
274 | 279 | for file_id, metadata in sorted(SHORTCUTS.items()): |
|
275 | 280 | els = doc.getElementsByTagName('File') |
|
276 | 281 | els = [el for el in els if el.getAttribute('Id') == file_id] |
|
277 | 282 | |
|
278 | 283 | if not els: |
|
279 | 284 | raise Exception('could not find File[Id=%s]' % file_id) |
|
280 | 285 | |
|
281 | 286 | for el in els: |
|
282 | 287 | shortcut = doc.createElement('Shortcut') |
|
283 | 288 | shortcut.setAttribute('Id', 'hg.shortcut.%s' % file_id) |
|
284 | 289 | shortcut.setAttribute('Directory', 'ProgramMenuDir') |
|
285 | 290 | shortcut.setAttribute('Icon', 'hgIcon.ico') |
|
286 | 291 | shortcut.setAttribute('IconIndex', '0') |
|
287 | 292 | shortcut.setAttribute('Advertise', 'yes') |
|
288 | 293 | for k, v in sorted(metadata.items()): |
|
289 | 294 | shortcut.setAttribute(k, v) |
|
290 | 295 | |
|
291 | 296 | el.appendChild(shortcut) |
|
292 | 297 | |
|
293 | 298 | return doc.toprettyxml() |
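
make_files_xml() derives stable component and file GUIDs by hashing path-shaped URLs through the UUID URL namespace, as the comment at the top of the function describes; the same file path therefore yields the same GUID on every build. A minimal sketch of that derivation (the file path is illustrative):

    import uuid

    # Deterministic: uuid5 is a hash, not a random draw, so the component
    # identity is stable across rebuilds of the installer.
    unique_id = (
        "https://www.mercurial-scm.org/wix-installer/0/component/doc/hg.1.html"
    )
    guid = uuid.uuid5(uuid.NAMESPACE_URL, unique_id)
    component_id = "hg.component.%s" % str(guid).replace("-", "_")
    print(component_id)  # identical output on every run
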
|
294 | 299 | |
|
295 | 300 | |
|
296 | 301 | def build_installer_py2exe( |
|
297 | 302 | source_dir: pathlib.Path, |
|
298 | 303 | python_exe: pathlib.Path, |
|
299 | 304 | msi_name='mercurial', |
|
300 | 305 | version=None, |
|
301 | 306 | extra_packages_script=None, |
|
302 | 307 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, |
|
303 | 308 | extra_features: typing.Optional[typing.List[str]] = None, |
|
304 | 309 | signing_info: typing.Optional[typing.Dict[str, str]] = None, |
|
305 | 310 | ): |
|
306 | 311 | """Build a WiX MSI installer using py2exe. |
|
307 | 312 | |
|
308 | 313 | ``source_dir`` is the path to the Mercurial source tree to use. |
|
309 | 314 | ``arch`` is the target architecture, either ``x86`` or ``x64``. |
|
310 | 315 | ``python_exe`` is the path to the Python executable to use/bundle. |
|
311 | 316 | ``version`` is the Mercurial version string. If not defined, |
|
312 | 317 | ``mercurial/__version__.py`` will be consulted. |
|
313 | 318 | ``extra_packages_script`` is a command to be run to inject extra packages |
|
314 | 319 | into the py2exe binary. It should stage packages into the virtualenv and |
|
315 | 320 | print a null byte followed by a newline-separated list of packages that |
|
316 | 321 | should be included in the exe. |
|
317 | 322 | ``extra_wxs`` is a dict of {wxs_name: working_dir_for_wxs_build}. |
|
318 | 323 | ``extra_features`` is a list of additional named Features to include in |
|
319 | 324 | the build. These must match Feature names in one of the wxs scripts. |
|
320 | 325 | """ |
|
321 | 326 | arch = 'x64' if r'\x64' in os.environ.get('LIB', '') else 'x86' |
|
322 | 327 | |
|
323 | 328 | hg_build_dir = source_dir / 'build' |
|
324 | 329 | |
|
325 | 330 | requirements_txt = ( |
|
326 | 331 | source_dir / 'contrib' / 'packaging' / 'requirements-windows-py2.txt' |
|
327 | 332 | ) |
|
328 | 333 | |
|
329 | 334 | build_py2exe( |
|
330 | 335 | source_dir, |
|
331 | 336 | hg_build_dir, |
|
332 | 337 | python_exe, |
|
333 | 338 | 'wix', |
|
334 | 339 | requirements_txt, |
|
335 | 340 | extra_packages=EXTRA_PACKAGES, |
|
336 | 341 | extra_packages_script=extra_packages_script, |
|
337 | 342 | extra_includes=EXTRA_INCLUDES, |
|
338 | 343 | ) |
|
339 | 344 | |
|
340 | 345 | build_dir = hg_build_dir / ('wix-%s' % arch) |
|
341 | 346 | staging_dir = build_dir / 'stage' |
|
342 | 347 | |
|
343 | 348 | build_dir.mkdir(exist_ok=True) |
|
344 | 349 | |
|
345 | 350 | # Purge the staging directory for every build so packaging is pristine. |
|
346 | 351 | if staging_dir.exists(): |
|
347 | 352 | print('purging %s' % staging_dir) |
|
348 | 353 | shutil.rmtree(staging_dir) |
|
349 | 354 | |
|
350 | 355 | stage_install(source_dir, staging_dir, lower_case=True) |
|
351 | 356 | |
|
352 | 357 | # We also install some extra files. |
|
353 | 358 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) |
|
354 | 359 | |
|
355 | 360 | # And remove some files we don't want. |
|
356 | 361 | for f in STAGING_REMOVE_FILES: |
|
357 | 362 | p = staging_dir / f |
|
358 | 363 | if p.exists(): |
|
359 | 364 | print('removing %s' % p) |
|
360 | 365 | p.unlink() |
|
361 | 366 | |
|
362 | 367 | return run_wix_packaging( |
|
363 | 368 | source_dir, |
|
364 | 369 | build_dir, |
|
365 | 370 | staging_dir, |
|
366 | 371 | arch, |
|
367 | 372 | version=version, |
|
368 | 373 | python2=True, |
|
369 | 374 | msi_name=msi_name, |
|
370 | 375 | suffix="-python2", |
|
371 | 376 | extra_wxs=extra_wxs, |
|
372 | 377 | extra_features=extra_features, |
|
373 | 378 | signing_info=signing_info, |
|
374 | 379 | ) |
|
375 | 380 | |
|
376 | 381 | |
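
For illustration, a minimal ``extra_packages_script`` honoring the protocol
described in the docstring above could look like the following sketch (the
package name is a placeholder, not something the build requires):

    #!/usr/bin/env python
    # Hypothetical helper: stage an extra package into the active
    # virtualenv, then emit the protocol build_installer_py2exe()
    # expects: a NUL byte followed by a newline-separated list of
    # package names to include in the exe.
    import subprocess
    import sys

    subprocess.run(
        [sys.executable, '-m', 'pip', 'install', 'somepackage'],
        check=True,
    )
    sys.stdout.write('\0')
    sys.stdout.write('somepackage\n')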
|
377 | 382 | def build_installer_pyoxidizer( |
|
378 | 383 | source_dir: pathlib.Path, |
|
379 | 384 | target_triple: str, |
|
380 | 385 | msi_name='mercurial', |
|
381 | 386 | version=None, |
|
382 | 387 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, |
|
383 | 388 | extra_features: typing.Optional[typing.List[str]] = None, |
|
384 | 389 | signing_info: typing.Optional[typing.Dict[str, str]] = None, |
|
390 | extra_pyoxidizer_vars=None, | |
|
385 | 391 | ): |
|
386 | 392 | """Build a WiX MSI installer using PyOxidizer.""" |
|
387 | 393 | hg_build_dir = source_dir / "build" |
|
388 | 394 | build_dir = hg_build_dir / ("wix-%s" % target_triple) |
|
389 | staging_dir = build_dir / "stage" | |
|
390 | ||
|
391 | arch = "x64" if "x86_64" in target_triple else "x86" | |
|
392 | 395 | |
|
393 | 396 | build_dir.mkdir(parents=True, exist_ok=True) |
|
394 | run_pyoxidizer(source_dir, build_dir, staging_dir, target_triple) | |
|
397 | ||
|
398 | # Need to ensure docs HTML is built because this isn't done as part of | |
|
399 | # `pip install Mercurial`. | |
|
400 | build_docs_html(source_dir) | |
|
401 | ||
|
402 | build_vars = {} | |
|
395 | 403 | |
|
396 | # We also install some extra files. | |
|
397 | process_install_rules(EXTRA_INSTALL_RULES, source_dir, staging_dir) | |
|
404 | if msi_name: | |
|
405 | build_vars["MSI_NAME"] = msi_name | |
|
406 | ||
|
407 | if version: | |
|
408 | build_vars["VERSION"] = version | |
|
409 | ||
|
410 | if extra_features: | |
|
411 | build_vars["EXTRA_MSI_FEATURES"] = ";".join(extra_features) | |
|
398 | 412 | |
|
399 | # And remove some files we don't want. | |
|
400 | for f in STAGING_REMOVE_FILES: | |
|
401 | p = staging_dir / f | |
|
402 | if p.exists(): | |
|
403 | print('removing %s' % p) | |
|
404 | p.unlink() | |
|
413 | if signing_info: | |
|
414 | if signing_info["cert_path"]: | |
|
415 | build_vars["SIGNING_PFX_PATH"] = signing_info["cert_path"] | |
|
416 | if signing_info["cert_password"]: | |
|
417 | build_vars["SIGNING_PFX_PASSWORD"] = signing_info["cert_password"] | |
|
418 | if signing_info["subject_name"]: | |
|
419 | build_vars["SIGNING_SUBJECT_NAME"] = signing_info["subject_name"] | |
|
420 | if signing_info["timestamp_url"]: | |
|
421 | build_vars["TIME_STAMP_SERVER_URL"] = signing_info["timestamp_url"] | |
|
405 | 422 | |
|
406 | return run_wix_packaging( | |
|
423 | if extra_pyoxidizer_vars: | |
|
424 | build_vars.update(json.loads(extra_pyoxidizer_vars)) | |
|
425 | ||
|
426 | if extra_wxs: | |
|
427 | raise Exception( | |
|
428 | "support for extra .wxs files has been temporarily dropped" | |
|
429 | ) | |
|
430 | ||
|
431 | out_dir = run_pyoxidizer( | |
|
407 | 432 | source_dir, |
|
408 | 433 | build_dir, |
|
409 | staging_dir, |

410 | arch, | |
|
411 | version=version, |

412 | python2=False, | |
|
413 | msi_name=msi_name, | |
|
414 | extra_wxs=extra_wxs, | |
|
415 | extra_features=extra_features, | |
|
416 | signing_info=signing_info, | |
|
434 | target_triple, | |
|
435 | build_vars=build_vars, | |
|
436 | target="msi", | |
|
417 | 437 | ) |
|
418 | 438 | |
|
439 | msi_dir = out_dir / "msi" | |
|
440 | msi_files = [f for f in os.listdir(msi_dir) if f.endswith(".msi")] | |
|
441 | ||
|
442 | if len(msi_files) != 1: | |
|
443 | raise Exception("expected exactly 1 .msi file; got %d" % len(msi_files)) | |
|
444 | ||
|
445 | msi_filename = msi_files[0] | |
|
446 | ||
|
447 | msi_path = msi_dir / msi_filename | |
|
448 | dist_path = source_dir / "dist" / msi_filename | |
|
449 | ||
|
450 | dist_path.parent.mkdir(parents=True, exist_ok=True) | |
|
451 | ||
|
452 | shutil.copyfile(msi_path, dist_path) | |
|
453 | ||
|
454 | return { | |
|
455 | "msi_path": dist_path, | |
|
456 | } | |
|
457 | ||
|
419 | 458 | |
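
As a usage sketch (the path, target triple, and version are illustrative, not
taken from the build system), ``extra_pyoxidizer_vars`` is a JSON object
string that gets merged into the PyOxidizer build variables:

    import pathlib

    result = build_installer_pyoxidizer(
        pathlib.Path('hg-src'),  # hypothetical source checkout
        'x86_64-pc-windows-msvc',
        version='5.9',
        extra_pyoxidizer_vars='{"MSI_NAME": "mercurial-custom"}',
    )
    print(result['msi_path'])  # the .msi copied under dist/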
|
420 | 459 | def run_wix_packaging( |
|
421 | 460 | source_dir: pathlib.Path, |
|
422 | 461 | build_dir: pathlib.Path, |
|
423 | 462 | staging_dir: pathlib.Path, |
|
424 | 463 | arch: str, |
|
425 | 464 | version: str, |
|
426 | 465 | python2: bool, |
|
427 | 466 | msi_name: typing.Optional[str] = "mercurial", |
|
428 | 467 | suffix: str = "", |
|
429 | 468 | extra_wxs: typing.Optional[typing.Dict[str, str]] = None, |
|
430 | 469 | extra_features: typing.Optional[typing.List[str]] = None, |
|
431 | 470 | signing_info: typing.Optional[typing.Dict[str, str]] = None, |
|
432 | 471 | ): |
|
433 | 472 | """Invokes WiX to package up a built Mercurial. |
|
434 | 473 | |
|
435 | 474 | ``signing_info`` is a dict defining properties to facilitate signing the |
|
436 | 475 | installer. Recognized keys include ``name``, ``subject_name``, |
|
437 | 476 | ``cert_path``, ``cert_password``, and ``timestamp_url``. If populated, |
|
438 | 477 | we will sign both the hg.exe and the .msi using the signing credentials |
|
439 | 478 | specified. |
|
440 | 479 | """ |
|
441 | 480 | |
|
442 | 481 | orig_version = version or find_version(source_dir) |
|
443 | 482 | version = normalize_windows_version(orig_version) |
|
444 | 483 | print('using version string: %s' % version) |
|
445 | 484 | if version != orig_version: |
|
446 | 485 | print('(normalized from: %s)' % orig_version) |
|
447 | 486 | |
|
448 | 487 | if signing_info: |
|
449 | 488 | sign_with_signtool( |
|
450 | 489 | staging_dir / "hg.exe", |
|
451 | 490 | "%s %s" % (signing_info["name"], version), |
|
452 | 491 | subject_name=signing_info["subject_name"], |
|
453 | 492 | cert_path=signing_info["cert_path"], |
|
454 | 493 | cert_password=signing_info["cert_password"], |
|
455 | 494 | timestamp_url=signing_info["timestamp_url"], |
|
456 | 495 | ) |
|
457 | 496 | |
|
458 | 497 | wix_dir = source_dir / 'contrib' / 'packaging' / 'wix' |
|
459 | 498 | |
|
460 | 499 | wix_pkg, wix_entry = download_entry('wix', build_dir) |
|
461 | 500 | wix_path = build_dir / ('wix-%s' % wix_entry['version']) |
|
462 | 501 | |
|
463 | 502 | if not wix_path.exists(): |
|
464 | 503 | extract_zip_to_directory(wix_pkg, wix_path) |
|
465 | 504 | |
|
466 | 505 | if python2: |
|
467 | 506 | ensure_vc90_merge_modules(build_dir) |
|
468 | 507 | |
|
469 | 508 | source_build_rel = pathlib.Path(os.path.relpath(source_dir, build_dir)) |
|
470 | 509 | |
|
471 | 510 | defines = {'Platform': arch} |
|
472 | 511 | |
|
473 | 512 | # Derive a .wxs file with the staged files. |
|
474 | 513 | manifest_wxs = build_dir / 'stage.wxs' |
|
475 | 514 | with manifest_wxs.open('w', encoding='utf-8') as fh: |
|
476 | 515 | fh.write(make_files_xml(staging_dir, is_x64=arch == 'x64')) |
|
477 | 516 | |
|
478 | 517 | run_candle(wix_path, build_dir, manifest_wxs, staging_dir, defines=defines) |
|
479 | 518 | |
|
480 | 519 | for source, rel_path in sorted((extra_wxs or {}).items()): |
|
481 | 520 | run_candle(wix_path, build_dir, source, rel_path, defines=defines) |
|
482 | 521 | |
|
483 | 522 | source = wix_dir / 'mercurial.wxs' |
|
484 | 523 | defines['Version'] = version |
|
485 | 524 | defines['Comments'] = 'Installs Mercurial version %s' % version |
|
486 | 525 | |
|
487 | 526 | if python2: |
|
488 | 527 | defines["PythonVersion"] = "2" |
|
489 | 528 | defines['VCRedistSrcDir'] = str(build_dir) |
|
490 | 529 | else: |
|
491 | 530 | defines["PythonVersion"] = "3" |
|
492 | 531 | |
|
493 | 532 | if (staging_dir / "lib").exists(): |
|
494 | 533 | defines["MercurialHasLib"] = "1" |
|
495 | 534 | |
|
496 | 535 | if extra_features: |
|
497 | 536 | assert all(';' not in f for f in extra_features) |
|
498 | 537 | defines['MercurialExtraFeatures'] = ';'.join(extra_features) |
|
499 | 538 | |
|
500 | 539 | run_candle(wix_path, build_dir, source, source_build_rel, defines=defines) |
|
501 | 540 | |
|
502 | 541 | msi_path = ( |
|
503 | 542 | source_dir |
|
504 | 543 | / 'dist' |
|
505 | 544 | / ('%s-%s-%s%s.msi' % (msi_name, orig_version, arch, suffix)) |
|
506 | 545 | ) |
|
507 | 546 | |
|
508 | 547 | args = [ |
|
509 | 548 | str(wix_path / 'light.exe'), |
|
510 | 549 | '-nologo', |
|
511 | 550 | '-ext', |
|
512 | 551 | 'WixUIExtension', |
|
513 | 552 | '-sw1076', |
|
514 | 553 | '-spdb', |
|
515 | 554 | '-o', |
|
516 | 555 | str(msi_path), |
|
517 | 556 | ] |
|
518 | 557 | |
|
519 | 558 | for source, rel_path in sorted((extra_wxs or {}).items()): |
|
520 | 559 | assert source.endswith('.wxs') |
|
521 | 560 | source = os.path.basename(source) |
|
522 | 561 | args.append(str(build_dir / ('%s.wixobj' % source[:-4]))) |
|
523 | 562 | |
|
524 | 563 | args.extend( |
|
525 | 564 | [ |
|
526 | 565 | str(build_dir / 'stage.wixobj'), |
|
527 | 566 | str(build_dir / 'mercurial.wixobj'), |
|
528 | 567 | ] |
|
529 | 568 | ) |
|
530 | 569 | |
|
531 | 570 | subprocess.run(args, cwd=str(source_dir), check=True) |
|
532 | 571 | |
|
533 | 572 | print('%s created' % msi_path) |
|
534 | 573 | |
|
535 | 574 | if signing_info: |
|
536 | 575 | sign_with_signtool( |
|
537 | 576 | msi_path, |
|
538 | 577 | "%s %s" % (signing_info["name"], version), |
|
539 | 578 | subject_name=signing_info["subject_name"], |
|
540 | 579 | cert_path=signing_info["cert_path"], |
|
541 | 580 | cert_password=signing_info["cert_password"], |
|
542 | 581 | timestamp_url=signing_info["timestamp_url"], |
|
543 | 582 | ) |
|
544 | 583 | |
|
545 | 584 | return { |
|
546 | 585 | 'msi_path': msi_path, |
|
547 | 586 | } |
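
The ``signing_info`` mapping consumed in both installer paths uses the keys
named in the docstring above; a sketch with placeholder values only:

    signing_info = {
        'name': 'Mercurial',  # product name used in the signature description
        'subject_name': None,  # or a certificate subject from the cert store
        'cert_path': 'path/to/cert.pfx',  # placeholder path
        'cert_password': 'hunter2',  # placeholder password
        'timestamp_url': 'http://timestamp.example',  # placeholder URL
    }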
@@ -1,153 +1,157 @@
|
1 | 1 | <?xml version='1.0' encoding='windows-1252'?> |
|
2 | 2 | <Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'> |
|
3 | 3 | |
|
4 | 4 | <!-- Copyright 2010 Steve Borho <steve@borho.org> |
|
5 | 5 | |
|
6 | 6 | This software may be used and distributed according to the terms of the |
|
7 | 7 | GNU General Public License version 2 or any later version. --> |
|
8 | 8 | |
|
9 | 9 | <?include guids.wxi ?> |
|
10 | 10 | <?include defines.wxi ?> |
|
11 | 11 | |
|
12 | 12 | <?if $(var.Platform) = "x64" ?> |
|
13 | 13 | <?define PFolder = ProgramFiles64Folder ?> |
|
14 | 14 | <?else?> |
|
15 | 15 | <?define PFolder = ProgramFilesFolder ?> |
|
16 | 16 | <?endif?> |
|
17 | 17 | |
|
18 | 18 | <Product Id='*' |
|
19 | 19 | Name='Mercurial $(var.Version) ($(var.Platform))' |
|
20 | 20 | UpgradeCode='$(var.ProductUpgradeCode)' |
|
21 | 21 | Language='1033' Codepage='1252' Version='$(var.Version)' |
|
22 | 22 | Manufacturer='Olivia Mackall and others'> |
|
23 | 23 | |
|
24 | 24 | <Package Id='*' |
|
25 | 25 | Keywords='Installer' |
|
26 | 26 | Description="Mercurial distributed SCM (version $(var.Version))" |
|
27 | 27 | Comments='$(var.Comments)' |
|
28 | 28 | Platform='$(var.Platform)' |
|
29 | 29 | Manufacturer='Olivia Mackall and others' |
|
30 | 30 | InstallerVersion='300' Languages='1033' Compressed='yes' SummaryCodepage='1252' /> |
|
31 | 31 | |
|
32 | 32 | <Media Id='1' Cabinet='mercurial.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1' |
|
33 | 33 | CompressionLevel='high' /> |
|
34 | 34 | <Property Id='DiskPrompt' Value="Mercurial $(var.Version) Installation [1]" /> |
|
35 | 35 | |
|
36 | 36 | <Condition Message='Mercurial MSI installers require Windows XP or higher'> |
|
37 | 37 | VersionNT >= 501 |
|
38 | 38 | </Condition> |
|
39 | 39 | |
|
40 | 40 | <Property Id="INSTALLDIR"> |
|
41 | 41 | <ComponentSearch Id='SearchForMainExecutableComponent' |
|
42 | 42 | Guid='$(var.ComponentMainExecutableGUID)' |
|
43 | 43 | Type='directory' /> |
|
44 | 44 | </Property> |
|
45 | 45 | |
|
46 | 46 | <!--Property Id='ARPCOMMENTS'>any comments</Property--> |
|
47 | 47 | <Property Id='ARPCONTACT'>mercurial@mercurial-scm.org</Property> |
|
48 | 48 | <Property Id='ARPHELPLINK'>https://mercurial-scm.org/wiki/</Property> |
|
49 | 49 | <Property Id='ARPURLINFOABOUT'>https://mercurial-scm.org/about/</Property> |
|
50 | 50 | <Property Id='ARPURLUPDATEINFO'>https://mercurial-scm.org/downloads/</Property> |
|
51 | 51 | <Property Id='ARPHELPTELEPHONE'>https://mercurial-scm.org/wiki/Support</Property> |
|
52 | 52 | <Property Id='ARPPRODUCTICON'>hgIcon.ico</Property> |
|
53 | 53 | |
|
54 | 54 | <Property Id='INSTALLEDMERCURIALPRODUCTS' Secure='yes'></Property> |
|
55 | 55 | <Property Id='REINSTALLMODE'>amus</Property> |
|
56 | 56 | |
|
57 | 57 | <!--Auto-accept the license page--> |
|
58 | 58 | <Property Id='LicenseAccepted'>1</Property> |
|
59 | 59 | |
|
60 | 60 | <Directory Id='TARGETDIR' Name='SourceDir'> |
|
61 | 61 | <Directory Id='$(var.PFolder)' Name='PFiles'> |
|
62 | 62 | <Directory Id='INSTALLDIR' Name='Mercurial'> |
|
63 | 63 | <Component Id='MainExecutable' Guid='$(var.ComponentMainExecutableGUID)' Win64='$(var.IsX64)'> |
|
64 | 64 | <CreateFolder /> |
|
65 | 65 | <Environment Id="Environment" Name="PATH" Part="last" System="yes" |
|
66 | 66 | Permanent="no" Value="[INSTALLDIR]" Action="set" /> |
|
67 | 67 | </Component> |
|
68 | 68 | </Directory> |
|
69 | 69 | </Directory> |
|
70 | 70 | |
|
71 | 71 | <Directory Id="ProgramMenuFolder" Name="Programs"> |
|
72 | 72 | <Directory Id="ProgramMenuDir" Name="Mercurial $(var.Version)"> |
|
73 | 73 | <Component Id="ProgramMenuDir" Guid="$(var.ProgramMenuDir.guid)" Win64='$(var.IsX64)'> |
|
74 | 74 | <RemoveFolder Id='ProgramMenuDir' On='uninstall' /> |
|
75 | 75 | <RegistryValue Root='HKCU' Key='Software\Mercurial\InstallDir' Type='string' |
|
76 | 76 | Value='[INSTALLDIR]' KeyPath='yes' /> |
|
77 | 77 | <Shortcut Id='UrlShortcut' Directory='ProgramMenuDir' Name='Mercurial Web Site' |
|
78 | 78 | Target='[ARPHELPLINK]' Icon="hgIcon.ico" IconIndex='0' /> |
|
79 | 79 | </Component> |
|
80 | 80 | </Directory> |
|
81 | 81 | </Directory> |
|
82 | 82 | |
|
83 | 83 | <!-- Install VCRedist merge modules on Python 2. On Python 3, |
|
84 | 84 | vcruntimeXXX.dll is part of the install layout and gets picked up |
|
85 | 85 | as a regular file. --> |
|
86 | 86 | <?if $(var.PythonVersion) = "2" ?> |
|
87 | 87 | <?if $(var.Platform) = "x86" ?> |
|
88 | 88 | <Merge Id='VCRuntime' DiskId='1' Language='1033' |
|
89 | 89 | SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x86_msm.msm' /> |
|
90 | 90 | <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' |
|
91 | 91 | SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x86_msm.msm' /> |
|
92 | 92 | <?else?> |
|
93 | 93 | <Merge Id='VCRuntime' DiskId='1' Language='1033' |
|
94 | 94 | SourceFile='$(var.VCRedistSrcDir)\microsoft.vcxx.crt.x64_msm.msm' /> |
|
95 | 95 | <Merge Id='VCRuntimePolicy' DiskId='1' Language='1033' |
|
96 | 96 | SourceFile='$(var.VCRedistSrcDir)\policy.x.xx.microsoft.vcxx.crt.x64_msm.msm' /> |
|
97 | 97 | <?endif?> |
|
98 | 98 | <?endif?> |
|
99 | 99 | </Directory> |
|
100 | 100 | |
|
101 | 101 | <Feature Id='Complete' Title='Mercurial' Description='The complete package' |
|
102 | 102 | Display='expand' Level='1' ConfigurableDirectory='INSTALLDIR' > |
|
103 | 103 | <Feature Id='MainProgram' Title='Program' Description='Mercurial command line app' |
|
104 | 104 | Level='1' Absent='disallow' > |
|
105 | 105 | <ComponentRef Id='MainExecutable' /> |
|
106 | 106 | <ComponentRef Id='ProgramMenuDir' /> |
|
107 | 107 | <ComponentGroupRef Id="hg.group.ROOT" /> |
|
108 | 108 | <ComponentGroupRef Id="hg.group.defaultrc" /> |
|
109 | 109 | <ComponentGroupRef Id="hg.group.helptext" /> |
|
110 | 110 | <?ifdef MercurialHasLib?> |
|
111 | 111 | <ComponentGroupRef Id="hg.group.lib" /> |
|
112 | 112 | <?endif?> |
|
113 | 113 | <ComponentGroupRef Id="hg.group.templates" /> |
|
114 | 114 | <?if $(var.PythonVersion) = "2" ?> |
|
115 | 115 | <MergeRef Id='VCRuntime' /> |
|
116 | 116 | <MergeRef Id='VCRuntimePolicy' /> |
|
117 | 117 | <?endif?> |
|
118 | 118 | </Feature> |
|
119 | 119 | <?ifdef MercurialExtraFeatures?> |
|
120 | 120 | <?foreach EXTRAFEAT in $(var.MercurialExtraFeatures)?> |
|
121 | 121 | <FeatureRef Id="$(var.EXTRAFEAT)" /> |
|
122 | 122 | <?endforeach?> |
|
123 | 123 | <?endif?> |
|
124 | 124 | <Feature Id='Locales' Title='Translations' Description='Translations' Level='1'> |
|
125 | 125 | <ComponentGroupRef Id="hg.group.locale" /> |
|
126 | 126 | </Feature> |
|
127 | 127 | <Feature Id='Documentation' Title='Documentation' Description='HTML man pages' Level='1'> |
|
128 | 128 | <ComponentGroupRef Id="hg.group.doc" /> |
|
129 | 129 | </Feature> |
|
130 | 130 | <Feature Id='Misc' Title='Miscellaneous' Description='Contributed scripts' Level='1'> |
|
131 | 131 | <ComponentGroupRef Id="hg.group.contrib" /> |
|
132 | 132 | </Feature> |
|
133 | 133 | </Feature> |
|
134 | 134 | |
|
135 | 135 | <UIRef Id="WixUI_FeatureTree" /> |
|
136 | 136 | <UIRef Id="WixUI_ErrorProgressText" /> |
|
137 | 137 | |
|
138 | <?ifdef PyOxidizer?> | |
|
139 | <WixVariable Id="WixUILicenseRtf" Value="COPYING.rtf" /> | |
|
140 | <Icon Id="hgIcon.ico" SourceFile="mercurial.ico" /> | |
|
141 | <?else?> | |
|
138 | 142 | <WixVariable Id="WixUILicenseRtf" Value="contrib\packaging\wix\COPYING.rtf" /> |
|
139 | ||
|
140 | 143 | <Icon Id="hgIcon.ico" SourceFile="contrib/win32/mercurial.ico" /> |
|
144 | <?endif?> | |
|
141 | 145 | |
|
142 | 146 | <Upgrade Id='$(var.ProductUpgradeCode)'> |
|
143 | 147 | <UpgradeVersion |
|
144 | 148 | IncludeMinimum='yes' Minimum='0.0.0' IncludeMaximum='no' OnlyDetect='no' |
|
145 | 149 | Property='INSTALLEDMERCURIALPRODUCTS' /> |
|
146 | 150 | </Upgrade> |
|
147 | 151 | |
|
148 | 152 | <InstallExecuteSequence> |
|
149 | 153 | <RemoveExistingProducts After='InstallInitialize'/> |
|
150 | 154 | </InstallExecuteSequence> |
|
151 | 155 | |
|
152 | 156 | </Product> |
|
153 | 157 | </Wix> |
@@ -1,3928 +1,3980 @@
|
1 | 1 | # perf.py - performance test routines |
|
2 | 2 | '''helper extension to measure performance |
|
3 | 3 | |
|
4 | 4 | Configurations |
|
5 | 5 | ============== |
|
6 | 6 | |
|
7 | 7 | ``perf`` |
|
8 | 8 | -------- |
|
9 | 9 | |
|
10 | 10 | ``all-timing`` |
|
11 | 11 | When set, additional statistics will be reported for each benchmark: best, |
|
12 | 12 | worst, median, and average. If not set, only the best timing is reported
|
13 | 13 | (default: off). |
|
14 | 14 | |
|
15 | 15 | ``presleep`` |
|
16 | 16 | number of seconds to wait before each group of runs (default: 1)
|
17 | 17 | |
|
18 | 18 | ``pre-run`` |
|
19 | 19 | number of runs to perform before starting measurement.
|
20 | 20 | |
|
21 | 21 | ``profile-benchmark`` |
|
22 | 22 | Enable profiling for the benchmarked section. |
|
23 | 23 | (The first iteration is benchmarked) |
|
24 | 24 | |
|
25 | 25 | ``run-limits`` |
|
26 | 26 | Control the number of runs each benchmark will perform. The option value |
|
27 | 27 | should be a list of `<time>-<numberofrun>` pairs. After each run the |
|
28 | 28 | conditions are considered in order with the following logic: |
|
29 | 29 | |
|
30 | 30 | If benchmark has been running for <time> seconds, and we have performed |
|
31 | 31 | <numberofrun> iterations, stop the benchmark, |
|
32 | 32 | |
|
33 | 33 | The default value is: `3.0-100, 10.0-3` |
|
34 | 34 | |
|
35 | 35 | ``stub`` |
|
36 | 36 | When set, benchmarks will only be run once, useful for testing |
|
37 | 37 | (default: off) |
|
38 | 38 | ''' |
|
39 | 39 | |
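
For example, the knobs documented above are read from the ``perf`` section of
an ordinary Mercurial configuration file; a sketch with illustrative values:

    [perf]
    all-timing = yes
    presleep = 0
    pre-run = 2
    # stop after 5s once 50 runs completed, or after 20s once 5 runs completed
    run-limits = 5.0-50, 20.0-5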
|
40 | 40 | # "historical portability" policy of perf.py: |
|
41 | 41 | # |
|
42 | 42 | # We have to do: |
|
43 | 43 | # - make perf.py "loadable" with as wide a range of Mercurial versions as possible
|
44 | 44 | # This doesn't mean that perf commands work correctly with that Mercurial. |
|
45 | 45 | # BTW, perf.py itself has been available since 1.1 (or eb240755386d). |
|
46 | 46 | # - make historical perf commands work correctly with as wide a range

47 | 47 | # of Mercurial versions as possible
|
48 | 48 | # |
|
49 | 49 | # We have to do, if possible with reasonable cost: |
|
50 | 50 | # - make recent perf commands for historical features work correctly
|
51 | 51 | # with early Mercurial |
|
52 | 52 | # |
|
53 | 53 | # We don't have to do: |
|
54 | 54 | # - make perf commands for recent features work correctly with early
|
55 | 55 | # Mercurial |
|
56 | 56 | |
|
57 | 57 | from __future__ import absolute_import |
|
58 | 58 | import contextlib |
|
59 | 59 | import functools |
|
60 | 60 | import gc |
|
61 | 61 | import os |
|
62 | 62 | import random |
|
63 | 63 | import shutil |
|
64 | 64 | import struct |
|
65 | 65 | import sys |
|
66 | 66 | import tempfile |
|
67 | 67 | import threading |
|
68 | 68 | import time |
|
69 | ||
|
70 | import mercurial.revlog | |
|
69 | 71 | from mercurial import ( |
|
70 | 72 | changegroup, |
|
71 | 73 | cmdutil, |
|
72 | 74 | commands, |
|
73 | 75 | copies, |
|
74 | 76 | error, |
|
75 | 77 | extensions, |
|
76 | 78 | hg, |
|
77 | 79 | mdiff, |
|
78 | 80 | merge, |
|
79 | revlog, | |
|
80 | 81 | util, |
|
81 | 82 | ) |
|
82 | 83 | |
|
83 | 84 | # for "historical portability": |
|
84 | 85 | # try to import modules separately (in dict order), and ignore |
|
85 | 86 | # failure, because these aren't available with early Mercurial |
|
86 | 87 | try: |
|
87 | 88 | from mercurial import branchmap # since 2.5 (or bcee63733aad) |
|
88 | 89 | except ImportError: |
|
89 | 90 | pass |
|
90 | 91 | try: |
|
91 | 92 | from mercurial import obsolete # since 2.3 (or ad0d6c2b3279) |
|
92 | 93 | except ImportError: |
|
93 | 94 | pass |
|
94 | 95 | try: |
|
95 | 96 | from mercurial import registrar # since 3.7 (or 37d50250b696) |
|
96 | 97 | |
|
97 | 98 | dir(registrar) # forcibly load it |
|
98 | 99 | except ImportError: |
|
99 | 100 | registrar = None |
|
100 | 101 | try: |
|
101 | 102 | from mercurial import repoview # since 2.5 (or 3a6ddacb7198) |
|
102 | 103 | except ImportError: |
|
103 | 104 | pass |
|
104 | 105 | try: |
|
105 | 106 | from mercurial.utils import repoviewutil # since 5.0 |
|
106 | 107 | except ImportError: |
|
107 | 108 | repoviewutil = None |
|
108 | 109 | try: |
|
109 | 110 | from mercurial import scmutil # since 1.9 (or 8b252e826c68) |
|
110 | 111 | except ImportError: |
|
111 | 112 | pass |
|
112 | 113 | try: |
|
113 | 114 | from mercurial import setdiscovery # since 1.9 (or cb98fed52495) |
|
114 | 115 | except ImportError: |
|
115 | 116 | pass |
|
116 | 117 | |
|
117 | 118 | try: |
|
118 | 119 | from mercurial import profiling |
|
119 | 120 | except ImportError: |
|
120 | 121 | profiling = None |
|
121 | 122 | |
|
123 | try: | |
|
124 | from mercurial.revlogutils import constants as revlog_constants | |
|
125 | ||
|
126 | perf_rl_kind = (revlog_constants.KIND_OTHER, b'created-by-perf') | |
|
127 | ||
|
128 | def revlog(opener, *args, **kwargs): | |
|
129 | return mercurial.revlog.revlog(opener, perf_rl_kind, *args, **kwargs) | |
|
130 | ||
|
131 | ||
|
132 | except (ImportError, AttributeError): | |
|
133 | perf_rl_kind = None | |
|
134 | ||
|
135 | def revlog(opener, *args, **kwargs): | |
|
136 | return mercurial.revlog.revlog(opener, *args, **kwargs) | |
|
137 | ||
|
122 | 138 | |
|
123 | 139 | def identity(a): |
|
124 | 140 | return a |
|
125 | 141 | |
|
126 | 142 | |
|
127 | 143 | try: |
|
128 | 144 | from mercurial import pycompat |
|
129 | 145 | |
|
130 | 146 | getargspec = pycompat.getargspec # added to module after 4.5 |
|
131 | 147 | _byteskwargs = pycompat.byteskwargs # since 4.1 (or fbc3f73dc802) |
|
132 | 148 | _sysstr = pycompat.sysstr # since 4.0 (or 2219f4f82ede) |
|
133 | 149 | _bytestr = pycompat.bytestr # since 4.2 (or b70407bd84d5) |
|
134 | 150 | _xrange = pycompat.xrange # since 4.8 (or 7eba8f83129b) |
|
135 | 151 | fsencode = pycompat.fsencode # since 3.9 (or f4a5e0e86a7e) |
|
136 | 152 | if pycompat.ispy3: |
|
137 | 153 | _maxint = sys.maxsize # per py3 docs for replacing maxint |
|
138 | 154 | else: |
|
139 | 155 | _maxint = sys.maxint |
|
140 | 156 | except (NameError, ImportError, AttributeError): |
|
141 | 157 | import inspect |
|
142 | 158 | |
|
143 | 159 | getargspec = inspect.getargspec |
|
144 | 160 | _byteskwargs = identity |
|
145 | 161 | _bytestr = str |
|
146 | 162 | fsencode = identity # no py3 support |
|
147 | 163 | _maxint = sys.maxint # no py3 support |
|
148 | 164 | _sysstr = lambda x: x # no py3 support |
|
149 | 165 | _xrange = xrange |
|
150 | 166 | |
|
151 | 167 | try: |
|
152 | 168 | # 4.7+ |
|
153 | 169 | queue = pycompat.queue.Queue |
|
154 | 170 | except (NameError, AttributeError, ImportError): |
|
155 | 171 | # <4.7. |
|
156 | 172 | try: |
|
157 | 173 | queue = pycompat.queue |
|
158 | 174 | except (NameError, AttributeError, ImportError): |
|
159 | 175 | import Queue as queue |
|
160 | 176 | |
|
161 | 177 | try: |
|
162 | 178 | from mercurial import logcmdutil |
|
163 | 179 | |
|
164 | 180 | makelogtemplater = logcmdutil.maketemplater |
|
165 | 181 | except (AttributeError, ImportError): |
|
166 | 182 | try: |
|
167 | 183 | makelogtemplater = cmdutil.makelogtemplater |
|
168 | 184 | except (AttributeError, ImportError): |
|
169 | 185 | makelogtemplater = None |
|
170 | 186 | |
|
171 | 187 | # for "historical portability": |
|
172 | 188 | # define util.safehasattr forcibly, because util.safehasattr has been |
|
173 | 189 | # available since 1.9.3 (or 94b200a11cf7) |
|
174 | 190 | _undefined = object() |
|
175 | 191 | |
|
176 | 192 | |
|
177 | 193 | def safehasattr(thing, attr): |
|
178 | 194 | return getattr(thing, _sysstr(attr), _undefined) is not _undefined |
|
179 | 195 | |
|
180 | 196 | |
|
181 | 197 | setattr(util, 'safehasattr', safehasattr) |
|
182 | 198 | |
|
183 | 199 | # for "historical portability": |
|
184 | 200 | # define util.timer forcibly, because util.timer has been available |
|
185 | 201 | # since ae5d60bb70c9 |
|
186 | 202 | if safehasattr(time, 'perf_counter'): |
|
187 | 203 | util.timer = time.perf_counter |
|
188 | 204 | elif os.name == b'nt': |
|
189 | 205 | util.timer = time.clock |
|
190 | 206 | else: |
|
191 | 207 | util.timer = time.time |
|
192 | 208 | |
|
193 | 209 | # for "historical portability": |
|
194 | 210 | # use locally defined empty option list, if formatteropts isn't |
|
195 | 211 | # available, because commands.formatteropts has been available since |
|
196 | 212 | # 3.2 (or 7a7eed5176a4), even though formatting itself has been |
|
197 | 213 | # available since 2.2 (or ae5f92e154d3) |
|
198 | 214 | formatteropts = getattr( |
|
199 | 215 | cmdutil, "formatteropts", getattr(commands, "formatteropts", []) |
|
200 | 216 | ) |
|
201 | 217 | |
|
202 | 218 | # for "historical portability": |
|
203 | 219 | # use locally defined option list, if debugrevlogopts isn't available, |
|
204 | 220 | # because commands.debugrevlogopts has been available since 3.7 (or |
|
205 | 221 | # 5606f7d0d063), even though cmdutil.openrevlog() has been available |
|
206 | 222 | # since 1.9 (or a79fea6b3e77). |
|
207 | 223 | revlogopts = getattr( |
|
208 | 224 | cmdutil, |
|
209 | 225 | "debugrevlogopts", |
|
210 | 226 | getattr( |
|
211 | 227 | commands, |
|
212 | 228 | "debugrevlogopts", |
|
213 | 229 | [ |
|
214 | 230 | (b'c', b'changelog', False, b'open changelog'), |
|
215 | 231 | (b'm', b'manifest', False, b'open manifest'), |
|
216 | 232 | (b'', b'dir', False, b'open directory manifest'), |
|
217 | 233 | ], |
|
218 | 234 | ), |
|
219 | 235 | ) |
|
220 | 236 | |
|
221 | 237 | cmdtable = {} |
|
222 | 238 | |
|
223 | 239 | # for "historical portability": |
|
224 | 240 | # define parsealiases locally, because cmdutil.parsealiases has been |
|
225 | 241 | # available since 1.5 (or 6252852b4332) |
|
226 | 242 | def parsealiases(cmd): |
|
227 | 243 | return cmd.split(b"|") |
|
228 | 244 | |
|
229 | 245 | |
|
230 | 246 | if safehasattr(registrar, 'command'): |
|
231 | 247 | command = registrar.command(cmdtable) |
|
232 | 248 | elif safehasattr(cmdutil, 'command'): |
|
233 | 249 | command = cmdutil.command(cmdtable) |
|
234 | 250 | if 'norepo' not in getargspec(command).args: |
|
235 | 251 | # for "historical portability": |
|
236 | 252 | # wrap original cmdutil.command, because "norepo" option has |
|
237 | 253 | # been available since 3.1 (or 75a96326cecb) |
|
238 | 254 | _command = command |
|
239 | 255 | |
|
240 | 256 | def command(name, options=(), synopsis=None, norepo=False): |
|
241 | 257 | if norepo: |
|
242 | 258 | commands.norepo += b' %s' % b' '.join(parsealiases(name)) |
|
243 | 259 | return _command(name, list(options), synopsis) |
|
244 | 260 | |
|
245 | 261 | |
|
246 | 262 | else: |
|
247 | 263 | # for "historical portability": |
|
248 | 264 | # define "@command" annotation locally, because cmdutil.command |
|
249 | 265 | # has been available since 1.9 (or 2daa5179e73f) |
|
250 | 266 | def command(name, options=(), synopsis=None, norepo=False): |
|
251 | 267 | def decorator(func): |
|
252 | 268 | if synopsis: |
|
253 | 269 | cmdtable[name] = func, list(options), synopsis |
|
254 | 270 | else: |
|
255 | 271 | cmdtable[name] = func, list(options) |
|
256 | 272 | if norepo: |
|
257 | 273 | commands.norepo += b' %s' % b' '.join(parsealiases(name)) |
|
258 | 274 | return func |
|
259 | 275 | |
|
260 | 276 | return decorator |
|
261 | 277 | |
|
262 | 278 | |
|
263 | 279 | try: |
|
264 | 280 | import mercurial.registrar |
|
265 | 281 | import mercurial.configitems |
|
266 | 282 | |
|
267 | 283 | configtable = {} |
|
268 | 284 | configitem = mercurial.registrar.configitem(configtable) |
|
269 | 285 | configitem( |
|
270 | 286 | b'perf', |
|
271 | 287 | b'presleep', |
|
272 | 288 | default=mercurial.configitems.dynamicdefault, |
|
273 | 289 | experimental=True, |
|
274 | 290 | ) |
|
275 | 291 | configitem( |
|
276 | 292 | b'perf', |
|
277 | 293 | b'stub', |
|
278 | 294 | default=mercurial.configitems.dynamicdefault, |
|
279 | 295 | experimental=True, |
|
280 | 296 | ) |
|
281 | 297 | configitem( |
|
282 | 298 | b'perf', |
|
283 | 299 | b'parentscount', |
|
284 | 300 | default=mercurial.configitems.dynamicdefault, |
|
285 | 301 | experimental=True, |
|
286 | 302 | ) |
|
287 | 303 | configitem( |
|
288 | 304 | b'perf', |
|
289 | 305 | b'all-timing', |
|
290 | 306 | default=mercurial.configitems.dynamicdefault, |
|
291 | 307 | experimental=True, |
|
292 | 308 | ) |
|
293 | 309 | configitem( |
|
294 | 310 | b'perf', |
|
295 | 311 | b'pre-run', |
|
296 | 312 | default=mercurial.configitems.dynamicdefault, |
|
297 | 313 | ) |
|
298 | 314 | configitem( |
|
299 | 315 | b'perf', |
|
300 | 316 | b'profile-benchmark', |
|
301 | 317 | default=mercurial.configitems.dynamicdefault, |
|
302 | 318 | ) |
|
303 | 319 | configitem( |
|
304 | 320 | b'perf', |
|
305 | 321 | b'run-limits', |
|
306 | 322 | default=mercurial.configitems.dynamicdefault, |
|
307 | 323 | experimental=True, |
|
308 | 324 | ) |
|
309 | 325 | except (ImportError, AttributeError): |
|
310 | 326 | pass |
|
311 | 327 | except TypeError: |
|
312 | 328 | # compatibility fix for a11fd395e83f |
|
313 | 329 | # hg version: 5.2 |
|
314 | 330 | configitem( |
|
315 | 331 | b'perf', |
|
316 | 332 | b'presleep', |
|
317 | 333 | default=mercurial.configitems.dynamicdefault, |
|
318 | 334 | ) |
|
319 | 335 | configitem( |
|
320 | 336 | b'perf', |
|
321 | 337 | b'stub', |
|
322 | 338 | default=mercurial.configitems.dynamicdefault, |
|
323 | 339 | ) |
|
324 | 340 | configitem( |
|
325 | 341 | b'perf', |
|
326 | 342 | b'parentscount', |
|
327 | 343 | default=mercurial.configitems.dynamicdefault, |
|
328 | 344 | ) |
|
329 | 345 | configitem( |
|
330 | 346 | b'perf', |
|
331 | 347 | b'all-timing', |
|
332 | 348 | default=mercurial.configitems.dynamicdefault, |
|
333 | 349 | ) |
|
334 | 350 | configitem( |
|
335 | 351 | b'perf', |
|
336 | 352 | b'pre-run', |
|
337 | 353 | default=mercurial.configitems.dynamicdefault, |
|
338 | 354 | ) |
|
339 | 355 | configitem( |
|
340 | 356 | b'perf', |
|
341 | 357 | b'profile-benchmark', |
|
342 | 358 | default=mercurial.configitems.dynamicdefault, |
|
343 | 359 | ) |
|
344 | 360 | configitem( |
|
345 | 361 | b'perf', |
|
346 | 362 | b'run-limits', |
|
347 | 363 | default=mercurial.configitems.dynamicdefault, |
|
348 | 364 | ) |
|
349 | 365 | |
|
350 | 366 | |
|
351 | 367 | def getlen(ui): |
|
352 | 368 | if ui.configbool(b"perf", b"stub", False): |
|
353 | 369 | return lambda x: 1 |
|
354 | 370 | return len |
|
355 | 371 | |
|
356 | 372 | |
|
357 | 373 | class noop(object): |
|
358 | 374 | """dummy context manager""" |
|
359 | 375 | |
|
360 | 376 | def __enter__(self): |
|
361 | 377 | pass |
|
362 | 378 | |
|
363 | 379 | def __exit__(self, *args): |
|
364 | 380 | pass |
|
365 | 381 | |
|
366 | 382 | |
|
367 | 383 | NOOPCTX = noop() |
|
368 | 384 | |
|
369 | 385 | |
|
370 | 386 | def gettimer(ui, opts=None): |
|
371 | 387 | """return a timer function and formatter: (timer, formatter) |
|
372 | 388 | |
|
373 | 389 | This function exists to gather the creation of formatter in a single |
|
374 | 390 | place instead of duplicating it in all performance commands.""" |
|
375 | 391 | |
|
376 | 392 | # enforce an idle period before execution to counteract power management |
|
377 | 393 | # experimental config: perf.presleep |
|
378 | 394 | time.sleep(getint(ui, b"perf", b"presleep", 1)) |
|
379 | 395 | |
|
380 | 396 | if opts is None: |
|
381 | 397 | opts = {} |
|
382 | 398 | # redirect all to stderr unless buffer api is in use |
|
383 | 399 | if not ui._buffers: |
|
384 | 400 | ui = ui.copy() |
|
385 | 401 | uifout = safeattrsetter(ui, b'fout', ignoremissing=True) |
|
386 | 402 | if uifout: |
|
387 | 403 | # for "historical portability": |
|
388 | 404 | # ui.fout/ferr have been available since 1.9 (or 4e1ccd4c2b6d) |
|
389 | 405 | uifout.set(ui.ferr) |
|
390 | 406 | |
|
391 | 407 | # get a formatter |
|
392 | 408 | uiformatter = getattr(ui, 'formatter', None) |
|
393 | 409 | if uiformatter: |
|
394 | 410 | fm = uiformatter(b'perf', opts) |
|
395 | 411 | else: |
|
396 | 412 | # for "historical portability": |
|
397 | 413 | # define formatter locally, because ui.formatter has been |
|
398 | 414 | # available since 2.2 (or ae5f92e154d3) |
|
399 | 415 | from mercurial import node |
|
400 | 416 | |
|
401 | 417 | class defaultformatter(object): |
|
402 | 418 | """Minimized composition of baseformatter and plainformatter""" |
|
403 | 419 | |
|
404 | 420 | def __init__(self, ui, topic, opts): |
|
405 | 421 | self._ui = ui |
|
406 | 422 | if ui.debugflag: |
|
407 | 423 | self.hexfunc = node.hex |
|
408 | 424 | else: |
|
409 | 425 | self.hexfunc = node.short |
|
410 | 426 | |
|
411 | 427 | def __nonzero__(self): |
|
412 | 428 | return False |
|
413 | 429 | |
|
414 | 430 | __bool__ = __nonzero__ |
|
415 | 431 | |
|
416 | 432 | def startitem(self): |
|
417 | 433 | pass |
|
418 | 434 | |
|
419 | 435 | def data(self, **data): |
|
420 | 436 | pass |
|
421 | 437 | |
|
422 | 438 | def write(self, fields, deftext, *fielddata, **opts): |
|
423 | 439 | self._ui.write(deftext % fielddata, **opts) |
|
424 | 440 | |
|
425 | 441 | def condwrite(self, cond, fields, deftext, *fielddata, **opts): |
|
426 | 442 | if cond: |
|
427 | 443 | self._ui.write(deftext % fielddata, **opts) |
|
428 | 444 | |
|
429 | 445 | def plain(self, text, **opts): |
|
430 | 446 | self._ui.write(text, **opts) |
|
431 | 447 | |
|
432 | 448 | def end(self): |
|
433 | 449 | pass |
|
434 | 450 | |
|
435 | 451 | fm = defaultformatter(ui, b'perf', opts) |
|
436 | 452 | |
|
437 | 453 | # stub function, runs code only once instead of in a loop |
|
438 | 454 | # experimental config: perf.stub |
|
439 | 455 | if ui.configbool(b"perf", b"stub", False): |
|
440 | 456 | return functools.partial(stub_timer, fm), fm |
|
441 | 457 | |
|
442 | 458 | # experimental config: perf.all-timing |
|
443 | 459 | displayall = ui.configbool(b"perf", b"all-timing", False) |
|
444 | 460 | |
|
445 | 461 | # experimental config: perf.run-limits |
|
446 | 462 | limitspec = ui.configlist(b"perf", b"run-limits", []) |
|
447 | 463 | limits = [] |
|
448 | 464 | for item in limitspec: |
|
449 | 465 | parts = item.split(b'-', 1) |
|
450 | 466 | if len(parts) < 2: |
|
451 | 467 | ui.warn((b'malformatted run limit entry, missing "-": %s\n' % item)) |
|
452 | 468 | continue |
|
453 | 469 | try: |
|
454 | 470 | time_limit = float(_sysstr(parts[0])) |
|
455 | 471 | except ValueError as e: |
|
456 | 472 | ui.warn( |
|
457 | 473 | ( |
|
458 | 474 | b'malformatted run limit entry, %s: %s\n' |
|
459 | 475 | % (_bytestr(e), item) |
|
460 | 476 | ) |
|
461 | 477 | ) |
|
462 | 478 | continue |
|
463 | 479 | try: |
|
464 | 480 | run_limit = int(_sysstr(parts[1])) |
|
465 | 481 | except ValueError as e: |
|
466 | 482 | ui.warn( |
|
467 | 483 | ( |
|
468 | 484 | b'malformatted run limit entry, %s: %s\n' |
|
469 | 485 | % (_bytestr(e), item) |
|
470 | 486 | ) |
|
471 | 487 | ) |
|
472 | 488 | continue |
|
473 | 489 | limits.append((time_limit, run_limit)) |
|
474 | 490 | if not limits: |
|
475 | 491 | limits = DEFAULTLIMITS |
|
476 | 492 | |
|
477 | 493 | profiler = None |
|
478 | 494 | if profiling is not None: |
|
479 | 495 | if ui.configbool(b"perf", b"profile-benchmark", False): |
|
480 | 496 | profiler = profiling.profile(ui) |
|
481 | 497 | |
|
482 | 498 | prerun = getint(ui, b"perf", b"pre-run", 0) |
|
483 | 499 | t = functools.partial( |
|
484 | 500 | _timer, |
|
485 | 501 | fm, |
|
486 | 502 | displayall=displayall, |
|
487 | 503 | limits=limits, |
|
488 | 504 | prerun=prerun, |
|
489 | 505 | profiler=profiler, |
|
490 | 506 | ) |
|
491 | 507 | return t, fm |
|
492 | 508 | |
|
493 | 509 | |
|
494 | 510 | def stub_timer(fm, func, setup=None, title=None): |
|
495 | 511 | if setup is not None: |
|
496 | 512 | setup() |
|
497 | 513 | func() |
|
498 | 514 | |
|
499 | 515 | |
|
500 | 516 | @contextlib.contextmanager |
|
501 | 517 | def timeone(): |
|
502 | 518 | r = [] |
|
503 | 519 | ostart = os.times() |
|
504 | 520 | cstart = util.timer() |
|
505 | 521 | yield r |
|
506 | 522 | cstop = util.timer() |
|
507 | 523 | ostop = os.times() |
|
508 | 524 | a, b = ostart, ostop |
|
509 | 525 | r.append((cstop - cstart, b[0] - a[0], b[1] - a[1])) |
|
510 | 526 | |
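
``timeone`` yields a list that receives a single (wall, user, sys) tuple when
the block exits; a minimal usage sketch (the measured work is arbitrary):

    with timeone() as r:
        sum(range(10 ** 6))  # some work to measure
    wall, user, sys_time = r[0]  # wall clock and CPU time deltas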
|
511 | 527 | |
|
512 | 528 | # list of stop conditions (elapsed time, minimal run count)
|
513 | 529 | DEFAULTLIMITS = ( |
|
514 | 530 | (3.0, 100), |
|
515 | 531 | (10.0, 3), |
|
516 | 532 | ) |
|
517 | 533 | |
|
518 | 534 | |
|
519 | 535 | def _timer( |
|
520 | 536 | fm, |
|
521 | 537 | func, |
|
522 | 538 | setup=None, |
|
523 | 539 | title=None, |
|
524 | 540 | displayall=False, |
|
525 | 541 | limits=DEFAULTLIMITS, |
|
526 | 542 | prerun=0, |
|
527 | 543 | profiler=None, |
|
528 | 544 | ): |
|
529 | 545 | gc.collect() |
|
530 | 546 | results = [] |
|
531 | 547 | begin = util.timer() |
|
532 | 548 | count = 0 |
|
533 | 549 | if profiler is None: |
|
534 | 550 | profiler = NOOPCTX |
|
535 | 551 | for i in range(prerun): |
|
536 | 552 | if setup is not None: |
|
537 | 553 | setup() |
|
538 | 554 | func() |
|
539 | 555 | keepgoing = True |
|
540 | 556 | while keepgoing: |
|
541 | 557 | if setup is not None: |
|
542 | 558 | setup() |
|
543 | 559 | with profiler: |
|
544 | 560 | with timeone() as item: |
|
545 | 561 | r = func() |
|
546 | 562 | profiler = NOOPCTX |
|
547 | 563 | count += 1 |
|
548 | 564 | results.append(item[0]) |
|
549 | 565 | cstop = util.timer() |
|
550 | 566 | # Look for a stop condition. |
|
551 | 567 | elapsed = cstop - begin |
|
552 | 568 | for t, mincount in limits: |
|
553 | 569 | if elapsed >= t and count >= mincount: |
|
554 | 570 | keepgoing = False |
|
555 | 571 | break |
|
556 | 572 | |
|
557 | 573 | formatone(fm, results, title=title, result=r, displayall=displayall) |
|
558 | 574 | |
|
559 | 575 | |
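
To make the stop-condition loop in ``_timer`` concrete, here is an equivalent
sketch of the same logic (the helper name is ours, not part of perf.py):

    def should_stop(elapsed, count, limits=DEFAULTLIMITS):
        # stop as soon as any (time_limit, min_run_count) pair is satisfied
        return any(elapsed >= t and count >= mincount for t, mincount in limits)

    should_stop(4.2, 150)  # True: past 3.0s with at least 100 runs
    should_stop(4.2, 40)   # False: under 100 runs, and 10.0s not reached yet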
|
560 | 576 | def formatone(fm, timings, title=None, result=None, displayall=False): |
|
561 | 577 | |
|
562 | 578 | count = len(timings) |
|
563 | 579 | |
|
564 | 580 | fm.startitem() |
|
565 | 581 | |
|
566 | 582 | if title: |
|
567 | 583 | fm.write(b'title', b'! %s\n', title) |
|
568 | 584 | if result: |
|
569 | 585 | fm.write(b'result', b'! result: %s\n', result) |
|
570 | 586 | |
|
571 | 587 | def display(role, entry): |
|
572 | 588 | prefix = b'' |
|
573 | 589 | if role != b'best': |
|
574 | 590 | prefix = b'%s.' % role |
|
575 | 591 | fm.plain(b'!') |
|
576 | 592 | fm.write(prefix + b'wall', b' wall %f', entry[0]) |
|
577 | 593 | fm.write(prefix + b'comb', b' comb %f', entry[1] + entry[2]) |
|
578 | 594 | fm.write(prefix + b'user', b' user %f', entry[1]) |
|
579 | 595 | fm.write(prefix + b'sys', b' sys %f', entry[2]) |
|
580 | 596 | fm.write(prefix + b'count', b' (%s of %%d)' % role, count) |
|
581 | 597 | fm.plain(b'\n') |
|
582 | 598 | |
|
583 | 599 | timings.sort() |
|
584 | 600 | min_val = timings[0] |
|
585 | 601 | display(b'best', min_val) |
|
586 | 602 | if displayall: |
|
587 | 603 | max_val = timings[-1] |
|
588 | 604 | display(b'max', max_val) |
|
589 | 605 | avg = tuple([sum(x) / count for x in zip(*timings)]) |
|
590 | 606 | display(b'avg', avg) |
|
591 | 607 | median = timings[len(timings) // 2] |
|
592 | 608 | display(b'median', median) |
|
593 | 609 | |
|
594 | 610 | |
|
595 | 611 | # utilities for historical portability |
|
596 | 612 | |
|
597 | 613 | |
|
598 | 614 | def getint(ui, section, name, default): |
|
599 | 615 | # for "historical portability": |
|
600 | 616 | # ui.configint has been available since 1.9 (or fa2b596db182) |
|
601 | 617 | v = ui.config(section, name, None) |
|
602 | 618 | if v is None: |
|
603 | 619 | return default |
|
604 | 620 | try: |
|
605 | 621 | return int(v) |
|
606 | 622 | except ValueError: |
|
607 | 623 | raise error.ConfigError( |
|
608 | 624 | b"%s.%s is not an integer ('%s')" % (section, name, v) |
|
609 | 625 | ) |
|
610 | 626 | |
|
611 | 627 | |
|
612 | 628 | def safeattrsetter(obj, name, ignoremissing=False): |
|
613 | 629 | """Ensure that 'obj' has 'name' attribute before subsequent setattr |
|
614 | 630 | |
|
615 | 631 | This function aborts if 'obj' doesn't have the 'name' attribute

616 | 632 | at runtime. This avoids overlooking future removal of an attribute,

617 | 633 | which would break the assumptions of the performance measurement.
|
618 | 634 | |
|
619 | 635 | This function returns the object to (1) assign a new value, and |
|
620 | 636 | (2) restore an original value to the attribute. |
|
621 | 637 | |
|
622 | 638 | If 'ignoremissing' is true, a missing 'name' attribute doesn't cause

623 | 639 | an abort, and this function returns None. This is useful to
|
624 | 640 | examine an attribute, which isn't ensured in all Mercurial |
|
625 | 641 | versions. |
|
626 | 642 | """ |
|
627 | 643 | if not util.safehasattr(obj, name): |
|
628 | 644 | if ignoremissing: |
|
629 | 645 | return None |
|
630 | 646 | raise error.Abort( |
|
631 | 647 | ( |
|
632 | 648 | b"missing attribute %s of %s might break assumption" |
|
633 | 649 | b" of performance measurement" |
|
634 | 650 | ) |
|
635 | 651 | % (name, obj) |
|
636 | 652 | ) |
|
637 | 653 | |
|
638 | 654 | origvalue = getattr(obj, _sysstr(name)) |
|
639 | 655 | |
|
640 | 656 | class attrutil(object): |
|
641 | 657 | def set(self, newvalue): |
|
642 | 658 | setattr(obj, _sysstr(name), newvalue) |
|
643 | 659 | |
|
644 | 660 | def restore(self): |
|
645 | 661 | setattr(obj, _sysstr(name), origvalue) |
|
646 | 662 | |
|
647 | 663 | return attrutil() |
|
648 | 664 | |
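
``safeattrsetter`` returns a small handle for swapping an attribute and
restoring it later; an illustrative round-trip mirroring how ``gettimer``
redirects output:

    fout = safeattrsetter(ui, b'fout', ignoremissing=True)
    if fout:
        fout.set(ui.ferr)  # redirect output to stderr
        # ... run the benchmark ...
        fout.restore()  # put the original stream back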
|
649 | 665 | |
|
650 | 666 | # utilities to examine internal API changes
|
651 | 667 | |
|
652 | 668 | |
|
653 | 669 | def getbranchmapsubsettable(): |
|
654 | 670 | # for "historical portability": |
|
655 | 671 | # subsettable is defined in: |
|
656 | 672 | # - branchmap since 2.9 (or 175c6fd8cacc) |
|
657 | 673 | # - repoview since 2.5 (or 59a9f18d4587) |
|
658 | 674 | # - repoviewutil since 5.0 |
|
659 | 675 | for mod in (branchmap, repoview, repoviewutil): |
|
660 | 676 | subsettable = getattr(mod, 'subsettable', None) |
|
661 | 677 | if subsettable: |
|
662 | 678 | return subsettable |
|
663 | 679 | |
|
664 | 680 | # bisecting in bcee63733aad::59a9f18d4587 can reach here (both |
|
665 | 681 | # branchmap and repoview modules exist, but subsettable attribute |
|
666 | 682 | # doesn't) |
|
667 | 683 | raise error.Abort( |
|
668 | 684 | b"perfbranchmap not available with this Mercurial", |
|
669 | 685 | hint=b"use 2.5 or later", |
|
670 | 686 | ) |
|
671 | 687 | |
|
672 | 688 | |
|
673 | 689 | def getsvfs(repo): |
|
674 | 690 | """Return appropriate object to access files under .hg/store""" |
|
675 | 691 | # for "historical portability": |
|
676 | 692 | # repo.svfs has been available since 2.3 (or 7034365089bf) |
|
677 | 693 | svfs = getattr(repo, 'svfs', None) |
|
678 | 694 | if svfs: |
|
679 | 695 | return svfs |
|
680 | 696 | else: |
|
681 | 697 | return getattr(repo, 'sopener') |
|
682 | 698 | |
|
683 | 699 | |
|
684 | 700 | def getvfs(repo): |
|
685 | 701 | """Return appropriate object to access files under .hg""" |
|
686 | 702 | # for "historical portability": |
|
687 | 703 | # repo.vfs has been available since 2.3 (or 7034365089bf) |
|
688 | 704 | vfs = getattr(repo, 'vfs', None) |
|
689 | 705 | if vfs: |
|
690 | 706 | return vfs |
|
691 | 707 | else: |
|
692 | 708 | return getattr(repo, 'opener') |
|
693 | 709 | |
|
694 | 710 | |
|
695 | 711 | def repocleartagscachefunc(repo): |
|
696 | 712 | """Return the function to clear tags cache according to repo internal API""" |
|
697 | 713 | if util.safehasattr(repo, b'_tagscache'): # since 2.0 (or 9dca7653b525) |
|
698 | 714 | # in this case, setattr(repo, '_tagscache', None) or so isn't |
|
699 | 715 | # correct way to clear tags cache, because existing code paths |
|
700 | 716 | # expect _tagscache to be a structured object. |
|
701 | 717 | def clearcache(): |
|
702 | 718 | # _tagscache has been filteredpropertycache since 2.5 (or |
|
703 | 719 | # 98c867ac1330), and delattr() can't work in such case |
|
704 | 720 | if '_tagscache' in vars(repo): |
|
705 | 721 | del repo.__dict__['_tagscache'] |
|
706 | 722 | |
|
707 | 723 | return clearcache |
|
708 | 724 | |
|
709 | 725 | repotags = safeattrsetter(repo, b'_tags', ignoremissing=True) |
|
710 | 726 | if repotags: # since 1.4 (or 5614a628d173) |
|
711 | 727 | return lambda: repotags.set(None) |
|
712 | 728 | |
|
713 | 729 | repotagscache = safeattrsetter(repo, b'tagscache', ignoremissing=True) |
|
714 | 730 | if repotagscache: # since 0.6 (or d7df759d0e97) |
|
715 | 731 | return lambda: repotagscache.set(None) |
|
716 | 732 | |
|
717 | 733 | # Mercurial earlier than 0.6 (or d7df759d0e97) logically reaches |
|
718 | 734 | # this point, but it isn't so problematic, because: |
|
719 | 735 | # - repo.tags of such Mercurial isn't "callable", and repo.tags() |
|
720 | 736 | # in perftags() causes failure soon |
|
721 | 737 | # - perf.py itself has been available since 1.1 (or eb240755386d) |
|
722 | 738 | raise error.Abort(b"tags API of this hg command is unknown") |
|
723 | 739 | |
|
724 | 740 | |
|
725 | 741 | # utilities to clear cache |
|
726 | 742 | |
|
727 | 743 | |
|
728 | 744 | def clearfilecache(obj, attrname): |
|
729 | 745 | unfiltered = getattr(obj, 'unfiltered', None) |
|
730 | 746 | if unfiltered is not None: |
|
731 | 747 | obj = obj.unfiltered() |
|
732 | 748 | if attrname in vars(obj): |
|
733 | 749 | delattr(obj, attrname) |
|
734 | 750 | obj._filecache.pop(attrname, None) |
|
735 | 751 | |
|
736 | 752 | |
|
737 | 753 | def clearchangelog(repo): |
|
738 | 754 | if repo is not repo.unfiltered(): |
|
739 | 755 | object.__setattr__(repo, '_clcachekey', None) |
|
740 | 756 | object.__setattr__(repo, '_clcache', None) |
|
741 | 757 | clearfilecache(repo.unfiltered(), 'changelog') |
|
742 | 758 | |
|
743 | 759 | |
|
744 | 760 | # perf commands |
|
745 | 761 | |
|
746 | 762 | |
|
747 | 763 | @command(b'perf::walk|perfwalk', formatteropts) |
|
748 | 764 | def perfwalk(ui, repo, *pats, **opts): |
|
749 | 765 | opts = _byteskwargs(opts) |
|
750 | 766 | timer, fm = gettimer(ui, opts) |
|
751 | 767 | m = scmutil.match(repo[None], pats, {}) |
|
752 | 768 | timer( |
|
753 | 769 | lambda: len( |
|
754 | 770 | list( |
|
755 | 771 | repo.dirstate.walk(m, subrepos=[], unknown=True, ignored=False) |
|
756 | 772 | ) |
|
757 | 773 | ) |
|
758 | 774 | ) |
|
759 | 775 | fm.end() |
|
760 | 776 | |
|
761 | 777 | |
|
762 | 778 | @command(b'perf::annotate|perfannotate', formatteropts) |
|
763 | 779 | def perfannotate(ui, repo, f, **opts): |
|
764 | 780 | opts = _byteskwargs(opts) |
|
765 | 781 | timer, fm = gettimer(ui, opts) |
|
766 | 782 | fc = repo[b'.'][f] |
|
767 | 783 | timer(lambda: len(fc.annotate(True))) |
|
768 | 784 | fm.end() |
|
769 | 785 | |
|
770 | 786 | |
|
771 | 787 | @command( |
|
772 | 788 | b'perf::status|perfstatus', |
|
773 | 789 | [ |
|
774 | 790 | (b'u', b'unknown', False, b'ask status to look for unknown files'), |
|
775 | 791 | (b'', b'dirstate', False, b'benchmark the internal dirstate call'), |
|
776 | 792 | ] |
|
777 | 793 | + formatteropts, |
|
778 | 794 | ) |
|
779 | 795 | def perfstatus(ui, repo, **opts): |
|
780 | 796 | """benchmark the performance of a single status call |
|
781 | 797 | |
|
782 | 798 | The repository data are preserved between each call. |
|
783 | 799 | |
|
784 | 800 | By default, only the status of the tracked file are requested. If |
|
785 | 801 | `--unknown` is passed, the "unknown" files are also tracked. |
|
786 | 802 | """ |
|
787 | 803 | opts = _byteskwargs(opts) |
|
788 | 804 | # m = match.always(repo.root, repo.getcwd()) |
|
789 | 805 | # timer(lambda: sum(map(len, repo.dirstate.status(m, [], False, False, |
|
790 | 806 | # False)))) |
|
791 | 807 | timer, fm = gettimer(ui, opts) |
|
792 | 808 | if opts[b'dirstate']: |
|
793 | 809 | dirstate = repo.dirstate |
|
794 | 810 | m = scmutil.matchall(repo) |
|
795 | 811 | unknown = opts[b'unknown'] |
|
796 | 812 | |
|
797 | 813 | def status_dirstate(): |
|
798 | 814 | s = dirstate.status( |
|
799 | 815 | m, subrepos=[], ignored=False, clean=False, unknown=unknown |
|
800 | 816 | ) |
|
801 | 817 | sum(map(bool, s)) |
|
802 | 818 | |
|
803 | 819 | timer(status_dirstate) |
|
804 | 820 | else: |
|
805 | 821 | timer(lambda: sum(map(len, repo.status(unknown=opts[b'unknown'])))) |
|
806 | 822 | fm.end() |
|
807 | 823 | |
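
For instance, once the extension is loaded, the command runs like any other
hg command; the timings below are made up, but the line format matches what
``formatone`` above emits:

    $ hg --config extensions.perf=contrib/perf.py perf::status --dirstate
    ! wall 0.012345 comb 0.010000 user 0.010000 sys 0.000000 (best of 100)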
|
808 | 824 | |
|
809 | 825 | @command(b'perf::addremove|perfaddremove', formatteropts) |
|
810 | 826 | def perfaddremove(ui, repo, **opts): |
|
811 | 827 | opts = _byteskwargs(opts) |
|
812 | 828 | timer, fm = gettimer(ui, opts) |
|
813 | 829 | try: |
|
814 | 830 | oldquiet = repo.ui.quiet |
|
815 | 831 | repo.ui.quiet = True |
|
816 | 832 | matcher = scmutil.match(repo[None]) |
|
817 | 833 | opts[b'dry_run'] = True |
|
818 | 834 | if 'uipathfn' in getargspec(scmutil.addremove).args: |
|
819 | 835 | uipathfn = scmutil.getuipathfn(repo) |
|
820 | 836 | timer(lambda: scmutil.addremove(repo, matcher, b"", uipathfn, opts)) |
|
821 | 837 | else: |
|
822 | 838 | timer(lambda: scmutil.addremove(repo, matcher, b"", opts)) |
|
823 | 839 | finally: |
|
824 | 840 | repo.ui.quiet = oldquiet |
|
825 | 841 | fm.end() |
|
826 | 842 | |
|
827 | 843 | |
|
828 | 844 | def clearcaches(cl): |
|
829 | 845 | # behave somewhat consistently across internal API changes |
|
830 | 846 | if util.safehasattr(cl, b'clearcaches'): |
|
831 | 847 | cl.clearcaches() |
|
832 | 848 | elif util.safehasattr(cl, b'_nodecache'): |
|
833 | 849 | # <= hg-5.2 |
|
834 | 850 | from mercurial.node import nullid, nullrev |
|
835 | 851 | |
|
836 | 852 | cl._nodecache = {nullid: nullrev} |
|
837 | 853 | cl._nodepos = None |
|
838 | 854 | |
|
839 | 855 | |
|
840 | 856 | @command(b'perf::heads|perfheads', formatteropts) |
|
841 | 857 | def perfheads(ui, repo, **opts): |
|
842 | 858 | """benchmark the computation of a changelog heads""" |
|
843 | 859 | opts = _byteskwargs(opts) |
|
844 | 860 | timer, fm = gettimer(ui, opts) |
|
845 | 861 | cl = repo.changelog |
|
846 | 862 | |
|
847 | 863 | def s(): |
|
848 | 864 | clearcaches(cl) |
|
849 | 865 | |
|
850 | 866 | def d(): |
|
851 | 867 | len(cl.headrevs()) |
|
852 | 868 | |
|
853 | 869 | timer(d, setup=s) |
|
854 | 870 | fm.end() |
|
855 | 871 | |
|
856 | 872 | |
|
857 | 873 | @command( |
|
858 | 874 | b'perf::tags|perftags', |
|
859 | 875 | formatteropts |
|
860 | 876 | + [ |
|
861 | 877 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), |
|
862 | 878 | ], |
|
863 | 879 | ) |
|
864 | 880 | def perftags(ui, repo, **opts): |
|
865 | 881 | opts = _byteskwargs(opts) |
|
866 | 882 | timer, fm = gettimer(ui, opts) |
|
867 | 883 | repocleartagscache = repocleartagscachefunc(repo) |
|
868 | 884 | clearrevlogs = opts[b'clear_revlogs'] |
|
869 | 885 | |
|
870 | 886 | def s(): |
|
871 | 887 | if clearrevlogs: |
|
872 | 888 | clearchangelog(repo) |
|
873 | 889 | clearfilecache(repo.unfiltered(), 'manifest') |
|
874 | 890 | repocleartagscache() |
|
875 | 891 | |
|
876 | 892 | def t(): |
|
877 | 893 | return len(repo.tags()) |
|
878 | 894 | |
|
879 | 895 | timer(t, setup=s) |
|
880 | 896 | fm.end() |
|
881 | 897 | |
|
882 | 898 | |
|
883 | 899 | @command(b'perf::ancestors|perfancestors', formatteropts) |
|
884 | 900 | def perfancestors(ui, repo, **opts): |
|
885 | 901 | opts = _byteskwargs(opts) |
|
886 | 902 | timer, fm = gettimer(ui, opts) |
|
887 | 903 | heads = repo.changelog.headrevs() |
|
888 | 904 | |
|
889 | 905 | def d(): |
|
890 | 906 | for a in repo.changelog.ancestors(heads): |
|
891 | 907 | pass |
|
892 | 908 | |
|
893 | 909 | timer(d) |
|
894 | 910 | fm.end() |
|
895 | 911 | |
|
896 | 912 | |
|
897 | 913 | @command(b'perf::ancestorset|perfancestorset', formatteropts) |
|
898 | 914 | def perfancestorset(ui, repo, revset, **opts): |
|
899 | 915 | opts = _byteskwargs(opts) |
|
900 | 916 | timer, fm = gettimer(ui, opts) |
|
901 | 917 | revs = repo.revs(revset) |
|
902 | 918 | heads = repo.changelog.headrevs() |
|
903 | 919 | |
|
904 | 920 | def d(): |
|
905 | 921 | s = repo.changelog.ancestors(heads) |
|
906 | 922 | for rev in revs: |
|
907 | 923 | rev in s |
|
908 | 924 | |
|
909 | 925 | timer(d) |
|
910 | 926 | fm.end() |
|
911 | 927 | |
|
912 | 928 | |
|
913 | 929 | @command(b'perf::discovery|perfdiscovery', formatteropts, b'PATH') |
|
914 | 930 | def perfdiscovery(ui, repo, path, **opts): |
|
915 | 931 | """benchmark discovery between local repo and the peer at given path""" |
|
916 | 932 | repos = [repo, None] |
|
917 | 933 | timer, fm = gettimer(ui, opts) |
|
918 | 934 | |
|
919 | 935 | try: |
|
920 | 936 | from mercurial.utils.urlutil import get_unique_pull_path |
|
921 | 937 | |
|
922 | 938 | path = get_unique_pull_path(b'perfdiscovery', repo, ui, path)[0] |
|
923 | 939 | except ImportError: |
|
924 | 940 | path = ui.expandpath(path) |
|
925 | 941 | |
|
926 | 942 | def s(): |
|
927 | 943 | repos[1] = hg.peer(ui, opts, path) |
|
928 | 944 | |
|
929 | 945 | def d(): |
|
930 | 946 | setdiscovery.findcommonheads(ui, *repos) |
|
931 | 947 | |
|
932 | 948 | timer(d, setup=s) |
|
933 | 949 | fm.end() |
|
934 | 950 | |
|
935 | 951 | |
|
936 | 952 | @command( |
|
937 | 953 | b'perf::bookmarks|perfbookmarks', |
|
938 | 954 | formatteropts |
|
939 | 955 | + [ |
|
940 | 956 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), |
|
941 | 957 | ], |
|
942 | 958 | ) |
|
943 | 959 | def perfbookmarks(ui, repo, **opts): |
|
944 | 960 | """benchmark parsing bookmarks from disk to memory""" |
|
945 | 961 | opts = _byteskwargs(opts) |
|
946 | 962 | timer, fm = gettimer(ui, opts) |
|
947 | 963 | |
|
948 | 964 | clearrevlogs = opts[b'clear_revlogs'] |
|
949 | 965 | |
|
950 | 966 | def s(): |
|
951 | 967 | if clearrevlogs: |
|
952 | 968 | clearchangelog(repo) |
|
953 | 969 | clearfilecache(repo, b'_bookmarks') |
|
954 | 970 | |
|
955 | 971 | def d(): |
|
956 | 972 | repo._bookmarks |
|
957 | 973 | |
|
958 | 974 | timer(d, setup=s) |
|
959 | 975 | fm.end() |
|
960 | 976 | |
|
961 | 977 | |
|
962 | 978 | @command(b'perf::bundleread|perfbundleread', formatteropts, b'BUNDLE') |
|
963 | 979 | def perfbundleread(ui, repo, bundlepath, **opts): |
|
964 | 980 | """Benchmark reading of bundle files. |
|
965 | 981 | |
|
966 | 982 | This command is meant to isolate the I/O part of bundle reading as |
|
967 | 983 | much as possible. |
|
968 | 984 | """ |
|
969 | 985 | from mercurial import ( |
|
970 | 986 | bundle2, |
|
971 | 987 | exchange, |
|
972 | 988 | streamclone, |
|
973 | 989 | ) |
|
974 | 990 | |
|
975 | 991 | opts = _byteskwargs(opts) |
|
976 | 992 | |
|
977 | 993 | def makebench(fn): |
|
978 | 994 | def run(): |
|
979 | 995 | with open(bundlepath, b'rb') as fh: |
|
980 | 996 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
981 | 997 | fn(bundle) |
|
982 | 998 | |
|
983 | 999 | return run |
|
984 | 1000 | |
|
985 | 1001 | def makereadnbytes(size): |
|
986 | 1002 | def run(): |
|
987 | 1003 | with open(bundlepath, b'rb') as fh: |
|
988 | 1004 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
989 | 1005 | while bundle.read(size): |
|
990 | 1006 | pass |
|
991 | 1007 | |
|
992 | 1008 | return run |
|
993 | 1009 | |
|
994 | 1010 | def makestdioread(size): |
|
995 | 1011 | def run(): |
|
996 | 1012 | with open(bundlepath, b'rb') as fh: |
|
997 | 1013 | while fh.read(size): |
|
998 | 1014 | pass |
|
999 | 1015 | |
|
1000 | 1016 | return run |
|
1001 | 1017 | |
|
1002 | 1018 | # bundle1 |
|
1003 | 1019 | |
|
1004 | 1020 | def deltaiter(bundle): |
|
1005 | 1021 | for delta in bundle.deltaiter(): |
|
1006 | 1022 | pass |
|
1007 | 1023 | |
|
1008 | 1024 | def iterchunks(bundle): |
|
1009 | 1025 | for chunk in bundle.getchunks(): |
|
1010 | 1026 | pass |
|
1011 | 1027 | |
|
1012 | 1028 | # bundle2 |
|
1013 | 1029 | |
|
1014 | 1030 | def forwardchunks(bundle): |
|
1015 | 1031 | for chunk in bundle._forwardchunks(): |
|
1016 | 1032 | pass |
|
1017 | 1033 | |
|
1018 | 1034 | def iterparts(bundle): |
|
1019 | 1035 | for part in bundle.iterparts(): |
|
1020 | 1036 | pass |
|
1021 | 1037 | |
|
1022 | 1038 | def iterpartsseekable(bundle): |
|
1023 | 1039 | for part in bundle.iterparts(seekable=True): |
|
1024 | 1040 | pass |
|
1025 | 1041 | |
|
1026 | 1042 | def seek(bundle): |
|
1027 | 1043 | for part in bundle.iterparts(seekable=True): |
|
1028 | 1044 | part.seek(0, os.SEEK_END) |
|
1029 | 1045 | |
|
1030 | 1046 | def makepartreadnbytes(size): |
|
1031 | 1047 | def run(): |
|
1032 | 1048 | with open(bundlepath, b'rb') as fh: |
|
1033 | 1049 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
1034 | 1050 | for part in bundle.iterparts(): |
|
1035 | 1051 | while part.read(size): |
|
1036 | 1052 | pass |
|
1037 | 1053 | |
|
1038 | 1054 | return run |
|
1039 | 1055 | |
|
1040 | 1056 | benches = [ |
|
1041 | 1057 | (makestdioread(8192), b'read(8k)'), |
|
1042 | 1058 | (makestdioread(16384), b'read(16k)'), |
|
1043 | 1059 | (makestdioread(32768), b'read(32k)'), |
|
1044 | 1060 | (makestdioread(131072), b'read(128k)'), |
|
1045 | 1061 | ] |
|
1046 | 1062 | |
|
1047 | 1063 | with open(bundlepath, b'rb') as fh: |
|
1048 | 1064 | bundle = exchange.readbundle(ui, fh, bundlepath) |
|
1049 | 1065 | |
|
1050 | 1066 | if isinstance(bundle, changegroup.cg1unpacker): |
|
1051 | 1067 | benches.extend( |
|
1052 | 1068 | [ |
|
1053 | 1069 | (makebench(deltaiter), b'cg1 deltaiter()'), |
|
1054 | 1070 | (makebench(iterchunks), b'cg1 getchunks()'), |
|
1055 | 1071 | (makereadnbytes(8192), b'cg1 read(8k)'), |
|
1056 | 1072 | (makereadnbytes(16384), b'cg1 read(16k)'), |
|
1057 | 1073 | (makereadnbytes(32768), b'cg1 read(32k)'), |
|
1058 | 1074 | (makereadnbytes(131072), b'cg1 read(128k)'), |
|
1059 | 1075 | ] |
|
1060 | 1076 | ) |
|
1061 | 1077 | elif isinstance(bundle, bundle2.unbundle20): |
|
1062 | 1078 | benches.extend( |
|
1063 | 1079 | [ |
|
1064 | 1080 | (makebench(forwardchunks), b'bundle2 forwardchunks()'), |
|
1065 | 1081 | (makebench(iterparts), b'bundle2 iterparts()'), |
|
1066 | 1082 | ( |
|
1067 | 1083 | makebench(iterpartsseekable), |
|
1068 | 1084 | b'bundle2 iterparts() seekable', |
|
1069 | 1085 | ), |
|
1070 | 1086 | (makebench(seek), b'bundle2 part seek()'), |
|
1071 | 1087 | (makepartreadnbytes(8192), b'bundle2 part read(8k)'), |
|
1072 | 1088 | (makepartreadnbytes(16384), b'bundle2 part read(16k)'), |
|
1073 | 1089 | (makepartreadnbytes(32768), b'bundle2 part read(32k)'), |
|
1074 | 1090 | (makepartreadnbytes(131072), b'bundle2 part read(128k)'), |
|
1075 | 1091 | ] |
|
1076 | 1092 | ) |
|
1077 | 1093 | elif isinstance(bundle, streamclone.streamcloneapplier): |
|
1078 | 1094 | raise error.Abort(b'stream clone bundles not supported') |
|
1079 | 1095 | else: |
|
1080 | 1096 | raise error.Abort(b'unhandled bundle type: %s' % type(bundle)) |
|
1081 | 1097 | |
|
1082 | 1098 | for fn, title in benches: |
|
1083 | 1099 | timer, fm = gettimer(ui, opts) |
|
1084 | 1100 | timer(fn, title=title) |
|
1085 | 1101 | fm.end() |
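# Illustrative invocation (bundle path is hypothetical):
#   $ hg bundle --all ../all.hg && hg perf::bundleread ../all.hg
# Which benchmarks run depends on the container detected above: the raw
# read() timings always run; the cg1- and bundle2-specific ones run only
# for the matching bundle type.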
|
1086 | 1102 | |
|
1087 | 1103 | |
|
1088 | 1104 | @command( |
|
1089 | 1105 | b'perf::changegroupchangelog|perfchangegroupchangelog', |
|
1090 | 1106 | formatteropts |
|
1091 | 1107 | + [ |
|
1092 | 1108 | (b'', b'cgversion', b'02', b'changegroup version'), |
|
1093 | 1109 | (b'r', b'rev', b'', b'revisions to add to changegroup'), |
|
1094 | 1110 | ], |
|
1095 | 1111 | ) |
|
1096 | 1112 | def perfchangegroupchangelog(ui, repo, cgversion=b'02', rev=None, **opts): |
|
1097 | 1113 | """Benchmark producing a changelog group for a changegroup. |
|
1098 | 1114 | |
|
1099 | 1115 | This measures the time spent processing the changelog during a |
|
1100 | 1116 | bundle operation. This occurs during `hg bundle` and on a server |
|
1101 | 1117 | processing a `getbundle` wire protocol request (handles clones |
|
1102 | 1118 | and pull requests). |
|
1103 | 1119 | |
|
1104 | 1120 | By default, all revisions are added to the changegroup. |
|
1105 | 1121 | """ |
|
1106 | 1122 | opts = _byteskwargs(opts) |
|
1107 | 1123 | cl = repo.changelog |
|
1108 | 1124 | nodes = [cl.lookup(r) for r in repo.revs(rev or b'all()')] |
|
1109 | 1125 | bundler = changegroup.getbundler(cgversion, repo) |
|
1110 | 1126 | |
|
1111 | 1127 | def d(): |
|
1112 | 1128 | state, chunks = bundler._generatechangelog(cl, nodes) |
|
1113 | 1129 | for chunk in chunks: |
|
1114 | 1130 | pass |
|
1115 | 1131 | |
|
1116 | 1132 | timer, fm = gettimer(ui, opts) |
|
1117 | 1133 | |
|
1118 | 1134 | # Terminal printing can interfere with timing. So disable it. |
|
1119 | 1135 | with ui.configoverride({(b'progress', b'disable'): True}): |
|
1120 | 1136 | timer(d) |
|
1121 | 1137 | |
|
1122 | 1138 | fm.end() |
|
1123 | 1139 | |
|
1124 | 1140 | |
|
1125 | 1141 | @command(b'perf::dirs|perfdirs', formatteropts) |
|
1126 | 1142 | def perfdirs(ui, repo, **opts): |
|
1127 | 1143 | opts = _byteskwargs(opts) |
|
1128 | 1144 | timer, fm = gettimer(ui, opts) |
|
1129 | 1145 | dirstate = repo.dirstate |
|
1130 | 1146 | b'a' in dirstate |
|
1131 | 1147 | |
|
1132 | 1148 | def d(): |
|
1133 | 1149 | dirstate.hasdir(b'a') |
|
1134 | del dirstate._map._dirs | |
|
1150 | try: | |
|
1151 | del dirstate._map._dirs | |
|
1152 | except AttributeError: | |
|
1153 | pass | |
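# The try/except above (and the matching ones in perfdirstatedirs and
# perfdirfoldmap below) tolerates dirstate map implementations that do not
# expose a deletable `_dirs` property cache, presumably such as the
# Rust-backed map; in that case there is simply nothing to drop.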
|
1135 | 1154 | |
|
1136 | 1155 | timer(d) |
|
1137 | 1156 | fm.end() |
|
1138 | 1157 | |
|
1139 | 1158 | |
|
1140 | 1159 | @command( |
|
1141 | 1160 | b'perf::dirstate|perfdirstate', |
|
1142 | 1161 | [ |
|
1143 | 1162 | ( |
|
1144 | 1163 | b'', |
|
1145 | 1164 | b'iteration', |
|
1146 | 1165 | None, |
|
1147 | 1166 | b'benchmark a full iteration for the dirstate', |
|
1148 | 1167 | ), |
|
1149 | 1168 | ( |
|
1150 | 1169 | b'', |
|
1151 | 1170 | b'contains', |
|
1152 | 1171 | None, |
|
1153 | 1172 | b'benchmark a large amount of `nf in dirstate` calls', |
|
1154 | 1173 | ), |
|
1155 | 1174 | ] |
|
1156 | 1175 | + formatteropts, |
|
1157 | 1176 | ) |
|
1158 | 1177 | def perfdirstate(ui, repo, **opts): |
|
1159 | 1178 | """benchmark the time of various dirstate operations |
|
1160 | 1179 | |
|
1161 | 1180 | By default benchmark the time necessary to load a dirstate from scratch. |
|
1162 | 1181 | The dirstate is loaded to the point where a "contains" request can be |
|
1163 | 1182 | answered. |
|
1164 | 1183 | """ |
|
1165 | 1184 | opts = _byteskwargs(opts) |
|
1166 | 1185 | timer, fm = gettimer(ui, opts) |
|
1167 | 1186 | b"a" in repo.dirstate |
|
1168 | 1187 | |
|
1169 | 1188 | if opts[b'iteration'] and opts[b'contains']: |
|
1170 | 1189 | msg = b'only specify one of --iteration or --contains' |
|
1171 | 1190 | raise error.Abort(msg) |
|
1172 | 1191 | |
|
1173 | 1192 | if opts[b'iteration']: |
|
1174 | 1193 | setup = None |
|
1175 | 1194 | dirstate = repo.dirstate |
|
1176 | 1195 | |
|
1177 | 1196 | def d(): |
|
1178 | 1197 | for f in dirstate: |
|
1179 | 1198 | pass |
|
1180 | 1199 | |
|
1181 | 1200 | elif opts[b'contains']: |
|
1182 | 1201 | setup = None |
|
1183 | 1202 | dirstate = repo.dirstate |
|
1184 | 1203 | allfiles = list(dirstate) |
|
1185 | 1204 | # also add file path that will be "missing" from the dirstate |
|
1186 | 1205 | allfiles.extend([f[::-1] for f in allfiles]) |
|
1187 | 1206 | |
|
1188 | 1207 | def d(): |
|
1189 | 1208 | for f in allfiles: |
|
1190 | 1209 | f in dirstate |
|
1191 | 1210 | |
|
1192 | 1211 | else: |
|
1193 | 1212 | |
|
1194 | 1213 | def setup(): |
|
1195 | 1214 | repo.dirstate.invalidate() |
|
1196 | 1215 | |
|
1197 | 1216 | def d(): |
|
1198 | 1217 | b"a" in repo.dirstate |
|
1199 | 1218 | |
|
1200 | 1219 | timer(d, setup=setup) |
|
1201 | 1220 | fm.end() |
|
1202 | 1221 | |
|
1203 | 1222 | |
|
1204 | 1223 | @command(b'perf::dirstatedirs|perfdirstatedirs', formatteropts) |
|
1205 | 1224 | def perfdirstatedirs(ui, repo, **opts): |
|
1206 | 1225 | """benchmark a 'dirstate.hasdir' call from an empty `dirs` cache""" |
|
1207 | 1226 | opts = _byteskwargs(opts) |
|
1208 | 1227 | timer, fm = gettimer(ui, opts) |
|
1209 | 1228 | repo.dirstate.hasdir(b"a") |
|
1210 | 1229 | |
|
1211 | 1230 | def setup(): |
|
1212 | del repo.dirstate._map._dirs | |
|
1231 | try: | |
|
1232 | del repo.dirstate._map._dirs | |
|
1233 | except AttributeError: | |
|
1234 | pass | |
|
1213 | 1235 | |
|
1214 | 1236 | def d(): |
|
1215 | 1237 | repo.dirstate.hasdir(b"a") |
|
1216 | 1238 | |
|
1217 | 1239 | timer(d, setup=setup) |
|
1218 | 1240 | fm.end() |
|
1219 | 1241 | |
|
1220 | 1242 | |
|
1221 | 1243 | @command(b'perf::dirstatefoldmap|perfdirstatefoldmap', formatteropts) |
|
1222 | 1244 | def perfdirstatefoldmap(ui, repo, **opts): |
|
1223 | 1245 | """benchmark a `dirstate._map.filefoldmap.get()` request |
|
1224 | 1246 | |
|
1225 | 1247 | The dirstate filefoldmap cache is dropped between every request. |
|
1226 | 1248 | """ |
|
1227 | 1249 | opts = _byteskwargs(opts) |
|
1228 | 1250 | timer, fm = gettimer(ui, opts) |
|
1229 | 1251 | dirstate = repo.dirstate |
|
1230 | 1252 | dirstate._map.filefoldmap.get(b'a') |
|
1231 | 1253 | |
|
1232 | 1254 | def setup(): |
|
1233 | 1255 | del dirstate._map.filefoldmap |
|
1234 | 1256 | |
|
1235 | 1257 | def d(): |
|
1236 | 1258 | dirstate._map.filefoldmap.get(b'a') |
|
1237 | 1259 | |
|
1238 | 1260 | timer(d, setup=setup) |
|
1239 | 1261 | fm.end() |
|
1240 | 1262 | |
|
1241 | 1263 | |
|
1242 | 1264 | @command(b'perf::dirfoldmap|perfdirfoldmap', formatteropts) |
|
1243 | 1265 | def perfdirfoldmap(ui, repo, **opts): |
|
1244 | 1266 | """benchmark a `dirstate._map.dirfoldmap.get()` request |
|
1245 | 1267 | |
|
1246 | 1268 | The dirstate dirfoldmap cache is dropped between every request. |
|
1247 | 1269 | """ |
|
1248 | 1270 | opts = _byteskwargs(opts) |
|
1249 | 1271 | timer, fm = gettimer(ui, opts) |
|
1250 | 1272 | dirstate = repo.dirstate |
|
1251 | 1273 | dirstate._map.dirfoldmap.get(b'a') |
|
1252 | 1274 | |
|
1253 | 1275 | def setup(): |
|
1254 | 1276 | del dirstate._map.dirfoldmap |
|
1255 | del dirstate._map._dirs | |
|
1277 | try: | |
|
1278 | del dirstate._map._dirs | |
|
1279 | except AttributeError: | |
|
1280 | pass | |
|
1256 | 1281 | |
|
1257 | 1282 | def d(): |
|
1258 | 1283 | dirstate._map.dirfoldmap.get(b'a') |
|
1259 | 1284 | |
|
1260 | 1285 | timer(d, setup=setup) |
|
1261 | 1286 | fm.end() |
|
1262 | 1287 | |
|
1263 | 1288 | |
|
1264 | 1289 | @command(b'perf::dirstatewrite|perfdirstatewrite', formatteropts) |
|
1265 | 1290 | def perfdirstatewrite(ui, repo, **opts): |
|
1266 | 1291 | """benchmark the time it takes to write a dirstate on disk""" |
|
1267 | 1292 | opts = _byteskwargs(opts) |
|
1268 | 1293 | timer, fm = gettimer(ui, opts) |
|
1269 | 1294 | ds = repo.dirstate |
|
1270 | 1295 | b"a" in ds |
|
1271 | 1296 | |
|
1272 | 1297 | def setup(): |
|
1273 | 1298 | ds._dirty = True |
|
1274 | 1299 | |
|
1275 | 1300 | def d(): |
|
1276 | 1301 | ds.write(repo.currenttransaction()) |
|
1277 | 1302 | |
|
1278 | 1303 | timer(d, setup=setup) |
|
1279 | 1304 | fm.end() |
|
1280 | 1305 | |
|
1281 | 1306 | |
|
1282 | 1307 | def _getmergerevs(repo, opts): |
|
1283 | 1308 | """parse command arguments to return the revs involved in a merge |
|
1284 | 1309 | |
|
1285 | 1310 | input: options dictionary with `rev`, `from` and `base` |
|
1286 | 1311 | output: (localctx, otherctx, basectx) |
|
1287 | 1312 | """ |
|
1288 | 1313 | if opts[b'from']: |
|
1289 | 1314 | fromrev = scmutil.revsingle(repo, opts[b'from']) |
|
1290 | 1315 | wctx = repo[fromrev] |
|
1291 | 1316 | else: |
|
1292 | 1317 | wctx = repo[None] |
|
1293 | 1318 | # we don't want working dir files to be stat'd in the benchmark, so |
|
1294 | 1319 | # prime that cache |
|
1295 | 1320 | wctx.dirty() |
|
1296 | 1321 | rctx = scmutil.revsingle(repo, opts[b'rev'], opts[b'rev']) |
|
1297 | 1322 | if opts[b'base']: |
|
1298 | 1323 | fromrev = scmutil.revsingle(repo, opts[b'base']) |
|
1299 | 1324 | ancestor = repo[fromrev] |
|
1300 | 1325 | else: |
|
1301 | 1326 | ancestor = wctx.ancestor(rctx) |
|
1302 | 1327 | return (wctx, rctx, ancestor) |
|
1303 | 1328 | |
|
1304 | 1329 | |
|
1305 | 1330 | @command( |
|
1306 | 1331 | b'perf::mergecalculate|perfmergecalculate', |
|
1307 | 1332 | [ |
|
1308 | 1333 | (b'r', b'rev', b'.', b'rev to merge against'), |
|
1309 | 1334 | (b'', b'from', b'', b'rev to merge from'), |
|
1310 | 1335 | (b'', b'base', b'', b'the revision to use as base'), |
|
1311 | 1336 | ] |
|
1312 | 1337 | + formatteropts, |
|
1313 | 1338 | ) |
|
1314 | 1339 | def perfmergecalculate(ui, repo, **opts): |
|
1315 | 1340 | opts = _byteskwargs(opts) |
|
1316 | 1341 | timer, fm = gettimer(ui, opts) |
|
1317 | 1342 | |
|
1318 | 1343 | wctx, rctx, ancestor = _getmergerevs(repo, opts) |
|
1319 | 1344 | |
|
1320 | 1345 | def d(): |
|
1321 | 1346 | # acceptremote is True because we don't want prompts in the middle of |
|
1322 | 1347 | # our benchmark |
|
1323 | 1348 | merge.calculateupdates( |
|
1324 | 1349 | repo, |
|
1325 | 1350 | wctx, |
|
1326 | 1351 | rctx, |
|
1327 | 1352 | [ancestor], |
|
1328 | 1353 | branchmerge=False, |
|
1329 | 1354 | force=False, |
|
1330 | 1355 | acceptremote=True, |
|
1331 | 1356 | followcopies=True, |
|
1332 | 1357 | ) |
|
1333 | 1358 | |
|
1334 | 1359 | timer(d) |
|
1335 | 1360 | fm.end() |
|
1336 | 1361 | |
|
1337 | 1362 | |
|
1338 | 1363 | @command( |
|
1339 | 1364 | b'perf::mergecopies|perfmergecopies', |
|
1340 | 1365 | [ |
|
1341 | 1366 | (b'r', b'rev', b'.', b'rev to merge against'), |
|
1342 | 1367 | (b'', b'from', b'', b'rev to merge from'), |
|
1343 | 1368 | (b'', b'base', b'', b'the revision to use as base'), |
|
1344 | 1369 | ] |
|
1345 | 1370 | + formatteropts, |
|
1346 | 1371 | ) |
|
1347 | 1372 | def perfmergecopies(ui, repo, **opts): |
|
1348 | 1373 | """measure runtime of `copies.mergecopies`""" |
|
1349 | 1374 | opts = _byteskwargs(opts) |
|
1350 | 1375 | timer, fm = gettimer(ui, opts) |
|
1351 | 1376 | wctx, rctx, ancestor = _getmergerevs(repo, opts) |
|
1352 | 1377 | |
|
1353 | 1378 | def d(): |
|
1354 | 1379 | # acceptremote is True because we don't want prompts in the middle of |
|
1355 | 1380 | # our benchmark |
|
1356 | 1381 | copies.mergecopies(repo, wctx, rctx, ancestor) |
|
1357 | 1382 | |
|
1358 | 1383 | timer(d) |
|
1359 | 1384 | fm.end() |
|
1360 | 1385 | |
|
1361 | 1386 | |
|
1362 | 1387 | @command(b'perf::pathcopies|perfpathcopies', [], b"REV REV") |
|
1363 | 1388 | def perfpathcopies(ui, repo, rev1, rev2, **opts): |
|
1364 | 1389 | """benchmark the copy tracing logic""" |
|
1365 | 1390 | opts = _byteskwargs(opts) |
|
1366 | 1391 | timer, fm = gettimer(ui, opts) |
|
1367 | 1392 | ctx1 = scmutil.revsingle(repo, rev1, rev1) |
|
1368 | 1393 | ctx2 = scmutil.revsingle(repo, rev2, rev2) |
|
1369 | 1394 | |
|
1370 | 1395 | def d(): |
|
1371 | 1396 | copies.pathcopies(ctx1, ctx2) |
|
1372 | 1397 | |
|
1373 | 1398 | timer(d) |
|
1374 | 1399 | fm.end() |
|
1375 | 1400 | |
|
1376 | 1401 | |
|
1377 | 1402 | @command( |
|
1378 | 1403 | b'perf::phases|perfphases', |
|
1379 | 1404 | [ |
|
1380 | 1405 | (b'', b'full', False, b'include file reading time too'), |
|
1381 | 1406 | ], |
|
1382 | 1407 | b"", |
|
1383 | 1408 | ) |
|
1384 | 1409 | def perfphases(ui, repo, **opts): |
|
1385 | 1410 | """benchmark phasesets computation""" |
|
1386 | 1411 | opts = _byteskwargs(opts) |
|
1387 | 1412 | timer, fm = gettimer(ui, opts) |
|
1388 | 1413 | _phases = repo._phasecache |
|
1389 | 1414 | full = opts.get(b'full') |
|
1390 | 1415 | |
|
1391 | 1416 | def d(): |
|
1392 | 1417 | phases = _phases |
|
1393 | 1418 | if full: |
|
1394 | 1419 | clearfilecache(repo, b'_phasecache') |
|
1395 | 1420 | phases = repo._phasecache |
|
1396 | 1421 | phases.invalidate() |
|
1397 | 1422 | phases.loadphaserevs(repo) |
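# With --full, the `_phasecache` filecache entry is dropped inside the timed
# function, so each run also pays for re-reading the phase roots from disk
# rather than only recomputing the in-memory phase sets.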
|
1398 | 1423 | |
|
1399 | 1424 | timer(d) |
|
1400 | 1425 | fm.end() |
|
1401 | 1426 | |
|
1402 | 1427 | |
|
1403 | 1428 | @command(b'perf::phasesremote|perfphasesremote', [], b"[DEST]") |
|
1404 | 1429 | def perfphasesremote(ui, repo, dest=None, **opts): |
|
1405 | 1430 | """benchmark time needed to analyse phases of the remote server""" |
|
1406 | 1431 | from mercurial.node import bin |
|
1407 | 1432 | from mercurial import ( |
|
1408 | 1433 | exchange, |
|
1409 | 1434 | hg, |
|
1410 | 1435 | phases, |
|
1411 | 1436 | ) |
|
1412 | 1437 | |
|
1413 | 1438 | opts = _byteskwargs(opts) |
|
1414 | 1439 | timer, fm = gettimer(ui, opts) |
|
1415 | 1440 | |
|
1416 | 1441 | path = ui.getpath(dest, default=(b'default-push', b'default')) |
|
1417 | 1442 | if not path: |
|
1418 | 1443 | raise error.Abort( |
|
1419 | 1444 | b'default repository not configured!', |
|
1420 | 1445 | hint=b"see 'hg help config.paths'", |
|
1421 | 1446 | ) |
|
1422 | 1447 | dest = path.pushloc or path.loc |
|
1423 | 1448 | ui.statusnoi18n(b'analysing phase of %s\n' % util.hidepassword(dest)) |
|
1424 | 1449 | other = hg.peer(repo, opts, dest) |
|
1425 | 1450 | |
|
1426 | 1451 | # easier to perform discovery through the operation |
|
1427 | 1452 | op = exchange.pushoperation(repo, other) |
|
1428 | 1453 | exchange._pushdiscoverychangeset(op) |
|
1429 | 1454 | |
|
1430 | 1455 | remotesubset = op.fallbackheads |
|
1431 | 1456 | |
|
1432 | 1457 | with other.commandexecutor() as e: |
|
1433 | 1458 | remotephases = e.callcommand( |
|
1434 | 1459 | b'listkeys', {b'namespace': b'phases'} |
|
1435 | 1460 | ).result() |
|
1436 | 1461 | del other |
|
1437 | 1462 | publishing = remotephases.get(b'publishing', False) |
|
1438 | 1463 | if publishing: |
|
1439 | 1464 | ui.statusnoi18n(b'publishing: yes\n') |
|
1440 | 1465 | else: |
|
1441 | 1466 | ui.statusnoi18n(b'publishing: no\n') |
|
1442 | 1467 | |
|
1443 | 1468 | has_node = getattr(repo.changelog.index, 'has_node', None) |
|
1444 | 1469 | if has_node is None: |
|
1445 | 1470 | has_node = repo.changelog.nodemap.__contains__ |
|
1446 | 1471 | nonpublishroots = 0 |
|
1447 | 1472 | for nhex, phase in remotephases.iteritems(): |
|
1448 | 1473 | if nhex == b'publishing': # ignore data related to publish option |
|
1449 | 1474 | continue |
|
1450 | 1475 | node = bin(nhex) |
|
1451 | 1476 | if has_node(node) and int(phase): |
|
1452 | 1477 | nonpublishroots += 1 |
|
1453 | 1478 | ui.statusnoi18n(b'number of roots: %d\n' % len(remotephases)) |
|
1454 | 1479 | ui.statusnoi18n(b'number of known non public roots: %d\n' % nonpublishroots) |
|
1455 | 1480 | |
|
1456 | 1481 | def d(): |
|
1457 | 1482 | phases.remotephasessummary(repo, remotesubset, remotephases) |
|
1458 | 1483 | |
|
1459 | 1484 | timer(d) |
|
1460 | 1485 | fm.end() |
|
1461 | 1486 | |
|
1462 | 1487 | |
|
1463 | 1488 | @command( |
|
1464 | 1489 | b'perf::manifest|perfmanifest', |
|
1465 | 1490 | [ |
|
1466 | 1491 | (b'm', b'manifest-rev', False, b'Look up a manifest node revision'), |
|
1467 | 1492 | (b'', b'clear-disk', False, b'clear on-disk caches too'), |
|
1468 | 1493 | ] |
|
1469 | 1494 | + formatteropts, |
|
1470 | 1495 | b'REV|NODE', |
|
1471 | 1496 | ) |
|
1472 | 1497 | def perfmanifest(ui, repo, rev, manifest_rev=False, clear_disk=False, **opts): |
|
1473 | 1498 | """benchmark the time to read a manifest from disk and return a usable |
|
1474 | 1499 | dict-like object |
|
1475 | 1500 | |
|
1476 | 1501 | Manifest caches are cleared before retrieval.""" |
|
1477 | 1502 | opts = _byteskwargs(opts) |
|
1478 | 1503 | timer, fm = gettimer(ui, opts) |
|
1479 | 1504 | if not manifest_rev: |
|
1480 | 1505 | ctx = scmutil.revsingle(repo, rev, rev) |
|
1481 | 1506 | t = ctx.manifestnode() |
|
1482 | 1507 | else: |
|
1483 | 1508 | from mercurial.node import bin |
|
1484 | 1509 | |
|
1485 | 1510 | if len(rev) == 40: |
|
1486 | 1511 | t = bin(rev) |
|
1487 | 1512 | else: |
|
1488 | 1513 | try: |
|
1489 | 1514 | rev = int(rev) |
|
1490 | 1515 | |
|
1491 | 1516 | if util.safehasattr(repo.manifestlog, b'getstorage'): |
|
1492 | 1517 | t = repo.manifestlog.getstorage(b'').node(rev) |
|
1493 | 1518 | else: |
|
1494 | 1519 | t = repo.manifestlog._revlog.lookup(rev) |
|
1495 | 1520 | except ValueError: |
|
1496 | 1521 | raise error.Abort( |
|
1497 | 1522 | b'manifest revision must be integer or full node' |
|
1498 | 1523 | ) |
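# `getstorage` is the newer manifestlog accessor; older Mercurial exposed
# the underlying revlog as `_revlog`, hence the safehasattr probe above.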
|
1499 | 1524 | |
|
1500 | 1525 | def d(): |
|
1501 | 1526 | repo.manifestlog.clearcaches(clear_persisted_data=clear_disk) |
|
1502 | 1527 | repo.manifestlog[t].read() |
|
1503 | 1528 | |
|
1504 | 1529 | timer(d) |
|
1505 | 1530 | fm.end() |
|
1506 | 1531 | |
|
1507 | 1532 | |
|
1508 | 1533 | @command(b'perf::changeset|perfchangeset', formatteropts) |
|
1509 | 1534 | def perfchangeset(ui, repo, rev, **opts): |
|
1510 | 1535 | opts = _byteskwargs(opts) |
|
1511 | 1536 | timer, fm = gettimer(ui, opts) |
|
1512 | 1537 | n = scmutil.revsingle(repo, rev).node() |
|
1513 | 1538 | |
|
1514 | 1539 | def d(): |
|
1515 | 1540 | repo.changelog.read(n) |
|
1516 | 1541 | # repo.changelog._cache = None |
|
1517 | 1542 | |
|
1518 | 1543 | timer(d) |
|
1519 | 1544 | fm.end() |
|
1520 | 1545 | |
|
1521 | 1546 | |
|
1522 | 1547 | @command(b'perf::ignore|perfignore', formatteropts) |
|
1523 | 1548 | def perfignore(ui, repo, **opts): |
|
1524 | 1549 | """benchmark operation related to computing ignore""" |
|
1525 | 1550 | opts = _byteskwargs(opts) |
|
1526 | 1551 | timer, fm = gettimer(ui, opts) |
|
1527 | 1552 | dirstate = repo.dirstate |
|
1528 | 1553 | |
|
1529 | 1554 | def setupone(): |
|
1530 | 1555 | dirstate.invalidate() |
|
1531 | 1556 | clearfilecache(dirstate, b'_ignore') |
|
1532 | 1557 | |
|
1533 | 1558 | def runone(): |
|
1534 | 1559 | dirstate._ignore |
|
1535 | 1560 | |
|
1536 | 1561 | timer(runone, setup=setupone, title=b"load") |
|
1537 | 1562 | fm.end() |
|
1538 | 1563 | |
|
1539 | 1564 | |
|
1540 | 1565 | @command( |
|
1541 | 1566 | b'perf::index|perfindex', |
|
1542 | 1567 | [ |
|
1543 | 1568 | (b'', b'rev', [], b'revision to be looked up (default tip)'), |
|
1544 | 1569 | (b'', b'no-lookup', None, b'do not revision lookup post creation'), |
|
1545 | 1570 | ] |
|
1546 | 1571 | + formatteropts, |
|
1547 | 1572 | ) |
|
1548 | 1573 | def perfindex(ui, repo, **opts): |
|
1549 | 1574 | """benchmark index creation time followed by a lookup |
|
1550 | 1575 | |
|
1551 | 1576 | The default is to look `tip` up. Depending on the index implementation, |
|
1552 | 1577 | the revision looked up can matter. For example, an implementation |
|
1553 | 1578 | scanning the index will have a faster lookup time for `--rev tip` than for |
|
1554 | 1579 | `--rev 0`. The number of looked up revisions and their order can also |
|
1555 | 1580 | matter. |
|
1556 | 1581 | |
|
1557 | 1582 | Examples of useful sets to test: |
|
1558 | 1583 | |
|
1559 | 1584 | * tip |
|
1560 | 1585 | * 0 |
|
1561 | 1586 | * -10: |
|
1562 | 1587 | * :10 |
|
1563 | 1588 | * -10: + :10 |
|
1564 | 1589 | * :10: + -10: |
|
1565 | 1590 | * -10000: |
|
1566 | 1591 | * -10000: + 0 |
|
1567 | 1592 | |
|
1568 | 1593 | It is not currently possible to check for lookup of a missing node. For |
|
1569 | 1594 | deeper lookup benchmarking, check out the `perfnodemap` command.""" |
|
1570 | 1595 | import mercurial.revlog |
|
1571 | 1596 | |
|
1572 | 1597 | opts = _byteskwargs(opts) |
|
1573 | 1598 | timer, fm = gettimer(ui, opts) |
|
1574 | 1599 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg |
|
1575 | 1600 | if opts[b'no_lookup']: |
|
1576 | 1601 | if opts[b'rev']: |
|
1577 | 1602 | raise error.Abort(b'--no-lookup and --rev are mutually exclusive') |
|
1578 | 1603 | nodes = [] |
|
1579 | 1604 | elif not opts[b'rev']: |
|
1580 | 1605 | nodes = [repo[b"tip"].node()] |
|
1581 | 1606 | else: |
|
1582 | 1607 | revs = scmutil.revrange(repo, opts[b'rev']) |
|
1583 | 1608 | cl = repo.changelog |
|
1584 | 1609 | nodes = [cl.node(r) for r in revs] |
|
1585 | 1610 | |
|
1586 | 1611 | unfi = repo.unfiltered() |
|
1587 | 1612 | # find the filecache func directly |
|
1588 | 1613 | # This avoids polluting the benchmark with the filecache logic |
|
1589 | 1614 | makecl = unfi.__class__.changelog.func |
|
1590 | 1615 | |
|
1591 | 1616 | def setup(): |
|
1592 | 1617 | # probably not necessary, but for good measure |
|
1593 | 1618 | clearchangelog(unfi) |
|
1594 | 1619 | |
|
1595 | 1620 | def d(): |
|
1596 | 1621 | cl = makecl(unfi) |
|
1597 | 1622 | for n in nodes: |
|
1598 | 1623 | cl.rev(n) |
|
1599 | 1624 | |
|
1600 | 1625 | timer(d, setup=setup) |
|
1601 | 1626 | fm.end() |
|
1602 | 1627 | |
|
1603 | 1628 | |
|
1604 | 1629 | @command( |
|
1605 | 1630 | b'perf::nodemap|perfnodemap', |
|
1606 | 1631 | [ |
|
1607 | 1632 | (b'', b'rev', [], b'revision to be looked up (default tip)'), |
|
1608 | 1633 | (b'', b'clear-caches', True, b'clear revlog cache between calls'), |
|
1609 | 1634 | ] |
|
1610 | 1635 | + formatteropts, |
|
1611 | 1636 | ) |
|
1612 | 1637 | def perfnodemap(ui, repo, **opts): |
|
1613 | 1638 | """benchmark the time necessary to look up revisions from a cold nodemap |
|
1614 | 1639 | |
|
1615 | 1640 | Depending on the implementation, the number and order of revisions we look |
|
1616 | 1641 | up can vary. Examples of useful sets to test: |
|
1617 | 1642 | * tip |
|
1618 | 1643 | * 0 |
|
1619 | 1644 | * -10: |
|
1620 | 1645 | * :10 |
|
1621 | 1646 | * -10: + :10 |
|
1622 | 1647 | * :10: + -10: |
|
1623 | 1648 | * -10000: |
|
1624 | 1649 | * -10000: + 0 |
|
1625 | 1650 | |
|
1626 | 1651 | The command currently focuses on valid binary lookup. Benchmarking for |
|
1627 | 1652 | hexlookup, prefix lookup and missing lookup would also be valuable. |
|
1628 | 1653 | """ |
|
1629 | 1654 | import mercurial.revlog |
|
1630 | 1655 | |
|
1631 | 1656 | opts = _byteskwargs(opts) |
|
1632 | 1657 | timer, fm = gettimer(ui, opts) |
|
1633 | 1658 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg |
|
1634 | 1659 | |
|
1635 | 1660 | unfi = repo.unfiltered() |
|
1636 | 1661 | clearcaches = opts[b'clear_caches'] |
|
1637 | 1662 | # find the filecache func directly |
|
1638 | 1663 | # This avoids polluting the benchmark with the filecache logic |
|
1639 | 1664 | makecl = unfi.__class__.changelog.func |
|
1640 | 1665 | if not opts[b'rev']: |
|
1641 | 1666 | raise error.Abort(b'use --rev to specify revisions to look up') |
|
1642 | 1667 | revs = scmutil.revrange(repo, opts[b'rev']) |
|
1643 | 1668 | cl = repo.changelog |
|
1644 | 1669 | nodes = [cl.node(r) for r in revs] |
|
1645 | 1670 | |
|
1646 | 1671 | # use a list to pass reference to a nodemap from one closure to the next |
|
1647 | 1672 | nodeget = [None] |
|
1648 | 1673 | |
|
1649 | 1674 | def setnodeget(): |
|
1650 | 1675 | # probably not necessary, but for good measure |
|
1651 | 1676 | clearchangelog(unfi) |
|
1652 | 1677 | cl = makecl(unfi) |
|
1653 | 1678 | if util.safehasattr(cl.index, 'get_rev'): |
|
1654 | 1679 | nodeget[0] = cl.index.get_rev |
|
1655 | 1680 | else: |
|
1656 | 1681 | nodeget[0] = cl.nodemap.get |
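# `index.get_rev` is the newer nodemap API; older indexes expose the
# mapping as `nodemap.get`, hence the feature probe above.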
|
1657 | 1682 | |
|
1658 | 1683 | def d(): |
|
1659 | 1684 | get = nodeget[0] |
|
1660 | 1685 | for n in nodes: |
|
1661 | 1686 | get(n) |
|
1662 | 1687 | |
|
1663 | 1688 | setup = None |
|
1664 | 1689 | if clearcaches: |
|
1665 | 1690 | |
|
1666 | 1691 | def setup(): |
|
1667 | 1692 | setnodeget() |
|
1668 | 1693 | |
|
1669 | 1694 | else: |
|
1670 | 1695 | setnodeget() |
|
1671 | 1696 | d() # prewarm the data structure |
|
1672 | 1697 | timer(d, setup=setup) |
|
1673 | 1698 | fm.end() |
|
1674 | 1699 | |
|
1675 | 1700 | |
|
1676 | 1701 | @command(b'perf::startup|perfstartup', formatteropts) |
|
1677 | 1702 | def perfstartup(ui, repo, **opts): |
|
1678 | 1703 | opts = _byteskwargs(opts) |
|
1679 | 1704 | timer, fm = gettimer(ui, opts) |
|
1680 | 1705 | |
|
1681 | 1706 | def d(): |
|
1682 | 1707 | if os.name != 'nt': |
|
1683 | 1708 | os.system( |
|
1684 | 1709 | b"HGRCPATH= %s version -q > /dev/null" % fsencode(sys.argv[0]) |
|
1685 | 1710 | ) |
|
1686 | 1711 | else: |
|
1687 | 1712 | os.environ['HGRCPATH'] = r' ' |
|
1688 | 1713 | os.system("%s version -q > NUL" % sys.argv[0]) |
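# HGRCPATH is pointed at nothing so the spawned hg does not pay for user or
# system config files; on Windows a single space is used because setting an
# environment variable to an empty string would presumably remove it.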
|
1689 | 1714 | |
|
1690 | 1715 | timer(d) |
|
1691 | 1716 | fm.end() |
|
1692 | 1717 | |
|
1693 | 1718 | |
|
1694 | 1719 | @command(b'perf::parents|perfparents', formatteropts) |
|
1695 | 1720 | def perfparents(ui, repo, **opts): |
|
1696 | 1721 | """benchmark the time necessary to fetch one changeset's parents. |
|
1697 | 1722 | |
|
1698 | 1723 | The fetch is done using the `node identifier`, traversing all object layers |
|
1699 | 1724 | from the repository object. The first N revisions will be used for this |
|
1700 | 1725 | benchmark. N is controlled by the ``perf.parentscount`` config option |
|
1701 | 1726 | (default: 1000). |
|
1702 | 1727 | """ |
|
1703 | 1728 | opts = _byteskwargs(opts) |
|
1704 | 1729 | timer, fm = gettimer(ui, opts) |
|
1705 | 1730 | # control the number of commits perfparents iterates over |
|
1706 | 1731 | # experimental config: perf.parentscount |
|
1707 | 1732 | count = getint(ui, b"perf", b"parentscount", 1000) |
|
1708 | 1733 | if len(repo.changelog) < count: |
|
1709 | 1734 | raise error.Abort(b"repo needs %d commits for this test" % count) |
|
1710 | 1735 | repo = repo.unfiltered() |
|
1711 | 1736 | nl = [repo.changelog.node(i) for i in _xrange(count)] |
|
1712 | 1737 | |
|
1713 | 1738 | def d(): |
|
1714 | 1739 | for n in nl: |
|
1715 | 1740 | repo.changelog.parents(n) |
|
1716 | 1741 | |
|
1717 | 1742 | timer(d) |
|
1718 | 1743 | fm.end() |
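# Illustrative invocation overriding the revision count (config knob
# documented in the docstring above):
#   $ hg perf::parents --config perf.parentscount=100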
|
1719 | 1744 | |
|
1720 | 1745 | |
|
1721 | 1746 | @command(b'perf::ctxfiles|perfctxfiles', formatteropts) |
|
1722 | 1747 | def perfctxfiles(ui, repo, x, **opts): |
|
1723 | 1748 | opts = _byteskwargs(opts) |
|
1724 | 1749 | x = int(x) |
|
1725 | 1750 | timer, fm = gettimer(ui, opts) |
|
1726 | 1751 | |
|
1727 | 1752 | def d(): |
|
1728 | 1753 | len(repo[x].files()) |
|
1729 | 1754 | |
|
1730 | 1755 | timer(d) |
|
1731 | 1756 | fm.end() |
|
1732 | 1757 | |
|
1733 | 1758 | |
|
1734 | 1759 | @command(b'perf::rawfiles|perfrawfiles', formatteropts) |
|
1735 | 1760 | def perfrawfiles(ui, repo, x, **opts): |
|
1736 | 1761 | opts = _byteskwargs(opts) |
|
1737 | 1762 | x = int(x) |
|
1738 | 1763 | timer, fm = gettimer(ui, opts) |
|
1739 | 1764 | cl = repo.changelog |
|
1740 | 1765 | |
|
1741 | 1766 | def d(): |
|
1742 | 1767 | len(cl.read(x)[3]) |
|
1743 | 1768 | |
|
1744 | 1769 | timer(d) |
|
1745 | 1770 | fm.end() |
|
1746 | 1771 | |
|
1747 | 1772 | |
|
1748 | 1773 | @command(b'perf::lookup|perflookup', formatteropts) |
|
1749 | 1774 | def perflookup(ui, repo, rev, **opts): |
|
1750 | 1775 | opts = _byteskwargs(opts) |
|
1751 | 1776 | timer, fm = gettimer(ui, opts) |
|
1752 | 1777 | timer(lambda: len(repo.lookup(rev))) |
|
1753 | 1778 | fm.end() |
|
1754 | 1779 | |
|
1755 | 1780 | |
|
1756 | 1781 | @command( |
|
1757 | 1782 | b'perf::linelogedits|perflinelogedits', |
|
1758 | 1783 | [ |
|
1759 | 1784 | (b'n', b'edits', 10000, b'number of edits'), |
|
1760 | 1785 | (b'', b'max-hunk-lines', 10, b'max lines in a hunk'), |
|
1761 | 1786 | ], |
|
1762 | 1787 | norepo=True, |
|
1763 | 1788 | ) |
|
1764 | 1789 | def perflinelogedits(ui, **opts): |
|
1765 | 1790 | from mercurial import linelog |
|
1766 | 1791 | |
|
1767 | 1792 | opts = _byteskwargs(opts) |
|
1768 | 1793 | |
|
1769 | 1794 | edits = opts[b'edits'] |
|
1770 | 1795 | maxhunklines = opts[b'max_hunk_lines'] |
|
1771 | 1796 | |
|
1772 | 1797 | maxb1 = 100000 |
|
1773 | 1798 | random.seed(0) |
|
1774 | 1799 | randint = random.randint |
|
1775 | 1800 | currentlines = 0 |
|
1776 | 1801 | arglist = [] |
|
1777 | 1802 | for rev in _xrange(edits): |
|
1778 | 1803 | a1 = randint(0, currentlines) |
|
1779 | 1804 | a2 = randint(a1, min(currentlines, a1 + maxhunklines)) |
|
1780 | 1805 | b1 = randint(0, maxb1) |
|
1781 | 1806 | b2 = randint(b1, b1 + maxhunklines) |
|
1782 | 1807 | currentlines += (b2 - b1) - (a2 - a1) |
|
1783 | 1808 | arglist.append((rev, a1, a2, b1, b2)) |
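# Each tuple queued above is one synthetic edit: at revision `rev`, replace
# lines [a1, a2) of the current text with lines [b1, b2) of new content;
# `currentlines` tracks the resulting length so later hunks stay in bounds.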
|
1784 | 1809 | |
|
1785 | 1810 | def d(): |
|
1786 | 1811 | ll = linelog.linelog() |
|
1787 | 1812 | for args in arglist: |
|
1788 | 1813 | ll.replacelines(*args) |
|
1789 | 1814 | |
|
1790 | 1815 | timer, fm = gettimer(ui, opts) |
|
1791 | 1816 | timer(d) |
|
1792 | 1817 | fm.end() |
|
1793 | 1818 | |
|
1794 | 1819 | |
|
1795 | 1820 | @command(b'perf::revrange|perfrevrange', formatteropts) |
|
1796 | 1821 | def perfrevrange(ui, repo, *specs, **opts): |
|
1797 | 1822 | opts = _byteskwargs(opts) |
|
1798 | 1823 | timer, fm = gettimer(ui, opts) |
|
1799 | 1824 | revrange = scmutil.revrange |
|
1800 | 1825 | timer(lambda: len(revrange(repo, specs))) |
|
1801 | 1826 | fm.end() |
|
1802 | 1827 | |
|
1803 | 1828 | |
|
1804 | 1829 | @command(b'perf::nodelookup|perfnodelookup', formatteropts) |
|
1805 | 1830 | def perfnodelookup(ui, repo, rev, **opts): |
|
1806 | 1831 | opts = _byteskwargs(opts) |
|
1807 | 1832 | timer, fm = gettimer(ui, opts) |
|
1808 | 1833 | import mercurial.revlog |
|
1809 | 1834 | |
|
1810 | 1835 | mercurial.revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg |
|
1811 | 1836 | n = scmutil.revsingle(repo, rev).node() |
|
1812 | cl = mercurial.revlog.revlog(getsvfs(repo), b"00changelog.i") | |
|
1837 | ||
|
1838 | try: | |
|
1839 | cl = revlog(getsvfs(repo), radix=b"00changelog") | |
|
1840 | except TypeError: | |
|
1841 | cl = revlog(getsvfs(repo), indexfile=b"00changelog.i") | |
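# `revlog` here is expected to be a module-level compatibility wrapper
# defined earlier in perf.py (not shown in this hunk). Recent Mercurial
# revlogs are constructed with a `radix`, while older ones took an
# `indexfile`; the TypeError fallback picks whichever signature works.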
|
1813 | 1842 | |
|
1814 | 1843 | def d(): |
|
1815 | 1844 | cl.rev(n) |
|
1816 | 1845 | clearcaches(cl) |
|
1817 | 1846 | |
|
1818 | 1847 | timer(d) |
|
1819 | 1848 | fm.end() |
|
1820 | 1849 | |
|
1821 | 1850 | |
|
1822 | 1851 | @command( |
|
1823 | 1852 | b'perf::log|perflog', |
|
1824 | 1853 | [(b'', b'rename', False, b'ask log to follow renames')] + formatteropts, |
|
1825 | 1854 | ) |
|
1826 | 1855 | def perflog(ui, repo, rev=None, **opts): |
|
1827 | 1856 | opts = _byteskwargs(opts) |
|
1828 | 1857 | if rev is None: |
|
1829 | 1858 | rev = [] |
|
1830 | 1859 | timer, fm = gettimer(ui, opts) |
|
1831 | 1860 | ui.pushbuffer() |
|
1832 | 1861 | timer( |
|
1833 | 1862 | lambda: commands.log( |
|
1834 | 1863 | ui, repo, rev=rev, date=b'', user=b'', copies=opts.get(b'rename') |
|
1835 | 1864 | ) |
|
1836 | 1865 | ) |
|
1837 | 1866 | ui.popbuffer() |
|
1838 | 1867 | fm.end() |
|
1839 | 1868 | |
|
1840 | 1869 | |
|
1841 | 1870 | @command(b'perf::moonwalk|perfmoonwalk', formatteropts) |
|
1842 | 1871 | def perfmoonwalk(ui, repo, **opts): |
|
1843 | 1872 | """benchmark walking the changelog backwards |
|
1844 | 1873 | |
|
1845 | 1874 | This also loads the changelog data for each revision in the changelog. |
|
1846 | 1875 | """ |
|
1847 | 1876 | opts = _byteskwargs(opts) |
|
1848 | 1877 | timer, fm = gettimer(ui, opts) |
|
1849 | 1878 | |
|
1850 | 1879 | def moonwalk(): |
|
1851 | 1880 | for i in repo.changelog.revs(start=(len(repo) - 1), stop=-1): |
|
1852 | 1881 | ctx = repo[i] |
|
1853 | 1882 | ctx.branch() # read changelog data (in addition to the index) |
|
1854 | 1883 | |
|
1855 | 1884 | timer(moonwalk) |
|
1856 | 1885 | fm.end() |
|
1857 | 1886 | |
|
1858 | 1887 | |
|
1859 | 1888 | @command( |
|
1860 | 1889 | b'perf::templating|perftemplating', |
|
1861 | 1890 | [ |
|
1862 | 1891 | (b'r', b'rev', [], b'revisions to run the template on'), |
|
1863 | 1892 | ] |
|
1864 | 1893 | + formatteropts, |
|
1865 | 1894 | ) |
|
1866 | 1895 | def perftemplating(ui, repo, testedtemplate=None, **opts): |
|
1867 | 1896 | """test the rendering time of a given template""" |
|
1868 | 1897 | if makelogtemplater is None: |
|
1869 | 1898 | raise error.Abort( |
|
1870 | 1899 | b"perftemplating not available with this Mercurial", |
|
1871 | 1900 | hint=b"use 4.3 or later", |
|
1872 | 1901 | ) |
|
1873 | 1902 | |
|
1874 | 1903 | opts = _byteskwargs(opts) |
|
1875 | 1904 | |
|
1876 | 1905 | nullui = ui.copy() |
|
1877 | 1906 | nullui.fout = open(os.devnull, 'wb') |
|
1878 | 1907 | nullui.disablepager() |
|
1879 | 1908 | revs = opts.get(b'rev') |
|
1880 | 1909 | if not revs: |
|
1881 | 1910 | revs = [b'all()'] |
|
1882 | 1911 | revs = list(scmutil.revrange(repo, revs)) |
|
1883 | 1912 | |
|
1884 | 1913 | defaulttemplate = ( |
|
1885 | 1914 | b'{date|shortdate} [{rev}:{node|short}]' |
|
1886 | 1915 | b' {author|person}: {desc|firstline}\n' |
|
1887 | 1916 | ) |
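# With the default template, each revision renders roughly like this
# (hypothetical changeset):
#   2021-05-01 [1234:abcdef012345] Jane Doe: fix frobnication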
|
1888 | 1917 | if testedtemplate is None: |
|
1889 | 1918 | testedtemplate = defaulttemplate |
|
1890 | 1919 | displayer = makelogtemplater(nullui, repo, testedtemplate) |
|
1891 | 1920 | |
|
1892 | 1921 | def format(): |
|
1893 | 1922 | for r in revs: |
|
1894 | 1923 | ctx = repo[r] |
|
1895 | 1924 | displayer.show(ctx) |
|
1896 | 1925 | displayer.flush(ctx) |
|
1897 | 1926 | |
|
1898 | 1927 | timer, fm = gettimer(ui, opts) |
|
1899 | 1928 | timer(format) |
|
1900 | 1929 | fm.end() |
|
1901 | 1930 | |
|
1902 | 1931 | |
|
1903 | 1932 | def _displaystats(ui, opts, entries, data): |
|
1904 | 1933 | # use a second formatter because the data are quite different, not sure |
|
1905 | 1934 | # how it flies with the templater. |
|
1906 | 1935 | fm = ui.formatter(b'perf-stats', opts) |
|
1907 | 1936 | for key, title in entries: |
|
1908 | 1937 | values = data[key] |
|
1909 | 1938 | nbvalues = len(values) |
|
1910 | 1939 | values.sort() |
|
1911 | 1940 | stats = { |
|
1912 | 1941 | 'key': key, |
|
1913 | 1942 | 'title': title, |
|
1914 | 1943 | 'nbitems': len(values), |
|
1915 | 1944 | 'min': values[0][0], |
|
1916 | 1945 | '10%': values[(nbvalues * 10) // 100][0], |
|
1917 | 1946 | '25%': values[(nbvalues * 25) // 100][0], |
|
1918 | 1947 | '50%': values[(nbvalues * 50) // 100][0], |
|
1919 | 1948 | '75%': values[(nbvalues * 75) // 100][0], |
|
1920 | 1949 | '80%': values[(nbvalues * 80) // 100][0], |
|
1921 | 1950 | '85%': values[(nbvalues * 85) // 100][0], |
|
1922 | 1951 | '90%': values[(nbvalues * 90) // 100][0], |
|
1923 | 1952 | '95%': values[(nbvalues * 95) // 100][0], |
|
1924 | 1953 | '99%': values[(nbvalues * 99) // 100][0], |
|
1925 | 1954 | 'max': values[-1][0], |
|
1926 | 1955 | } |
|
1927 | 1956 | fm.startitem() |
|
1928 | 1957 | fm.data(**stats) |
|
1929 | 1958 | # make node pretty for the human output |
|
1930 | 1959 | fm.plain('### %s (%d items)\n' % (title, len(values))) |
|
1931 | 1960 | lines = [ |
|
1932 | 1961 | 'min', |
|
1933 | 1962 | '10%', |
|
1934 | 1963 | '25%', |
|
1935 | 1964 | '50%', |
|
1936 | 1965 | '75%', |
|
1937 | 1966 | '80%', |
|
1938 | 1967 | '85%', |
|
1939 | 1968 | '90%', |
|
1940 | 1969 | '95%', |
|
1941 | 1970 | '99%', |
|
1942 | 1971 | 'max', |
|
1943 | 1972 | ] |
|
1944 | 1973 | for l in lines: |
|
1945 | 1974 | fm.plain('%s: %s\n' % (l, stats[l])) |
|
1946 | 1975 | fm.end() |
|
1947 | 1976 | |
|
1948 | 1977 | |
|
1949 | 1978 | @command( |
|
1950 | 1979 | b'perf::helper-mergecopies|perfhelper-mergecopies', |
|
1951 | 1980 | formatteropts |
|
1952 | 1981 | + [ |
|
1953 | 1982 | (b'r', b'revs', [], b'restrict search to these revisions'), |
|
1954 | 1983 | (b'', b'timing', False, b'provides extra data (costly)'), |
|
1955 | 1984 | (b'', b'stats', False, b'provides statistic about the measured data'), |
|
1956 | 1985 | ], |
|
1957 | 1986 | ) |
|
1958 | 1987 | def perfhelpermergecopies(ui, repo, revs=[], **opts): |
|
1959 | 1988 | """find statistics about potential parameters for `perfmergecopies` |
|
1960 | 1989 | |
|
1961 | 1990 | This command finds (base, p1, p2) triplets relevant for copytracing |
|
1962 | 1991 | benchmarking in the context of a merge. It reports values for some of the |
|
1963 | 1992 | parameters that impact merge copy tracing time during merge. |
|
1964 | 1993 | |
|
1965 | 1994 | If `--timing` is set, rename detection is run and the associated timing |
|
1966 | 1995 | will be reported. The extra details come at the cost of slower command |
|
1967 | 1996 | execution. |
|
1968 | 1997 | |
|
1969 | 1998 | Since rename detection is only run once, other factors might easily |
|
1970 | 1999 | affect the precision of the timing. However, it should give a good |
|
1971 | 2000 | approximation of which revision triplets are very costly. |
|
1972 | 2001 | """ |
|
1973 | 2002 | opts = _byteskwargs(opts) |
|
1974 | 2003 | fm = ui.formatter(b'perf', opts) |
|
1975 | 2004 | dotiming = opts[b'timing'] |
|
1976 | 2005 | dostats = opts[b'stats'] |
|
1977 | 2006 | |
|
1978 | 2007 | output_template = [ |
|
1979 | 2008 | ("base", "%(base)12s"), |
|
1980 | 2009 | ("p1", "%(p1.node)12s"), |
|
1981 | 2010 | ("p2", "%(p2.node)12s"), |
|
1982 | 2011 | ("p1.nb-revs", "%(p1.nbrevs)12d"), |
|
1983 | 2012 | ("p1.nb-files", "%(p1.nbmissingfiles)12d"), |
|
1984 | 2013 | ("p1.renames", "%(p1.renamedfiles)12d"), |
|
1985 | 2014 | ("p1.time", "%(p1.time)12.3f"), |
|
1986 | 2015 | ("p2.nb-revs", "%(p2.nbrevs)12d"), |
|
1987 | 2016 | ("p2.nb-files", "%(p2.nbmissingfiles)12d"), |
|
1988 | 2017 | ("p2.renames", "%(p2.renamedfiles)12d"), |
|
1989 | 2018 | ("p2.time", "%(p2.time)12.3f"), |
|
1990 | 2019 | ("renames", "%(nbrenamedfiles)12d"), |
|
1991 | 2020 | ("total.time", "%(time)12.3f"), |
|
1992 | 2021 | ] |
|
1993 | 2022 | if not dotiming: |
|
1994 | 2023 | output_template = [ |
|
1995 | 2024 | i |
|
1996 | 2025 | for i in output_template |
|
1997 | 2026 | if not ('time' in i[0] or 'renames' in i[0]) |
|
1998 | 2027 | ] |
|
1999 | 2028 | header_names = [h for (h, v) in output_template] |
|
2000 | 2029 | output = ' '.join([v for (h, v) in output_template]) + '\n' |
|
2001 | 2030 | header = ' '.join(['%12s'] * len(header_names)) + '\n' |
|
2002 | 2031 | fm.plain(header % tuple(header_names)) |
|
2003 | 2032 | |
|
2004 | 2033 | if not revs: |
|
2005 | 2034 | revs = ['all()'] |
|
2006 | 2035 | revs = scmutil.revrange(repo, revs) |
|
2007 | 2036 | |
|
2008 | 2037 | if dostats: |
|
2009 | 2038 | alldata = { |
|
2010 | 2039 | 'nbrevs': [], |
|
2011 | 2040 | 'nbmissingfiles': [], |
|
2012 | 2041 | } |
|
2013 | 2042 | if dotiming: |
|
2014 | 2043 | alldata['parentnbrenames'] = [] |
|
2015 | 2044 | alldata['totalnbrenames'] = [] |
|
2016 | 2045 | alldata['parenttime'] = [] |
|
2017 | 2046 | alldata['totaltime'] = [] |
|
2018 | 2047 | |
|
2019 | 2048 | roi = repo.revs('merge() and %ld', revs) |
|
2020 | 2049 | for r in roi: |
|
2021 | 2050 | ctx = repo[r] |
|
2022 | 2051 | p1 = ctx.p1() |
|
2023 | 2052 | p2 = ctx.p2() |
|
2024 | 2053 | bases = repo.changelog._commonancestorsheads(p1.rev(), p2.rev()) |
|
2025 | 2054 | for b in bases: |
|
2026 | 2055 | b = repo[b] |
|
2027 | 2056 | p1missing = copies._computeforwardmissing(b, p1) |
|
2028 | 2057 | p2missing = copies._computeforwardmissing(b, p2) |
|
2029 | 2058 | data = { |
|
2030 | 2059 | b'base': b.hex(), |
|
2031 | 2060 | b'p1.node': p1.hex(), |
|
2032 | 2061 | b'p1.nbrevs': len(repo.revs('only(%d, %d)', p1.rev(), b.rev())), |
|
2033 | 2062 | b'p1.nbmissingfiles': len(p1missing), |
|
2034 | 2063 | b'p2.node': p2.hex(), |
|
2035 | 2064 | b'p2.nbrevs': len(repo.revs('only(%d, %d)', p2.rev(), b.rev())), |
|
2036 | 2065 | b'p2.nbmissingfiles': len(p2missing), |
|
2037 | 2066 | } |
|
2038 | 2067 | if dostats: |
|
2039 | 2068 | if p1missing: |
|
2040 | 2069 | alldata['nbrevs'].append( |
|
2041 | 2070 | (data['p1.nbrevs'], b.hex(), p1.hex()) |
|
2042 | 2071 | ) |
|
2043 | 2072 | alldata['nbmissingfiles'].append( |
|
2044 | 2073 | (data['p1.nbmissingfiles'], b.hex(), p1.hex()) |
|
2045 | 2074 | ) |
|
2046 | 2075 | if p2missing: |
|
2047 | 2076 | alldata['nbrevs'].append( |
|
2048 | 2077 | (data['p2.nbrevs'], b.hex(), p2.hex()) |
|
2049 | 2078 | ) |
|
2050 | 2079 | alldata['nbmissingfiles'].append( |
|
2051 | 2080 | (data['p2.nbmissingfiles'], b.hex(), p2.hex()) |
|
2052 | 2081 | ) |
|
2053 | 2082 | if dotiming: |
|
2054 | 2083 | begin = util.timer() |
|
2055 | 2084 | mergedata = copies.mergecopies(repo, p1, p2, b) |
|
2056 | 2085 | end = util.timer() |
|
2057 | 2086 | # not very stable timing since we did only one run |
|
2058 | 2087 | data['time'] = end - begin |
|
2059 | 2088 | # mergedata contains five dicts: "copy", "movewithdir", |
|
2060 | 2089 | # "diverge", "renamedelete" and "dirmove". |
|
2061 | 2090 | # The first 4 are about renamed files, so let's count those. |
|
2062 | 2091 | renames = len(mergedata[0]) |
|
2063 | 2092 | renames += len(mergedata[1]) |
|
2064 | 2093 | renames += len(mergedata[2]) |
|
2065 | 2094 | renames += len(mergedata[3]) |
|
2066 | 2095 | data['nbrenamedfiles'] = renames |
|
2067 | 2096 | begin = util.timer() |
|
2068 | 2097 | p1renames = copies.pathcopies(b, p1) |
|
2069 | 2098 | end = util.timer() |
|
2070 | 2099 | data['p1.time'] = end - begin |
|
2071 | 2100 | begin = util.timer() |
|
2072 | 2101 | p2renames = copies.pathcopies(b, p2) |
|
2073 | 2102 | end = util.timer() |
|
2074 | 2103 | data['p2.time'] = end - begin |
|
2075 | 2104 | data['p1.renamedfiles'] = len(p1renames) |
|
2076 | 2105 | data['p2.renamedfiles'] = len(p2renames) |
|
2077 | 2106 | |
|
2078 | 2107 | if dostats: |
|
2079 | 2108 | if p1missing: |
|
2080 | 2109 | alldata['parentnbrenames'].append( |
|
2081 | 2110 | (data['p1.renamedfiles'], b.hex(), p1.hex()) |
|
2082 | 2111 | ) |
|
2083 | 2112 | alldata['parenttime'].append( |
|
2084 | 2113 | (data['p1.time'], b.hex(), p1.hex()) |
|
2085 | 2114 | ) |
|
2086 | 2115 | if p2missing: |
|
2087 | 2116 | alldata['parentnbrenames'].append( |
|
2088 | 2117 | (data['p2.renamedfiles'], b.hex(), p2.hex()) |
|
2089 | 2118 | ) |
|
2090 | 2119 | alldata['parenttime'].append( |
|
2091 | 2120 | (data['p2.time'], b.hex(), p2.hex()) |
|
2092 | 2121 | ) |
|
2093 | 2122 | if p1missing or p2missing: |
|
2094 | 2123 | alldata['totalnbrenames'].append( |
|
2095 | 2124 | ( |
|
2096 | 2125 | data['nbrenamedfiles'], |
|
2097 | 2126 | b.hex(), |
|
2098 | 2127 | p1.hex(), |
|
2099 | 2128 | p2.hex(), |
|
2100 | 2129 | ) |
|
2101 | 2130 | ) |
|
2102 | 2131 | alldata['totaltime'].append( |
|
2103 | 2132 | (data['time'], b.hex(), p1.hex(), p2.hex()) |
|
2104 | 2133 | ) |
|
2105 | 2134 | fm.startitem() |
|
2106 | 2135 | fm.data(**data) |
|
2107 | 2136 | # make node pretty for the human output |
|
2108 | 2137 | out = data.copy() |
|
2109 | 2138 | out['base'] = fm.hexfunc(b.node()) |
|
2110 | 2139 | out['p1.node'] = fm.hexfunc(p1.node()) |
|
2111 | 2140 | out['p2.node'] = fm.hexfunc(p2.node()) |
|
2112 | 2141 | fm.plain(output % out) |
|
2113 | 2142 | |
|
2114 | 2143 | fm.end() |
|
2115 | 2144 | if dostats: |
|
2116 | 2145 | # use a second formatter because the data are quite different, not sure |
|
2117 | 2146 | # how it flies with the templater. |
|
2118 | 2147 | entries = [ |
|
2119 | 2148 | ('nbrevs', 'number of revisions covered'), |
|
2120 | 2149 | ('nbmissingfiles', 'number of missing files at head'), |
|
2121 | 2150 | ] |
|
2122 | 2151 | if dotiming: |
|
2123 | 2152 | entries.append( |
|
2124 | 2153 | ('parentnbrenames', 'rename from one parent to base') |
|
2125 | 2154 | ) |
|
2126 | 2155 | entries.append(('totalnbrenames', 'total number of renames')) |
|
2127 | 2156 | entries.append(('parenttime', 'time for one parent')) |
|
2128 | 2157 | entries.append(('totaltime', 'time for both parents')) |
|
2129 | 2158 | _displaystats(ui, opts, entries, alldata) |
|
2130 | 2159 | |
|
2131 | 2160 | |
|
2132 | 2161 | @command( |
|
2133 | 2162 | b'perf::helper-pathcopies|perfhelper-pathcopies', |
|
2134 | 2163 | formatteropts |
|
2135 | 2164 | + [ |
|
2136 | 2165 | (b'r', b'revs', [], b'restrict search to these revisions'), |
|
2137 | 2166 | (b'', b'timing', False, b'provides extra data (costly)'), |
|
2138 | 2167 | (b'', b'stats', False, b'provides statistic about the measured data'), |
|
2139 | 2168 | ], |
|
2140 | 2169 | ) |
|
2141 | 2170 | def perfhelperpathcopies(ui, repo, revs=[], **opts): |
|
2142 | 2171 | """find statistics about potential parameters for the `perftracecopies` |
|
2143 | 2172 | |
|
2144 | 2173 | This command finds source-destination pairs relevant for copytracing testing. |
|
2145 | 2174 | It reports values for some of the parameters that impact copy tracing time. |
|
2146 | 2175 | |
|
2147 | 2176 | If `--timing` is set, rename detection is run and the associated timing |
|
2148 | 2177 | will be reported. The extra details come at the cost of a slower command |
|
2149 | 2178 | execution. |
|
2150 | 2179 | |
|
2151 | 2180 | Since the rename detection is only run once, other factors might easily |
|
2152 | 2181 | affect the precision of the timing. However, it should give a good |
|
2153 | 2182 | approximation of which revision pairs are very costly. |
|
2154 | 2183 | """ |
|
2155 | 2184 | opts = _byteskwargs(opts) |
|
2156 | 2185 | fm = ui.formatter(b'perf', opts) |
|
2157 | 2186 | dotiming = opts[b'timing'] |
|
2158 | 2187 | dostats = opts[b'stats'] |
|
2159 | 2188 | |
|
2160 | 2189 | if dotiming: |
|
2161 | 2190 | header = '%12s %12s %12s %12s %12s %12s\n' |
|
2162 | 2191 | output = ( |
|
2163 | 2192 | "%(source)12s %(destination)12s " |
|
2164 | 2193 | "%(nbrevs)12d %(nbmissingfiles)12d " |
|
2165 | 2194 | "%(nbrenamedfiles)12d %(time)18.5f\n" |
|
2166 | 2195 | ) |
|
2167 | 2196 | header_names = ( |
|
2168 | 2197 | "source", |
|
2169 | 2198 | "destination", |
|
2170 | 2199 | "nb-revs", |
|
2171 | 2200 | "nb-files", |
|
2172 | 2201 | "nb-renames", |
|
2173 | 2202 | "time", |
|
2174 | 2203 | ) |
|
2175 | 2204 | fm.plain(header % header_names) |
|
2176 | 2205 | else: |
|
2177 | 2206 | header = '%12s %12s %12s %12s\n' |
|
2178 | 2207 | output = ( |
|
2179 | 2208 | "%(source)12s %(destination)12s " |
|
2180 | 2209 | "%(nbrevs)12d %(nbmissingfiles)12d\n" |
|
2181 | 2210 | ) |
|
2182 | 2211 | fm.plain(header % ("source", "destination", "nb-revs", "nb-files")) |
|
2183 | 2212 | |
|
2184 | 2213 | if not revs: |
|
2185 | 2214 | revs = ['all()'] |
|
2186 | 2215 | revs = scmutil.revrange(repo, revs) |
|
2187 | 2216 | |
|
2188 | 2217 | if dostats: |
|
2189 | 2218 | alldata = { |
|
2190 | 2219 | 'nbrevs': [], |
|
2191 | 2220 | 'nbmissingfiles': [], |
|
2192 | 2221 | } |
|
2193 | 2222 | if dotiming: |
|
2194 | 2223 | alldata['nbrenames'] = [] |
|
2195 | 2224 | alldata['time'] = [] |
|
2196 | 2225 | |
|
2197 | 2226 | roi = repo.revs('merge() and %ld', revs) |
|
2198 | 2227 | for r in roi: |
|
2199 | 2228 | ctx = repo[r] |
|
2200 | 2229 | p1 = ctx.p1().rev() |
|
2201 | 2230 | p2 = ctx.p2().rev() |
|
2202 | 2231 | bases = repo.changelog._commonancestorsheads(p1, p2) |
|
2203 | 2232 | for p in (p1, p2): |
|
2204 | 2233 | for b in bases: |
|
2205 | 2234 | base = repo[b] |
|
2206 | 2235 | parent = repo[p] |
|
2207 | 2236 | missing = copies._computeforwardmissing(base, parent) |
|
2208 | 2237 | if not missing: |
|
2209 | 2238 | continue |
|
2210 | 2239 | data = { |
|
2211 | 2240 | b'source': base.hex(), |
|
2212 | 2241 | b'destination': parent.hex(), |
|
2213 | 2242 | b'nbrevs': len(repo.revs('only(%d, %d)', p, b)), |
|
2214 | 2243 | b'nbmissingfiles': len(missing), |
|
2215 | 2244 | } |
|
2216 | 2245 | if dostats: |
|
2217 | 2246 | alldata['nbrevs'].append( |
|
2218 | 2247 | ( |
|
2219 | 2248 | data['nbrevs'], |
|
2220 | 2249 | base.hex(), |
|
2221 | 2250 | parent.hex(), |
|
2222 | 2251 | ) |
|
2223 | 2252 | ) |
|
2224 | 2253 | alldata['nbmissingfiles'].append( |
|
2225 | 2254 | ( |
|
2226 | 2255 | data['nbmissingfiles'], |
|
2227 | 2256 | base.hex(), |
|
2228 | 2257 | parent.hex(), |
|
2229 | 2258 | ) |
|
2230 | 2259 | ) |
|
2231 | 2260 | if dotiming: |
|
2232 | 2261 | begin = util.timer() |
|
2233 | 2262 | renames = copies.pathcopies(base, parent) |
|
2234 | 2263 | end = util.timer() |
|
2235 | 2264 | # not very stable timing since we did only one run |
|
2236 | 2265 | data['time'] = end - begin |
|
2237 | 2266 | data['nbrenamedfiles'] = len(renames) |
|
2238 | 2267 | if dostats: |
|
2239 | 2268 | alldata['time'].append( |
|
2240 | 2269 | ( |
|
2241 | 2270 | data['time'], |
|
2242 | 2271 | base.hex(), |
|
2243 | 2272 | parent.hex(), |
|
2244 | 2273 | ) |
|
2245 | 2274 | ) |
|
2246 | 2275 | alldata['nbrenames'].append( |
|
2247 | 2276 | ( |
|
2248 | 2277 | data['nbrenamedfiles'], |
|
2249 | 2278 | base.hex(), |
|
2250 | 2279 | parent.hex(), |
|
2251 | 2280 | ) |
|
2252 | 2281 | ) |
|
2253 | 2282 | fm.startitem() |
|
2254 | 2283 | fm.data(**data) |
|
2255 | 2284 | out = data.copy() |
|
2256 | 2285 | out['source'] = fm.hexfunc(base.node()) |
|
2257 | 2286 | out['destination'] = fm.hexfunc(parent.node()) |
|
2258 | 2287 | fm.plain(output % out) |
|
2259 | 2288 | |
|
2260 | 2289 | fm.end() |
|
2261 | 2290 | if dostats: |
|
2262 | 2291 | entries = [ |
|
2263 | 2292 | ('nbrevs', 'number of revisions covered'), |
|
2264 | 2293 | ('nbmissingfiles', 'number of missing files at head'), |
|
2265 | 2294 | ] |
|
2266 | 2295 | if dotiming: |
|
2267 | 2296 | entries.append(('nbrenames', 'renamed files')) |
|
2268 | 2297 | entries.append(('time', 'time')) |
|
2269 | 2298 | _displaystats(ui, opts, entries, alldata) |
|
2270 | 2299 | |
|
2271 | 2300 | |
|
2272 | 2301 | @command(b'perf::cca|perfcca', formatteropts) |
|
2273 | 2302 | def perfcca(ui, repo, **opts): |
|
2274 | 2303 | opts = _byteskwargs(opts) |
|
2275 | 2304 | timer, fm = gettimer(ui, opts) |
|
2276 | 2305 | timer(lambda: scmutil.casecollisionauditor(ui, False, repo.dirstate)) |
|
2277 | 2306 | fm.end() |
|
2278 | 2307 | |
|
2279 | 2308 | |
|
2280 | 2309 | @command(b'perf::fncacheload|perffncacheload', formatteropts) |
|
2281 | 2310 | def perffncacheload(ui, repo, **opts): |
|
2282 | 2311 | opts = _byteskwargs(opts) |
|
2283 | 2312 | timer, fm = gettimer(ui, opts) |
|
2284 | 2313 | s = repo.store |
|
2285 | 2314 | |
|
2286 | 2315 | def d(): |
|
2287 | 2316 | s.fncache._load() |
|
2288 | 2317 | |
|
2289 | 2318 | timer(d) |
|
2290 | 2319 | fm.end() |
|
2291 | 2320 | |
|
2292 | 2321 | |
|
2293 | 2322 | @command(b'perf::fncachewrite|perffncachewrite', formatteropts) |
|
2294 | 2323 | def perffncachewrite(ui, repo, **opts): |
|
2295 | 2324 | opts = _byteskwargs(opts) |
|
2296 | 2325 | timer, fm = gettimer(ui, opts) |
|
2297 | 2326 | s = repo.store |
|
2298 | 2327 | lock = repo.lock() |
|
2299 | 2328 | s.fncache._load() |
|
2300 | 2329 | tr = repo.transaction(b'perffncachewrite') |
|
2301 | 2330 | tr.addbackup(b'fncache') |
|
2302 | 2331 | |
|
2303 | 2332 | def d(): |
|
2304 | 2333 | s.fncache._dirty = True |
|
2305 | 2334 | s.fncache.write(tr) |
|
2306 | 2335 | |
|
2307 | 2336 | timer(d) |
|
2308 | 2337 | tr.close() |
|
2309 | 2338 | lock.release() |
|
2310 | 2339 | fm.end() |
|
2311 | 2340 | |
|
2312 | 2341 | |
|
2313 | 2342 | @command(b'perf::fncacheencode|perffncacheencode', formatteropts) |
|
2314 | 2343 | def perffncacheencode(ui, repo, **opts): |
|
2315 | 2344 | opts = _byteskwargs(opts) |
|
2316 | 2345 | timer, fm = gettimer(ui, opts) |
|
2317 | 2346 | s = repo.store |
|
2318 | 2347 | s.fncache._load() |
|
2319 | 2348 | |
|
2320 | 2349 | def d(): |
|
2321 | 2350 | for p in s.fncache.entries: |
|
2322 | 2351 | s.encode(p) |
|
2323 | 2352 | |
|
2324 | 2353 | timer(d) |
|
2325 | 2354 | fm.end() |
|
2326 | 2355 | |
|
2327 | 2356 | |
|
2328 | 2357 | def _bdiffworker(q, blocks, xdiff, ready, done): |
|
2329 | 2358 | while not done.is_set(): |
|
2330 | 2359 | pair = q.get() |
|
2331 | 2360 | while pair is not None: |
|
2332 | 2361 | if xdiff: |
|
2333 | 2362 | mdiff.bdiff.xdiffblocks(*pair) |
|
2334 | 2363 | elif blocks: |
|
2335 | 2364 | mdiff.bdiff.blocks(*pair) |
|
2336 | 2365 | else: |
|
2337 | 2366 | mdiff.textdiff(*pair) |
|
2338 | 2367 | q.task_done() |
|
2339 | 2368 | pair = q.get() |
|
2340 | 2369 | q.task_done() # for the None one |
|
2341 | 2370 | with ready: |
|
2342 | 2371 | ready.wait() |
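# Each worker above drains pairs until it sees the None sentinel, then
# blocks on the `ready` condition so the thread can be reused; d() below
# refills the queue, notifies `ready`, and joins the queue per timed run.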
|
2343 | 2372 | |
|
2344 | 2373 | |
|
2345 | 2374 | def _manifestrevision(repo, mnode): |
|
2346 | 2375 | ml = repo.manifestlog |
|
2347 | 2376 | |
|
2348 | 2377 | if util.safehasattr(ml, b'getstorage'): |
|
2349 | 2378 | store = ml.getstorage(b'') |
|
2350 | 2379 | else: |
|
2351 | 2380 | store = ml._revlog |
|
2352 | 2381 | |
|
2353 | 2382 | return store.revision(mnode) |
|
2354 | 2383 | |
|
2355 | 2384 | |
|
2356 | 2385 | @command( |
|
2357 | 2386 | b'perf::bdiff|perfbdiff', |
|
2358 | 2387 | revlogopts |
|
2359 | 2388 | + formatteropts |
|
2360 | 2389 | + [ |
|
2361 | 2390 | ( |
|
2362 | 2391 | b'', |
|
2363 | 2392 | b'count', |
|
2364 | 2393 | 1, |
|
2365 | 2394 | b'number of revisions to test (when using --startrev)', |
|
2366 | 2395 | ), |
|
2367 | 2396 | (b'', b'alldata', False, b'test bdiffs for all associated revisions'), |
|
2368 | 2397 | (b'', b'threads', 0, b'number of threads to use (disable with 0)'), |
|
2369 | 2398 | (b'', b'blocks', False, b'test computing diffs into blocks'), |
|
2370 | 2399 | (b'', b'xdiff', False, b'use xdiff algorithm'), |
|
2371 | 2400 | ], |
|
2372 | 2401 | b'-c|-m|FILE REV', |
|
2373 | 2402 | ) |
|
2374 | 2403 | def perfbdiff(ui, repo, file_, rev=None, count=None, threads=0, **opts): |
|
2375 | 2404 | """benchmark a bdiff between revisions |
|
2376 | 2405 | |
|
2377 | 2406 | By default, benchmark a bdiff between the requested revision and its delta parent. |
|
2378 | 2407 | |
|
2379 | 2408 | With ``--count``, benchmark bdiffs between delta parents and self for N |
|
2380 | 2409 | revisions starting at the specified revision. |
|
2381 | 2410 | |
|
2382 | 2411 | With ``--alldata``, assume the requested revision is a changeset and |
|
2383 | 2412 | measure bdiffs for all changes related to that changeset (manifest |
|
2384 | 2413 | and filelogs). |
|
2385 | 2414 | """ |
|
2386 | 2415 | opts = _byteskwargs(opts) |
|
2387 | 2416 | |
|
2388 | 2417 | if opts[b'xdiff'] and not opts[b'blocks']: |
|
2389 | 2418 | raise error.CommandError(b'perfbdiff', b'--xdiff requires --blocks') |
|
2390 | 2419 | |
|
2391 | 2420 | if opts[b'alldata']: |
|
2392 | 2421 | opts[b'changelog'] = True |
|
2393 | 2422 | |
|
2394 | 2423 | if opts.get(b'changelog') or opts.get(b'manifest'): |
|
2395 | 2424 | file_, rev = None, file_ |
|
2396 | 2425 | elif rev is None: |
|
2397 | 2426 | raise error.CommandError(b'perfbdiff', b'invalid arguments') |
|
2398 | 2427 | |
|
2399 | 2428 | blocks = opts[b'blocks'] |
|
2400 | 2429 | xdiff = opts[b'xdiff'] |
|
2401 | 2430 | textpairs = [] |
|
2402 | 2431 | |
|
2403 | 2432 | r = cmdutil.openrevlog(repo, b'perfbdiff', file_, opts) |
|
2404 | 2433 | |
|
2405 | 2434 | startrev = r.rev(r.lookup(rev)) |
|
2406 | 2435 | for rev in range(startrev, min(startrev + count, len(r) - 1)): |
|
2407 | 2436 | if opts[b'alldata']: |
|
2408 | 2437 | # Load revisions associated with changeset. |
|
2409 | 2438 | ctx = repo[rev] |
|
2410 | 2439 | mtext = _manifestrevision(repo, ctx.manifestnode()) |
|
2411 | 2440 | for pctx in ctx.parents(): |
|
2412 | 2441 | pman = _manifestrevision(repo, pctx.manifestnode()) |
|
2413 | 2442 | textpairs.append((pman, mtext)) |
|
2414 | 2443 | |
|
2415 | 2444 | # Load filelog revisions by iterating manifest delta. |
|
2416 | 2445 | man = ctx.manifest() |
|
2417 | 2446 | pman = ctx.p1().manifest() |
|
2418 | 2447 | for filename, change in pman.diff(man).items(): |
|
2419 | 2448 | fctx = repo.file(filename) |
|
2420 | 2449 | f1 = fctx.revision(change[0][0] or -1) |
|
2421 | 2450 | f2 = fctx.revision(change[1][0] or -1) |
|
2422 | 2451 | textpairs.append((f1, f2)) |
|
2423 | 2452 | else: |
|
2424 | 2453 | dp = r.deltaparent(rev) |
|
2425 | 2454 | textpairs.append((r.revision(dp), r.revision(rev))) |
|
2426 | 2455 | |
|
2427 | 2456 | withthreads = threads > 0 |
|
2428 | 2457 | if not withthreads: |
|
2429 | 2458 | |
|
2430 | 2459 | def d(): |
|
2431 | 2460 | for pair in textpairs: |
|
2432 | 2461 | if xdiff: |
|
2433 | 2462 | mdiff.bdiff.xdiffblocks(*pair) |
|
2434 | 2463 | elif blocks: |
|
2435 | 2464 | mdiff.bdiff.blocks(*pair) |
|
2436 | 2465 | else: |
|
2437 | 2466 | mdiff.textdiff(*pair) |
|
2438 | 2467 | |
|
2439 | 2468 | else: |
|
2440 | 2469 | q = queue() |
|
2441 | 2470 | for i in _xrange(threads): |
|
2442 | 2471 | q.put(None) |
|
2443 | 2472 | ready = threading.Condition() |
|
2444 | 2473 | done = threading.Event() |
|
2445 | 2474 | for i in _xrange(threads): |
|
2446 | 2475 | threading.Thread( |
|
2447 | 2476 | target=_bdiffworker, args=(q, blocks, xdiff, ready, done) |
|
2448 | 2477 | ).start() |
|
2449 | 2478 | q.join() |
|
2450 | 2479 | |
|
2451 | 2480 | def d(): |
|
2452 | 2481 | for pair in textpairs: |
|
2453 | 2482 | q.put(pair) |
|
2454 | 2483 | for i in _xrange(threads): |
|
2455 | 2484 | q.put(None) |
|
2456 | 2485 | with ready: |
|
2457 | 2486 | ready.notify_all() |
|
2458 | 2487 | q.join() |
|
2459 | 2488 | |
|
2460 | 2489 | timer, fm = gettimer(ui, opts) |
|
2461 | 2490 | timer(d) |
|
2462 | 2491 | fm.end() |
|
2463 | 2492 | |
|
2464 | 2493 | if withthreads: |
|
2465 | 2494 | done.set() |
|
2466 | 2495 | for i in _xrange(threads): |
|
2467 | 2496 | q.put(None) |
|
2468 | 2497 | with ready: |
|
2469 | 2498 | ready.notify_all() |
|
2470 | 2499 | |
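For reference, typical invocations of the command above (the revision numbers are only illustrative):

$ hg perfbdiff -m 10000 --count 100
$ hg perfbdiff --alldata 10000 --threads 4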
|
2471 | 2500 | |
|
2472 | 2501 | @command( |
|
2473 | 2502 | b'perf::unidiff|perfunidiff', |
|
2474 | 2503 | revlogopts |
|
2475 | 2504 | + formatteropts |
|
2476 | 2505 | + [ |
|
2477 | 2506 | ( |
|
2478 | 2507 | b'', |
|
2479 | 2508 | b'count', |
|
2480 | 2509 | 1, |
|
2481 | 2510 | b'number of revisions to test (when using --startrev)', |
|
2482 | 2511 | ), |
|
2483 | 2512 | (b'', b'alldata', False, b'test unidiffs for all associated revisions'), |
|
2484 | 2513 | ], |
|
2485 | 2514 | b'-c|-m|FILE REV', |
|
2486 | 2515 | ) |
|
2487 | 2516 | def perfunidiff(ui, repo, file_, rev=None, count=None, **opts): |
|
2488 | 2517 | """benchmark a unified diff between revisions |
|
2489 | 2518 | |
|
2490 | 2519 | This doesn't include any copy tracing - it's just a unified diff |
|
2491 | 2520 | of the texts. |
|
2492 | 2521 | |
|
2493 | 2522 | By default, benchmark a diff between the requested revision and its delta parent. |
|
2494 | 2523 | |
|
2495 | 2524 | With ``--count``, benchmark diffs between delta parents and self for N |
|
2496 | 2525 | revisions starting at the specified revision. |
|
2497 | 2526 | |
|
2498 | 2527 | With ``--alldata``, assume the requested revision is a changeset and |
|
2499 | 2528 | measure diffs for all changes related to that changeset (manifest |
|
2500 | 2529 | and filelogs). |
|
2501 | 2530 | """ |
|
2502 | 2531 | opts = _byteskwargs(opts) |
|
2503 | 2532 | if opts[b'alldata']: |
|
2504 | 2533 | opts[b'changelog'] = True |
|
2505 | 2534 | |
|
2506 | 2535 | if opts.get(b'changelog') or opts.get(b'manifest'): |
|
2507 | 2536 | file_, rev = None, file_ |
|
2508 | 2537 | elif rev is None: |
|
2509 | 2538 | raise error.CommandError(b'perfunidiff', b'invalid arguments') |
|
2510 | 2539 | |
|
2511 | 2540 | textpairs = [] |
|
2512 | 2541 | |
|
2513 | 2542 | r = cmdutil.openrevlog(repo, b'perfunidiff', file_, opts) |
|
2514 | 2543 | |
|
2515 | 2544 | startrev = r.rev(r.lookup(rev)) |
|
2516 | 2545 | for rev in range(startrev, min(startrev + count, len(r) - 1)): |
|
2517 | 2546 | if opts[b'alldata']: |
|
2518 | 2547 | # Load revisions associated with changeset. |
|
2519 | 2548 | ctx = repo[rev] |
|
2520 | 2549 | mtext = _manifestrevision(repo, ctx.manifestnode()) |
|
2521 | 2550 | for pctx in ctx.parents(): |
|
2522 | 2551 | pman = _manifestrevision(repo, pctx.manifestnode()) |
|
2523 | 2552 | textpairs.append((pman, mtext)) |
|
2524 | 2553 | |
|
2525 | 2554 | # Load filelog revisions by iterating manifest delta. |
|
2526 | 2555 | man = ctx.manifest() |
|
2527 | 2556 | pman = ctx.p1().manifest() |
|
2528 | 2557 | for filename, change in pman.diff(man).items(): |
|
2529 | 2558 | fctx = repo.file(filename) |
|
2530 | 2559 | f1 = fctx.revision(change[0][0] or -1) |
|
2531 | 2560 | f2 = fctx.revision(change[1][0] or -1) |
|
2532 | 2561 | textpairs.append((f1, f2)) |
|
2533 | 2562 | else: |
|
2534 | 2563 | dp = r.deltaparent(rev) |
|
2535 | 2564 | textpairs.append((r.revision(dp), r.revision(rev))) |
|
2536 | 2565 | |
|
2537 | 2566 | def d(): |
|
2538 | 2567 | for left, right in textpairs: |
|
2539 | 2568 | # The date strings don't matter, so we pass empty strings. |
|
2540 | 2569 | headerlines, hunks = mdiff.unidiff( |
|
2541 | 2570 | left, b'', right, b'', b'left', b'right', binary=False |
|
2542 | 2571 | ) |
|
2543 | 2572 | # consume iterators in roughly the way patch.py does |
|
2544 | 2573 | b'\n'.join(headerlines) |
|
2545 | 2574 | b''.join(sum((list(hlines) for hrange, hlines in hunks), [])) |
|
2546 | 2575 | |
|
2547 | 2576 | timer, fm = gettimer(ui, opts) |
|
2548 | 2577 | timer(d) |
|
2549 | 2578 | fm.end() |
|
2550 | 2579 | |
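The benchmark body above explicitly joins the header lines and hunks because mdiff.unidiff returns lazy iterators. The same generate-then-consume shape can be reproduced with the standard library (difflib is only a stand-in for mdiff here):

import difflib

left = 'a\nb\nc\n'.splitlines(keepends=True)
right = 'a\nB\nc\n'.splitlines(keepends=True)

# unified_diff returns a generator; no diffing happens until it is consumed
diff = difflib.unified_diff(left, right, fromfile='left', tofile='right')
print(''.join(diff))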
|
2551 | 2580 | |
|
2552 | 2581 | @command(b'perf::diffwd|perfdiffwd', formatteropts) |
|
2553 | 2582 | def perfdiffwd(ui, repo, **opts): |
|
2554 | 2583 | """Profile diff of working directory changes""" |
|
2555 | 2584 | opts = _byteskwargs(opts) |
|
2556 | 2585 | timer, fm = gettimer(ui, opts) |
|
2557 | 2586 | options = { |
|
2558 | 2587 | 'w': 'ignore_all_space', |
|
2559 | 2588 | 'b': 'ignore_space_change', |
|
2560 | 2589 | 'B': 'ignore_blank_lines', |
|
2561 | 2590 | } |
|
2562 | 2591 | |
|
2563 | 2592 | for diffopt in ('', 'w', 'b', 'B', 'wB'): |
|
2564 | 2593 | opts = {options[c]: b'1' for c in diffopt} |
|
2565 | 2594 | |
|
2566 | 2595 | def d(): |
|
2567 | 2596 | ui.pushbuffer() |
|
2568 | 2597 | commands.diff(ui, repo, **opts) |
|
2569 | 2598 | ui.popbuffer() |
|
2570 | 2599 | |
|
2571 | 2600 | diffopt = diffopt.encode('ascii') |
|
2572 | 2601 | title = b'diffopts: %s' % (diffopt and (b'-' + diffopt) or b'none') |
|
2573 | 2602 | timer(d, title=title) |
|
2574 | 2603 | fm.end() |
|
2575 | 2604 | |
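The loop above benchmarks each whitespace-option combination by translating single-letter flags into keyword arguments; the mapping logic in isolation:

options = {
    'w': 'ignore_all_space',
    'b': 'ignore_space_change',
    'B': 'ignore_blank_lines',
}

for diffopt in ('', 'w', 'b', 'B', 'wB'):
    kwargs = {options[c]: True for c in diffopt}
    title = 'diffopts: %s' % ('-' + diffopt if diffopt else 'none')
    print(title, kwargs)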
|
2576 | 2605 | |
|
2577 | 2606 | @command( |
|
2578 | 2607 | b'perf::revlogindex|perfrevlogindex', |
|
2579 | 2608 | revlogopts + formatteropts, |
|
2580 | 2609 | b'-c|-m|FILE', |
|
2581 | 2610 | ) |
|
2582 | 2611 | def perfrevlogindex(ui, repo, file_=None, **opts): |
|
2583 | 2612 | """Benchmark operations against a revlog index. |
|
2584 | 2613 | |
|
2585 | 2614 | This tests constructing a revlog instance, reading index data, |
|
2586 | 2615 | parsing index data, and performing various operations related to |
|
2587 | 2616 | index data. |
|
2588 | 2617 | """ |
|
2589 | 2618 | |
|
2590 | 2619 | opts = _byteskwargs(opts) |
|
2591 | 2620 | |
|
2592 | 2621 | rl = cmdutil.openrevlog(repo, b'perfrevlogindex', file_, opts) |
|
2593 | 2622 | |
|
2594 | 2623 | opener = getattr(rl, 'opener') # trick linter |
|
2595 | indexfile = rl.indexfile | |
|
2624 | # compat with hg <= 5.8 | |
|
2625 | radix = getattr(rl, 'radix', None) | |
|
2626 | indexfile = getattr(rl, '_indexfile', None) | |
|
2627 | if indexfile is None: | |
|
2628 | # compatibility with <= hg-5.8 | |
|
2629 | indexfile = getattr(rl, 'indexfile') | |
|
2596 | 2630 | data = opener.read(indexfile) |
|
2597 | 2631 | |
|
2598 | 2632 | header = struct.unpack(b'>I', data[0:4])[0] |
|
2599 | 2633 | version = header & 0xFFFF |
|
2600 | 2634 | if version == 1: |
|
2601 | revlogio = revlog.revlogio() | |
|
2602 | 2635 | inline = header & (1 << 16) |
|
2603 | 2636 | else: |
|
2604 | 2637 | raise error.Abort(b'unsupported revlog version: %d' % version) |
|
2605 | 2638 | |
|
2639 | parse_index_v1 = getattr(mercurial.revlog, 'parse_index_v1', None) | |
|
2640 | if parse_index_v1 is None: | |
|
2641 | parse_index_v1 = mercurial.revlog.revlogio().parseindex | |
|
2642 | ||
|
2606 | 2643 | rllen = len(rl) |
|
2607 | 2644 | |
|
2608 | 2645 | node0 = rl.node(0) |
|
2609 | 2646 | node25 = rl.node(rllen // 4) |
|
2610 | 2647 | node50 = rl.node(rllen // 2) |
|
2611 | 2648 | node75 = rl.node(rllen // 4 * 3) |
|
2612 | 2649 | node100 = rl.node(rllen - 1) |
|
2613 | 2650 | |
|
2614 | 2651 | allrevs = range(rllen) |
|
2615 | 2652 | allrevsrev = list(reversed(allrevs)) |
|
2616 | 2653 | allnodes = [rl.node(rev) for rev in range(rllen)] |
|
2617 | 2654 | allnodesrev = list(reversed(allnodes)) |
|
2618 | 2655 | |
|
2619 | 2656 | def constructor(): |
|
2620 | revlog.revlog(opener, indexfile) | |
|
2657 | if radix is not None: | |
|
2658 | revlog(opener, radix=radix) | |
|
2659 | else: | |
|
2660 | # hg <= 5.8 | |
|
2661 | revlog(opener, indexfile=indexfile) | |
|
2621 | 2662 | |
|
2622 | 2663 | def read(): |
|
2623 | 2664 | with opener(indexfile) as fh: |
|
2624 | 2665 | fh.read() |
|
2625 | 2666 | |
|
2626 | 2667 | def parseindex(): |
|
2627 | revlogio.parseindex(data, inline) | |

2668 | parse_index_v1(data, inline) | |
|
2628 | 2669 | |
|
2629 | 2670 | def getentry(revornode): |
|
2630 | index = revlogio.parseindex(data, inline)[0] | |

2671 | index = parse_index_v1(data, inline)[0] | |
|
2631 | 2672 | index[revornode] |
|
2632 | 2673 | |
|
2633 | 2674 | def getentries(revs, count=1): |
|
2634 | index = revlogio.parseindex(data, inline)[0] | |

2675 | index = parse_index_v1(data, inline)[0] | |
|
2635 | 2676 | |
|
2636 | 2677 | for i in range(count): |
|
2637 | 2678 | for rev in revs: |
|
2638 | 2679 | index[rev] |
|
2639 | 2680 | |
|
2640 | 2681 | def resolvenode(node): |
|
2641 | index = revlogio.parseindex(data, inline)[0] | |

2682 | index = parse_index_v1(data, inline)[0] | |
|
2642 | 2683 | rev = getattr(index, 'rev', None) |
|
2643 | 2684 | if rev is None: |
|
2644 | nodemap = getattr( | |
|
2645 | revlogio.parseindex(data, inline)[0], 'nodemap', None | |
|
2646 | ) | |
|
2685 | nodemap = getattr(parse_index_v1(data, inline)[0], 'nodemap', None) | |
|
2647 | 2686 | # This only works for the C code. |
|
2648 | 2687 | if nodemap is None: |
|
2649 | 2688 | return |
|
2650 | 2689 | rev = nodemap.__getitem__ |
|
2651 | 2690 | |
|
2652 | 2691 | try: |
|
2653 | 2692 | rev(node) |
|
2654 | 2693 | except error.RevlogError: |
|
2655 | 2694 | pass |
|
2656 | 2695 | |
|
2657 | 2696 | def resolvenodes(nodes, count=1): |
|
2658 | index = revlogio.parseindex(data, inline)[0] | |

2697 | index = parse_index_v1(data, inline)[0] | |
|
2659 | 2698 | rev = getattr(index, 'rev', None) |
|
2660 | 2699 | if rev is None: |
|
2661 | nodemap = getattr( | |
|
2662 | revlogio.parseindex(data, inline)[0], 'nodemap', None | |
|
2663 | ) | |
|
2700 | nodemap = getattr(parse_index_v1(data, inline)[0], 'nodemap', None) | |
|
2664 | 2701 | # This only works for the C code. |
|
2665 | 2702 | if nodemap is None: |
|
2666 | 2703 | return |
|
2667 | 2704 | rev = nodemap.__getitem__ |
|
2668 | 2705 | |
|
2669 | 2706 | for i in range(count): |
|
2670 | 2707 | for node in nodes: |
|
2671 | 2708 | try: |
|
2672 | 2709 | rev(node) |
|
2673 | 2710 | except error.RevlogError: |
|
2674 | 2711 | pass |
|
2675 | 2712 | |
|
2676 | 2713 | benches = [ |
|
2677 | 2714 | (constructor, b'revlog constructor'), |
|
2678 | 2715 | (read, b'read'), |
|
2679 | 2716 | (parseindex, b'create index object'), |
|
2680 | 2717 | (lambda: getentry(0), b'retrieve index entry for rev 0'), |
|
2681 | 2718 | (lambda: resolvenode(b'a' * 20), b'look up missing node'), |
|
2682 | 2719 | (lambda: resolvenode(node0), b'look up node at rev 0'), |
|
2683 | 2720 | (lambda: resolvenode(node25), b'look up node at 1/4 len'), |
|
2684 | 2721 | (lambda: resolvenode(node50), b'look up node at 1/2 len'), |
|
2685 | 2722 | (lambda: resolvenode(node75), b'look up node at 3/4 len'), |
|
2686 | 2723 | (lambda: resolvenode(node100), b'look up node at tip'), |
|
2687 | 2724 | # 2x variation is to measure caching impact. |
|
2688 | 2725 | (lambda: resolvenodes(allnodes), b'look up all nodes (forward)'), |
|
2689 | 2726 | (lambda: resolvenodes(allnodes, 2), b'look up all nodes 2x (forward)'), |
|
2690 | 2727 | (lambda: resolvenodes(allnodesrev), b'look up all nodes (reverse)'), |
|
2691 | 2728 | ( |
|
2692 | 2729 | lambda: resolvenodes(allnodesrev, 2), |
|
2693 | 2730 | b'look up all nodes 2x (reverse)', |
|
2694 | 2731 | ), |
|
2695 | 2732 | (lambda: getentries(allrevs), b'retrieve all index entries (forward)'), |
|
2696 | 2733 | ( |
|
2697 | 2734 | lambda: getentries(allrevs, 2), |
|
2698 | 2735 | b'retrieve all index entries 2x (forward)', |
|
2699 | 2736 | ), |
|
2700 | 2737 | ( |
|
2701 | 2738 | lambda: getentries(allrevsrev), |
|
2702 | 2739 | b'retrieve all index entries (reverse)', |
|
2703 | 2740 | ), |
|
2704 | 2741 | ( |
|
2705 | 2742 | lambda: getentries(allrevsrev, 2), |
|
2706 | 2743 | b'retrieve all index entries 2x (reverse)', |
|
2707 | 2744 | ), |
|
2708 | 2745 | ] |
|
2709 | 2746 | |
|
2710 | 2747 | for fn, title in benches: |
|
2711 | 2748 | timer, fm = gettimer(ui, opts) |
|
2712 | 2749 | timer(fn, title=title) |
|
2713 | 2750 | fm.end() |
|
2714 | 2751 | |
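The version sniffing above reads the first four bytes of the index as a big-endian u32: the low 16 bits carry the format version and bit 16 the inline flag. The same decoding can be done standalone on any revlog index (the path below is only an example):

import struct

with open('.hg/store/00changelog.i', 'rb') as fh:  # example path
    header = struct.unpack('>I', fh.read(4))[0]

version = header & 0xFFFF          # low 16 bits: revlog format version
inline = bool(header & (1 << 16))  # bit 16: data stored inline with the index
print('revlog v%d, inline=%s' % (version, inline))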
|
2715 | 2752 | |
|
2716 | 2753 | @command( |
|
2717 | 2754 | b'perf::revlogrevisions|perfrevlogrevisions', |
|
2718 | 2755 | revlogopts |
|
2719 | 2756 | + formatteropts |
|
2720 | 2757 | + [ |
|
2721 | 2758 | (b'd', b'dist', 100, b'distance between the revisions'), |
|
2722 | 2759 | (b's', b'startrev', 0, b'revision to start reading at'), |
|
2723 | 2760 | (b'', b'reverse', False, b'read in reverse'), |
|
2724 | 2761 | ], |
|
2725 | 2762 | b'-c|-m|FILE', |
|
2726 | 2763 | ) |
|
2727 | 2764 | def perfrevlogrevisions( |
|
2728 | 2765 | ui, repo, file_=None, startrev=0, reverse=False, **opts |
|
2729 | 2766 | ): |
|
2730 | 2767 | """Benchmark reading a series of revisions from a revlog. |
|
2731 | 2768 | |
|
2732 | 2769 | By default, we read every ``-d/--dist`` revision from 0 to tip of |
|
2733 | 2770 | the specified revlog. |
|
2734 | 2771 | |
|
2735 | 2772 | The start revision can be defined via ``-s/--startrev``. |
|
2736 | 2773 | """ |
|
2737 | 2774 | opts = _byteskwargs(opts) |
|
2738 | 2775 | |
|
2739 | 2776 | rl = cmdutil.openrevlog(repo, b'perfrevlogrevisions', file_, opts) |
|
2740 | 2777 | rllen = getlen(ui)(rl) |
|
2741 | 2778 | |
|
2742 | 2779 | if startrev < 0: |
|
2743 | 2780 | startrev = rllen + startrev |
|
2744 | 2781 | |
|
2745 | 2782 | def d(): |
|
2746 | 2783 | rl.clearcaches() |
|
2747 | 2784 | |
|
2748 | 2785 | beginrev = startrev |
|
2749 | 2786 | endrev = rllen |
|
2750 | 2787 | dist = opts[b'dist'] |
|
2751 | 2788 | |
|
2752 | 2789 | if reverse: |
|
2753 | 2790 | beginrev, endrev = endrev - 1, beginrev - 1 |
|
2754 | 2791 | dist = -1 * dist |
|
2755 | 2792 | |
|
2756 | 2793 | for x in _xrange(beginrev, endrev, dist): |
|
2757 | 2794 | # Old revisions don't support passing int. |
|
2758 | 2795 | n = rl.node(x) |
|
2759 | 2796 | rl.revision(n) |
|
2760 | 2797 | |
|
2761 | 2798 | timer, fm = gettimer(ui, opts) |
|
2762 | 2799 | timer(d) |
|
2763 | 2800 | fm.end() |
|
2764 | 2801 | |
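The --reverse handling flips both bounds and negates the stride so a single range() walks the revlog in either direction; the arithmetic checked in isolation:

def revwindow(startrev, rllen, dist, reverse=False):
    beginrev, endrev = startrev, rllen
    if reverse:
        beginrev, endrev = endrev - 1, beginrev - 1
        dist = -dist
    return list(range(beginrev, endrev, dist))

print(revwindow(0, 10, 3))                # [0, 3, 6, 9]
print(revwindow(0, 10, 3, reverse=True))  # [9, 6, 3, 0]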
|
2765 | 2802 | |
|
2766 | 2803 | @command( |
|
2767 | 2804 | b'perf::revlogwrite|perfrevlogwrite', |
|
2768 | 2805 | revlogopts |
|
2769 | 2806 | + formatteropts |
|
2770 | 2807 | + [ |
|
2771 | 2808 | (b's', b'startrev', 1000, b'revision to start writing at'), |
|
2772 | 2809 | (b'', b'stoprev', -1, b'last revision to write'), |
|
2773 | 2810 | (b'', b'count', 3, b'number of passes to perform'), |
|
2774 | 2811 | (b'', b'details', False, b'print timing for every revision tested'), |
|
2775 | 2812 | (b'', b'source', b'full', b'the kind of data feed in the revlog'), |
|
2776 | 2813 | (b'', b'lazydeltabase', True, b'try the provided delta first'), |
|
2777 | 2814 | (b'', b'clear-caches', True, b'clear revlog cache between calls'), |
|
2778 | 2815 | ], |
|
2779 | 2816 | b'-c|-m|FILE', |
|
2780 | 2817 | ) |
|
2781 | 2818 | def perfrevlogwrite(ui, repo, file_=None, startrev=1000, stoprev=-1, **opts): |
|
2782 | 2819 | """Benchmark writing a series of revisions to a revlog. |
|
2783 | 2820 | |
|
2784 | 2821 | Possible source values are: |
|
2785 | 2822 | * `full`: add from a full text (default). |
|
2786 | 2823 | * `parent-1`: add from a delta to the first parent |
|
2787 | 2824 | * `parent-2`: add from a delta to the second parent if it exists |
|
2788 | 2825 | (use a delta from the first parent otherwise) |
|
2789 | 2826 | * `parent-smallest`: add from the smallest delta (either p1 or p2) |
|
2790 | 2827 | * `storage`: add from the existing precomputed deltas |
|
2791 | 2828 | |
|
2792 | 2829 | Note: This performance command measures performance in a custom way. As a |
|
2793 | 2830 | result some of the global configuration of the 'perf' command does not |
|
2794 | 2831 | apply to it: |
|
2795 | 2832 | |
|
2796 | 2833 | * ``pre-run``: disabled |
|
2797 | 2834 | |
|
2798 | 2835 | * ``profile-benchmark``: disabled |
|
2799 | 2836 | |
|
2800 | 2837 | * ``run-limits``: disabled, use --count instead |
|
2801 | 2838 | """ |
|
2802 | 2839 | opts = _byteskwargs(opts) |
|
2803 | 2840 | |
|
2804 | 2841 | rl = cmdutil.openrevlog(repo, b'perfrevlogwrite', file_, opts) |
|
2805 | 2842 | rllen = getlen(ui)(rl) |
|
2806 | 2843 | if startrev < 0: |
|
2807 | 2844 | startrev = rllen + startrev |
|
2808 | 2845 | if stoprev < 0: |
|
2809 | 2846 | stoprev = rllen + stoprev |
|
2810 | 2847 | |
|
2811 | 2848 | lazydeltabase = opts['lazydeltabase'] |
|
2812 | 2849 | source = opts['source'] |
|
2813 | 2850 | clearcaches = opts['clear_caches'] |
|
2814 | 2851 | validsource = ( |
|
2815 | 2852 | b'full', |
|
2816 | 2853 | b'parent-1', |
|
2817 | 2854 | b'parent-2', |
|
2818 | 2855 | b'parent-smallest', |
|
2819 | 2856 | b'storage', |
|
2820 | 2857 | ) |
|
2821 | 2858 | if source not in validsource: |
|
2822 | 2859 | raise error.Abort('invalid source type: %s' % source) |
|
2823 | 2860 | |
|
2824 | 2861 | ### actually gather results |
|
2825 | 2862 | count = opts['count'] |
|
2826 | 2863 | if count <= 0: |
|
2827 | 2864 | raise error.Abort('invalid run count: %d' % count) |
|
2828 | 2865 | allresults = [] |
|
2829 | 2866 | for c in range(count): |
|
2830 | 2867 | timing = _timeonewrite( |
|
2831 | 2868 | ui, |
|
2832 | 2869 | rl, |
|
2833 | 2870 | source, |
|
2834 | 2871 | startrev, |
|
2835 | 2872 | stoprev, |
|
2836 | 2873 | c + 1, |
|
2837 | 2874 | lazydeltabase=lazydeltabase, |
|
2838 | 2875 | clearcaches=clearcaches, |
|
2839 | 2876 | ) |
|
2840 | 2877 | allresults.append(timing) |
|
2841 | 2878 | |
|
2842 | 2879 | ### consolidate the results in a single list |
|
2843 | 2880 | results = [] |
|
2844 | 2881 | for idx, (rev, t) in enumerate(allresults[0]): |
|
2845 | 2882 | ts = [t] |
|
2846 | 2883 | for other in allresults[1:]: |
|
2847 | 2884 | orev, ot = other[idx] |
|
2848 | 2885 | assert orev == rev |
|
2849 | 2886 | ts.append(ot) |
|
2850 | 2887 | results.append((rev, ts)) |
|
2851 | 2888 | resultcount = len(results) |
|
2852 | 2889 | |
|
2853 | 2890 | ### Compute and display relevant statistics |
|
2854 | 2891 | |
|
2855 | 2892 | # get a formatter |
|
2856 | 2893 | fm = ui.formatter(b'perf', opts) |
|
2857 | 2894 | displayall = ui.configbool(b"perf", b"all-timing", False) |
|
2858 | 2895 | |
|
2859 | 2896 | # print individual details if requested |
|
2860 | 2897 | if opts['details']: |
|
2861 | 2898 | for idx, item in enumerate(results, 1): |
|
2862 | 2899 | rev, data = item |
|
2863 | 2900 | title = 'revisions #%d of %d, rev %d' % (idx, resultcount, rev) |
|
2864 | 2901 | formatone(fm, data, title=title, displayall=displayall) |
|
2865 | 2902 | |
|
2866 | 2903 | # sorts results by median time |
|
2867 | 2904 | results.sort(key=lambda x: sorted(x[1])[len(x[1]) // 2]) |
|
2868 | 2905 | # list of (name, index) to display |
|
2869 | 2906 | relevants = [ |
|
2870 | 2907 | ("min", 0), |
|
2871 | 2908 | ("10%", resultcount * 10 // 100), |
|
2872 | 2909 | ("25%", resultcount * 25 // 100), |
|
2873 | 2910 | ("50%", resultcount * 50 // 100), |
|
2874 | 2911 | ("75%", resultcount * 75 // 100), |
|
2875 | 2912 | ("90%", resultcount * 90 // 100), |
|
2876 | 2913 | ("95%", resultcount * 95 // 100), |
|
2877 | 2914 | ("99%", resultcount * 99 // 100), |
|
2878 | 2915 | ("99.9%", resultcount * 999 // 1000), |
|
2879 | 2916 | ("99.99%", resultcount * 9999 // 10000), |
|
2880 | 2917 | ("99.999%", resultcount * 99999 // 100000), |
|
2881 | 2918 | ("max", -1), |
|
2882 | 2919 | ] |
|
2883 | 2920 | if not ui.quiet: |
|
2884 | 2921 | for name, idx in relevants: |
|
2885 | 2922 | data = results[idx] |
|
2886 | 2923 | title = '%s of %d, rev %d' % (name, resultcount, data[0]) |
|
2887 | 2924 | formatone(fm, data[1], title=title, displayall=displayall) |
|
2888 | 2925 | |
|
2889 | 2926 | # XXX summing that many float will not be very precise, we ignore this fact |
|
2890 | 2927 | # for now |
|
2891 | 2928 | totaltime = [] |
|
2892 | 2929 | for item in allresults: |
|
2893 | 2930 | totaltime.append( |
|
2894 | 2931 | ( |
|
2895 | 2932 | sum(x[1][0] for x in item), |
|
2896 | 2933 | sum(x[1][1] for x in item), |
|
2897 | 2934 | sum(x[1][2] for x in item), |
|
2898 | 2935 | ) |
|
2899 | 2936 | ) |
|
2900 | 2937 | formatone( |
|
2901 | 2938 | fm, |
|
2902 | 2939 | totaltime, |
|
2903 | 2940 | title="total time (%d revs)" % resultcount, |
|
2904 | 2941 | displayall=displayall, |
|
2905 | 2942 | ) |
|
2906 | 2943 | fm.end() |
|
2907 | 2944 | |
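The statistics block above indexes directly into the median-sorted results at count * pct // 100 (with -1 for the max row); the same arithmetic as a standalone helper, with a clamp added here so small result counts stay in range:

def percentile_index(count, pct):
    """Index of the pct-th percentile in a sorted sequence of `count` items."""
    return min(count - 1, count * pct // 100)

results = sorted([12.0, 5.0, 7.0, 42.0, 9.0, 3.0, 18.0, 1.0])
for pct in (0, 10, 25, 50, 75, 90, 99):
    print('%3d%% -> %s' % (pct, results[percentile_index(len(results), pct)]))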
|
2908 | 2945 | |
|
2909 | 2946 | class _faketr(object): |
|
2910 | 2947 | def add(s, x, y, z=None): |
|
2911 | 2948 | return None |
|
2912 | 2949 | |
|
2913 | 2950 | |
|
2914 | 2951 | def _timeonewrite( |
|
2915 | 2952 | ui, |
|
2916 | 2953 | orig, |
|
2917 | 2954 | source, |
|
2918 | 2955 | startrev, |
|
2919 | 2956 | stoprev, |
|
2920 | 2957 | runidx=None, |
|
2921 | 2958 | lazydeltabase=True, |
|
2922 | 2959 | clearcaches=True, |
|
2923 | 2960 | ): |
|
2924 | 2961 | timings = [] |
|
2925 | 2962 | tr = _faketr() |
|
2926 | 2963 | with _temprevlog(ui, orig, startrev) as dest: |
|
2927 | 2964 | dest._lazydeltabase = lazydeltabase |
|
2928 | 2965 | revs = list(orig.revs(startrev, stoprev)) |
|
2929 | 2966 | total = len(revs) |
|
2930 | 2967 | topic = 'adding' |
|
2931 | 2968 | if runidx is not None: |
|
2932 | 2969 | topic += ' (run #%d)' % runidx |
|
2933 | 2970 | # Support both old and new progress API |
|
2934 | 2971 | if util.safehasattr(ui, 'makeprogress'): |
|
2935 | 2972 | progress = ui.makeprogress(topic, unit='revs', total=total) |
|
2936 | 2973 | |
|
2937 | 2974 | def updateprogress(pos): |
|
2938 | 2975 | progress.update(pos) |
|
2939 | 2976 | |
|
2940 | 2977 | def completeprogress(): |
|
2941 | 2978 | progress.complete() |
|
2942 | 2979 | |
|
2943 | 2980 | else: |
|
2944 | 2981 | |
|
2945 | 2982 | def updateprogress(pos): |
|
2946 | 2983 | ui.progress(topic, pos, unit='revs', total=total) |
|
2947 | 2984 | |
|
2948 | 2985 | def completeprogress(): |
|
2949 | 2986 | ui.progress(topic, None, unit='revs', total=total) |
|
2950 | 2987 | |
|
2951 | 2988 | for idx, rev in enumerate(revs): |
|
2952 | 2989 | updateprogress(idx) |
|
2953 | 2990 | addargs, addkwargs = _getrevisionseed(orig, rev, tr, source) |
|
2954 | 2991 | if clearcaches: |
|
2955 | 2992 | dest.index.clearcaches() |
|
2956 | 2993 | dest.clearcaches() |
|
2957 | 2994 | with timeone() as r: |
|
2958 | 2995 | dest.addrawrevision(*addargs, **addkwargs) |
|
2959 | 2996 | timings.append((rev, r[0])) |
|
2960 | 2997 | updateprogress(total) |
|
2961 | 2998 | completeprogress() |
|
2962 | 2999 | return timings |
|
2963 | 3000 | |
|
2964 | 3001 | |
|
2965 | 3002 | def _getrevisionseed(orig, rev, tr, source): |
|
2966 | 3003 | from mercurial.node import nullid |
|
2967 | 3004 | |
|
2968 | 3005 | linkrev = orig.linkrev(rev) |
|
2969 | 3006 | node = orig.node(rev) |
|
2970 | 3007 | p1, p2 = orig.parents(node) |
|
2971 | 3008 | flags = orig.flags(rev) |
|
2972 | 3009 | cachedelta = None |
|
2973 | 3010 | text = None |
|
2974 | 3011 | |
|
2975 | 3012 | if source == b'full': |
|
2976 | 3013 | text = orig.revision(rev) |
|
2977 | 3014 | elif source == b'parent-1': |
|
2978 | 3015 | baserev = orig.rev(p1) |
|
2979 | 3016 | cachedelta = (baserev, orig.revdiff(p1, rev)) |
|
2980 | 3017 | elif source == b'parent-2': |
|
2981 | 3018 | parent = p2 |
|
2982 | 3019 | if p2 == nullid: |
|
2983 | 3020 | parent = p1 |
|
2984 | 3021 | baserev = orig.rev(parent) |
|
2985 | 3022 | cachedelta = (baserev, orig.revdiff(parent, rev)) |
|
2986 | 3023 | elif source == b'parent-smallest': |
|
2987 | 3024 | p1diff = orig.revdiff(p1, rev) |
|
2988 | 3025 | parent = p1 |
|
2989 | 3026 | diff = p1diff |
|
2990 | 3027 | if p2 != nullid: |
|
2991 | 3028 | p2diff = orig.revdiff(p2, rev) |
|
2992 | 3029 | if len(p1diff) > len(p2diff): |
|
2993 | 3030 | parent = p2 |
|
2994 | 3031 | diff = p2diff |
|
2995 | 3032 | baserev = orig.rev(parent) |
|
2996 | 3033 | cachedelta = (baserev, diff) |
|
2997 | 3034 | elif source == b'storage': |
|
2998 | 3035 | baserev = orig.deltaparent(rev) |
|
2999 | 3036 | cachedelta = (baserev, orig.revdiff(orig.node(baserev), rev)) |
|
3000 | 3037 | |
|
3001 | 3038 | return ( |
|
3002 | 3039 | (text, tr, linkrev, p1, p2), |
|
3003 | 3040 | {'node': node, 'flags': flags, 'cachedelta': cachedelta}, |
|
3004 | 3041 | ) |
|
3005 | 3042 | |
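For `parent-smallest`, the code keeps p1 unless the p2 delta is strictly shorter; the selection rule in isolation (byte strings stand in for real deltas):

def smallest_delta(p1diff, p2diff=None):
    # Prefer p1 on ties; switch to p2 only when p1's delta is strictly larger.
    if p2diff is not None and len(p1diff) > len(p2diff):
        return 'p2', p2diff
    return 'p1', p1diff

print(smallest_delta(b'a-much-longer-delta', b'short'))  # ('p2', b'short')
print(smallest_delta(b'short', b'a-much-longer-delta'))  # ('p1', b'short')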
|
3006 | 3043 | |
|
3007 | 3044 | @contextlib.contextmanager |
|
3008 | 3045 | def _temprevlog(ui, orig, truncaterev): |
|
3009 | 3046 | from mercurial import vfs as vfsmod |
|
3010 | 3047 | |
|
3011 | 3048 | if orig._inline: |
|
3012 | 3049 | raise error.Abort('not supporting inline revlog (yet)') |
|
3013 | 3050 | revlogkwargs = {} |
|
3014 | 3051 | k = 'upperboundcomp' |
|
3015 | 3052 | if util.safehasattr(orig, k): |
|
3016 | 3053 | revlogkwargs[k] = getattr(orig, k) |
|
3017 | 3054 | |
|
3018 | origindexpath = orig.opener.join(orig.indexfile) | |
|
3019 | origdatapath = orig.opener.join(orig.datafile) | |
|
3020 | indexname = 'revlog.i' | |
|
3021 | dataname = 'revlog.d' | |
|
3055 | indexfile = getattr(orig, '_indexfile', None) | |
|
3056 | if indexfile is None: | |
|
3057 | # compatibility with <= hg-5.8 | |
|
3058 | indexfile = getattr(orig, 'indexfile') | |
|
3059 | origindexpath = orig.opener.join(indexfile) | |
|
3060 | ||
|
3061 | datafile = getattr(orig, '_datafile', getattr(orig, 'datafile')) | |
|
3062 | origdatapath = orig.opener.join(datafile) | |
|
3063 | radix = b'revlog' | |
|
3064 | indexname = b'revlog.i' | |
|
3065 | dataname = b'revlog.d' | |
|
3022 | 3066 | |
|
3023 | 3067 | tmpdir = tempfile.mkdtemp(prefix='tmp-hgperf-') |
|
3024 | 3068 | try: |
|
3025 | 3069 | # copy the data file in a temporary directory |
|
3026 | 3070 | ui.debug('copying data in %s\n' % tmpdir) |
|
3027 | 3071 | destindexpath = os.path.join(tmpdir, 'revlog.i') |
|
3028 | 3072 | destdatapath = os.path.join(tmpdir, 'revlog.d') |
|
3029 | 3073 | shutil.copyfile(origindexpath, destindexpath) |
|
3030 | 3074 | shutil.copyfile(origdatapath, destdatapath) |
|
3031 | 3075 | |
|
3032 | 3076 | # remove the data we want to add again |
|
3033 | 3077 | ui.debug('truncating data to be rewritten\n') |
|
3034 | 3078 | with open(destindexpath, 'ab') as index: |
|
3035 | 3079 | index.seek(0) |
|
3036 | 3080 | index.truncate(truncaterev * orig._io.size) |
|
3037 | 3081 | with open(destdatapath, 'ab') as data: |
|
3038 | 3082 | data.seek(0) |
|
3039 | 3083 | data.truncate(orig.start(truncaterev)) |
|
3040 | 3084 | |
|
3041 | 3085 | # instantiate a new revlog from the temporary copy |
|
3042 | 3086 | ui.debug('truncating adding to be rewritten\n') |
|
3043 | 3087 | vfs = vfsmod.vfs(tmpdir) |
|
3044 | 3088 | vfs.options = getattr(orig.opener, 'options', None) |
|
3045 | 3089 | |
|
3046 | dest = revlog.revlog( | |
|
3047 | vfs, indexfile=indexname, datafile=dataname, **revlogkwargs | |
|
3048 | ) | |
|
3090 | try: | |
|
3091 | dest = revlog(vfs, radix=radix, **revlogkwargs) | |
|
3092 | except TypeError: | |
|
3093 | dest = revlog( | |
|
3094 | vfs, indexfile=indexname, datafile=dataname, **revlogkwargs | |
|
3095 | ) | |
|
3049 | 3096 | if dest._inline: |
|
3050 | 3097 | raise error.Abort('not supporting inline revlog (yet)') |
|
3051 | 3098 | # make sure internals are initialized |
|
3052 | 3099 | dest.revision(len(dest) - 1) |
|
3053 | 3100 | yield dest |
|
3054 | 3101 | del dest, vfs |
|
3055 | 3102 | finally: |
|
3056 | 3103 | shutil.rmtree(tmpdir, True) |
|
3057 | 3104 | |
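_temprevlog follows the classic copy, mutate, always-clean-up shape; the same context-manager skeleton in generic form:

import contextlib
import os
import shutil
import tempfile

@contextlib.contextmanager
def tempcopy(path):
    """Yield a scratch copy of `path`; the copy is removed on exit."""
    tmpdir = tempfile.mkdtemp(prefix='tmp-copy-')
    try:
        dest = os.path.join(tmpdir, os.path.basename(path))
        shutil.copyfile(path, dest)
        yield dest
    finally:
        shutil.rmtree(tmpdir, True)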
|
3058 | 3105 | |
|
3059 | 3106 | @command( |
|
3060 | 3107 | b'perf::revlogchunks|perfrevlogchunks', |
|
3061 | 3108 | revlogopts |
|
3062 | 3109 | + formatteropts |
|
3063 | 3110 | + [ |
|
3064 | 3111 | (b'e', b'engines', b'', b'compression engines to use'), |
|
3065 | 3112 | (b's', b'startrev', 0, b'revision to start at'), |
|
3066 | 3113 | ], |
|
3067 | 3114 | b'-c|-m|FILE', |
|
3068 | 3115 | ) |
|
3069 | 3116 | def perfrevlogchunks(ui, repo, file_=None, engines=None, startrev=0, **opts): |
|
3070 | 3117 | """Benchmark operations on revlog chunks. |
|
3071 | 3118 | |
|
3072 | 3119 | Logically, each revlog is a collection of fulltext revisions. However, |
|
3073 | 3120 | stored within each revlog are "chunks" of possibly compressed data. This |
|
3074 | 3121 | data needs to be read and decompressed or compressed and written. |
|
3075 | 3122 | |
|
3076 | 3123 | This command measures the time it takes to read+decompress and recompress |
|
3077 | 3124 | chunks in a revlog. It effectively isolates I/O and compression performance. |
|
3078 | 3125 | For measurements of higher-level operations like resolving revisions, |
|
3079 | 3126 | see ``perfrevlogrevisions`` and ``perfrevlogrevision``. |
|
3080 | 3127 | """ |
|
3081 | 3128 | opts = _byteskwargs(opts) |
|
3082 | 3129 | |
|
3083 | 3130 | rl = cmdutil.openrevlog(repo, b'perfrevlogchunks', file_, opts) |
|
3084 | 3131 | |
|
3085 | 3132 | # _chunkraw was renamed to _getsegmentforrevs. |
|
3086 | 3133 | try: |
|
3087 | 3134 | segmentforrevs = rl._getsegmentforrevs |
|
3088 | 3135 | except AttributeError: |
|
3089 | 3136 | segmentforrevs = rl._chunkraw |
|
3090 | 3137 | |
|
3091 | 3138 | # Verify engines argument. |
|
3092 | 3139 | if engines: |
|
3093 | 3140 | engines = {e.strip() for e in engines.split(b',')} |
|
3094 | 3141 | for engine in engines: |
|
3095 | 3142 | try: |
|
3096 | 3143 | util.compressionengines[engine] |
|
3097 | 3144 | except KeyError: |
|
3098 | 3145 | raise error.Abort(b'unknown compression engine: %s' % engine) |
|
3099 | 3146 | else: |
|
3100 | 3147 | engines = [] |
|
3101 | 3148 | for e in util.compengines: |
|
3102 | 3149 | engine = util.compengines[e] |
|
3103 | 3150 | try: |
|
3104 | 3151 | if engine.available(): |
|
3105 | 3152 | engine.revlogcompressor().compress(b'dummy') |
|
3106 | 3153 | engines.append(e) |
|
3107 | 3154 | except NotImplementedError: |
|
3108 | 3155 | pass |
|
3109 | 3156 | |
|
3110 | 3157 | revs = list(rl.revs(startrev, len(rl) - 1)) |
|
3111 | 3158 | |
|
3112 | 3159 | def rlfh(rl): |
|
3113 | 3160 | if rl._inline: |
|
3114 | return getsvfs(repo)(rl.indexfile) | |
|
3161 | indexfile = getattr(rl, '_indexfile', None) | |
|
3162 | if indexfile is None: | |
|
3163 | # compatibility with <= hg-5.8 | |
|
3164 | indexfile = getattr(rl, 'indexfile') | |
|
3165 | return getsvfs(repo)(indexfile) | |
|
3115 | 3166 | else: |
|
3116 | return getsvfs(repo)(rl.datafile) | |
|
3167 | datafile = getattr(rl, '_datafile', getattr(rl, 'datafile')) | |
|
3168 | return getsvfs(repo)(datafile) | |
|
3117 | 3169 | |
|
3118 | 3170 | def doread(): |
|
3119 | 3171 | rl.clearcaches() |
|
3120 | 3172 | for rev in revs: |
|
3121 | 3173 | segmentforrevs(rev, rev) |
|
3122 | 3174 | |
|
3123 | 3175 | def doreadcachedfh(): |
|
3124 | 3176 | rl.clearcaches() |
|
3125 | 3177 | fh = rlfh(rl) |
|
3126 | 3178 | for rev in revs: |
|
3127 | 3179 | segmentforrevs(rev, rev, df=fh) |
|
3128 | 3180 | |
|
3129 | 3181 | def doreadbatch(): |
|
3130 | 3182 | rl.clearcaches() |
|
3131 | 3183 | segmentforrevs(revs[0], revs[-1]) |
|
3132 | 3184 | |
|
3133 | 3185 | def doreadbatchcachedfh(): |
|
3134 | 3186 | rl.clearcaches() |
|
3135 | 3187 | fh = rlfh(rl) |
|
3136 | 3188 | segmentforrevs(revs[0], revs[-1], df=fh) |
|
3137 | 3189 | |
|
3138 | 3190 | def dochunk(): |
|
3139 | 3191 | rl.clearcaches() |
|
3140 | 3192 | fh = rlfh(rl) |
|
3141 | 3193 | for rev in revs: |
|
3142 | 3194 | rl._chunk(rev, df=fh) |
|
3143 | 3195 | |
|
3144 | 3196 | chunks = [None] |
|
3145 | 3197 | |
|
3146 | 3198 | def dochunkbatch(): |
|
3147 | 3199 | rl.clearcaches() |
|
3148 | 3200 | fh = rlfh(rl) |
|
3149 | 3201 | # Save chunks as a side-effect. |
|
3150 | 3202 | chunks[0] = rl._chunks(revs, df=fh) |
|
3151 | 3203 | |
|
3152 | 3204 | def docompress(compressor): |
|
3153 | 3205 | rl.clearcaches() |
|
3154 | 3206 | |
|
3155 | 3207 | try: |
|
3156 | 3208 | # Swap in the requested compression engine. |
|
3157 | 3209 | oldcompressor = rl._compressor |
|
3158 | 3210 | rl._compressor = compressor |
|
3159 | 3211 | for chunk in chunks[0]: |
|
3160 | 3212 | rl.compress(chunk) |
|
3161 | 3213 | finally: |
|
3162 | 3214 | rl._compressor = oldcompressor |
|
3163 | 3215 | |
|
3164 | 3216 | benches = [ |
|
3165 | 3217 | (lambda: doread(), b'read'), |
|
3166 | 3218 | (lambda: doreadcachedfh(), b'read w/ reused fd'), |
|
3167 | 3219 | (lambda: doreadbatch(), b'read batch'), |
|
3168 | 3220 | (lambda: doreadbatchcachedfh(), b'read batch w/ reused fd'), |
|
3169 | 3221 | (lambda: dochunk(), b'chunk'), |
|
3170 | 3222 | (lambda: dochunkbatch(), b'chunk batch'), |
|
3171 | 3223 | ] |
|
3172 | 3224 | |
|
3173 | 3225 | for engine in sorted(engines): |
|
3174 | 3226 | compressor = util.compengines[engine].revlogcompressor() |
|
3175 | 3227 | benches.append( |
|
3176 | 3228 | ( |
|
3177 | 3229 | functools.partial(docompress, compressor), |
|
3178 | 3230 | b'compress w/ %s' % engine, |
|
3179 | 3231 | ) |
|
3180 | 3232 | ) |
|
3181 | 3233 | |
|
3182 | 3234 | for fn, title in benches: |
|
3183 | 3235 | timer, fm = gettimer(ui, opts) |
|
3184 | 3236 | timer(fn, title=title) |
|
3185 | 3237 | fm.end() |
|
3186 | 3238 | |
|
3187 | 3239 | |
|
3188 | 3240 | @command( |
|
3189 | 3241 | b'perf::revlogrevision|perfrevlogrevision', |
|
3190 | 3242 | revlogopts |
|
3191 | 3243 | + formatteropts |
|
3192 | 3244 | + [(b'', b'cache', False, b'use caches instead of clearing')], |
|
3193 | 3245 | b'-c|-m|FILE REV', |
|
3194 | 3246 | ) |
|
3195 | 3247 | def perfrevlogrevision(ui, repo, file_, rev=None, cache=None, **opts): |
|
3196 | 3248 | """Benchmark obtaining a revlog revision. |
|
3197 | 3249 | |
|
3198 | 3250 | Obtaining a revlog revision consists of roughly the following steps: |
|
3199 | 3251 | |
|
3200 | 3252 | 1. Compute the delta chain |
|
3201 | 3253 | 2. Slice the delta chain if applicable |
|
3202 | 3254 | 3. Obtain the raw chunks for that delta chain |
|
3203 | 3255 | 4. Decompress each raw chunk |
|
3204 | 3256 | 5. Apply binary patches to obtain fulltext |
|
3205 | 3257 | 6. Verify hash of fulltext |
|
3206 | 3258 | |
|
3207 | 3259 | This command measures the time spent in each of these phases. |
|
3208 | 3260 | """ |
|
3209 | 3261 | opts = _byteskwargs(opts) |
|
3210 | 3262 | |
|
3211 | 3263 | if opts.get(b'changelog') or opts.get(b'manifest'): |
|
3212 | 3264 | file_, rev = None, file_ |
|
3213 | 3265 | elif rev is None: |
|
3214 | 3266 | raise error.CommandError(b'perfrevlogrevision', b'invalid arguments') |
|
3215 | 3267 | |
|
3216 | 3268 | r = cmdutil.openrevlog(repo, b'perfrevlogrevision', file_, opts) |
|
3217 | 3269 | |
|
3218 | 3270 | # _chunkraw was renamed to _getsegmentforrevs. |
|
3219 | 3271 | try: |
|
3220 | 3272 | segmentforrevs = r._getsegmentforrevs |
|
3221 | 3273 | except AttributeError: |
|
3222 | 3274 | segmentforrevs = r._chunkraw |
|
3223 | 3275 | |
|
3224 | 3276 | node = r.lookup(rev) |
|
3225 | 3277 | rev = r.rev(node) |
|
3226 | 3278 | |
|
3227 | 3279 | def getrawchunks(data, chain): |
|
3228 | 3280 | start = r.start |
|
3229 | 3281 | length = r.length |
|
3230 | 3282 | inline = r._inline |
|
3231 | 3283 | try: |
|
3232 | 3284 | iosize = r.index.entry_size |
|
3233 | 3285 | except AttributeError: |
|
3234 | 3286 | iosize = r._io.size |
|
3235 | 3287 | buffer = util.buffer |
|
3236 | 3288 | |
|
3237 | 3289 | chunks = [] |
|
3238 | 3290 | ladd = chunks.append |
|
3239 | 3291 | for idx, item in enumerate(chain): |
|
3240 | 3292 | offset = start(item[0]) |
|
3241 | 3293 | bits = data[idx] |
|
3242 | 3294 | for rev in item: |
|
3243 | 3295 | chunkstart = start(rev) |
|
3244 | 3296 | if inline: |
|
3245 | 3297 | chunkstart += (rev + 1) * iosize |
|
3246 | 3298 | chunklength = length(rev) |
|
3247 | 3299 | ladd(buffer(bits, chunkstart - offset, chunklength)) |
|
3248 | 3300 | |
|
3249 | 3301 | return chunks |
|
3250 | 3302 | |
|
3251 | 3303 | def dodeltachain(rev): |
|
3252 | 3304 | if not cache: |
|
3253 | 3305 | r.clearcaches() |
|
3254 | 3306 | r._deltachain(rev) |
|
3255 | 3307 | |
|
3256 | 3308 | def doread(chain): |
|
3257 | 3309 | if not cache: |
|
3258 | 3310 | r.clearcaches() |
|
3259 | 3311 | for item in slicedchain: |
|
3260 | 3312 | segmentforrevs(item[0], item[-1]) |
|
3261 | 3313 | |
|
3262 | 3314 | def doslice(r, chain, size): |
|
3263 | 3315 | for s in slicechunk(r, chain, targetsize=size): |
|
3264 | 3316 | pass |
|
3265 | 3317 | |
|
3266 | 3318 | def dorawchunks(data, chain): |
|
3267 | 3319 | if not cache: |
|
3268 | 3320 | r.clearcaches() |
|
3269 | 3321 | getrawchunks(data, chain) |
|
3270 | 3322 | |
|
3271 | 3323 | def dodecompress(chunks): |
|
3272 | 3324 | decomp = r.decompress |
|
3273 | 3325 | for chunk in chunks: |
|
3274 | 3326 | decomp(chunk) |
|
3275 | 3327 | |
|
3276 | 3328 | def dopatch(text, bins): |
|
3277 | 3329 | if not cache: |
|
3278 | 3330 | r.clearcaches() |
|
3279 | 3331 | mdiff.patches(text, bins) |
|
3280 | 3332 | |
|
3281 | 3333 | def dohash(text): |
|
3282 | 3334 | if not cache: |
|
3283 | 3335 | r.clearcaches() |
|
3284 | 3336 | r.checkhash(text, node, rev=rev) |
|
3285 | 3337 | |
|
3286 | 3338 | def dorevision(): |
|
3287 | 3339 | if not cache: |
|
3288 | 3340 | r.clearcaches() |
|
3289 | 3341 | r.revision(node) |
|
3290 | 3342 | |
|
3291 | 3343 | try: |
|
3292 | 3344 | from mercurial.revlogutils.deltas import slicechunk |
|
3293 | 3345 | except ImportError: |
|
3294 | 3346 | slicechunk = getattr(revlog, '_slicechunk', None) |
|
3295 | 3347 | |
|
3296 | 3348 | size = r.length(rev) |
|
3297 | 3349 | chain = r._deltachain(rev)[0] |
|
3298 | 3350 | if not getattr(r, '_withsparseread', False): |
|
3299 | 3351 | slicedchain = (chain,) |
|
3300 | 3352 | else: |
|
3301 | 3353 | slicedchain = tuple(slicechunk(r, chain, targetsize=size)) |
|
3302 | 3354 | data = [segmentforrevs(seg[0], seg[-1])[1] for seg in slicedchain] |
|
3303 | 3355 | rawchunks = getrawchunks(data, slicedchain) |
|
3304 | 3356 | bins = r._chunks(chain) |
|
3305 | 3357 | text = bytes(bins[0]) |
|
3306 | 3358 | bins = bins[1:] |
|
3307 | 3359 | text = mdiff.patches(text, bins) |
|
3308 | 3360 | |
|
3309 | 3361 | benches = [ |
|
3310 | 3362 | (lambda: dorevision(), b'full'), |
|
3311 | 3363 | (lambda: dodeltachain(rev), b'deltachain'), |
|
3312 | 3364 | (lambda: doread(chain), b'read'), |
|
3313 | 3365 | ] |
|
3314 | 3366 | |
|
3315 | 3367 | if getattr(r, '_withsparseread', False): |
|
3316 | 3368 | slicing = (lambda: doslice(r, chain, size), b'slice-sparse-chain') |
|
3317 | 3369 | benches.append(slicing) |
|
3318 | 3370 | |
|
3319 | 3371 | benches.extend( |
|
3320 | 3372 | [ |
|
3321 | 3373 | (lambda: dorawchunks(data, slicedchain), b'rawchunks'), |
|
3322 | 3374 | (lambda: dodecompress(rawchunks), b'decompress'), |
|
3323 | 3375 | (lambda: dopatch(text, bins), b'patch'), |
|
3324 | 3376 | (lambda: dohash(text), b'hash'), |
|
3325 | 3377 | ] |
|
3326 | 3378 | ) |
|
3327 | 3379 | |
|
3328 | 3380 | timer, fm = gettimer(ui, opts) |
|
3329 | 3381 | for fn, title in benches: |
|
3330 | 3382 | timer(fn, title=title) |
|
3331 | 3383 | fm.end() |
|
3332 | 3384 | |
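Step 6 (hash verification) recomputes the revlog node: a SHA-1 over the two parent nodes in sorted order followed by the fulltext. A sketch of that computation (the classic revlog node formula; checkhash above additionally handles flags and error reporting):

import hashlib

def revlog_node(text, p1, p2):
    """SHA-1 of the sorted parent nodes followed by the fulltext."""
    a, b = sorted([p1, p2])
    return hashlib.sha1(a + b + text).digest()

nullid = b'\0' * 20
print(revlog_node(b'file contents\n', nullid, nullid).hex())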
|
3333 | 3385 | |
|
3334 | 3386 | @command( |
|
3335 | 3387 | b'perf::revset|perfrevset', |
|
3336 | 3388 | [ |
|
3337 | 3389 | (b'C', b'clear', False, b'clear volatile cache between each call.'), |
|
3338 | 3390 | (b'', b'contexts', False, b'obtain changectx for each revision'), |
|
3339 | 3391 | ] |
|
3340 | 3392 | + formatteropts, |
|
3341 | 3393 | b"REVSET", |
|
3342 | 3394 | ) |
|
3343 | 3395 | def perfrevset(ui, repo, expr, clear=False, contexts=False, **opts): |
|
3344 | 3396 | """benchmark the execution time of a revset |
|
3345 | 3397 | |
|
3346 | 3398 | Use the --clear option if you need to evaluate the impact of building the |

3347 | 3399 | volatile revision set caches on revset execution. The volatile caches hold |

3348 | 3400 | filtering- and obsolescence-related data.""" |
|
3349 | 3401 | opts = _byteskwargs(opts) |
|
3350 | 3402 | |
|
3351 | 3403 | timer, fm = gettimer(ui, opts) |
|
3352 | 3404 | |
|
3353 | 3405 | def d(): |
|
3354 | 3406 | if clear: |
|
3355 | 3407 | repo.invalidatevolatilesets() |
|
3356 | 3408 | if contexts: |
|
3357 | 3409 | for ctx in repo.set(expr): |
|
3358 | 3410 | pass |
|
3359 | 3411 | else: |
|
3360 | 3412 | for r in repo.revs(expr): |
|
3361 | 3413 | pass |
|
3362 | 3414 | |
|
3363 | 3415 | timer(d) |
|
3364 | 3416 | fm.end() |
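Typical invocations of the command above (the revsets are only examples):

$ hg perfrevset 'draft()'
$ hg perfrevset 'heads(all())' --contexts --clear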
|
3365 | 3417 | |
|
3366 | 3418 | |
|
3367 | 3419 | @command( |
|
3368 | 3420 | b'perf::volatilesets|perfvolatilesets', |
|
3369 | 3421 | [ |
|
3370 | 3422 | (b'', b'clear-obsstore', False, b'drop obsstore between each call.'), |
|
3371 | 3423 | ] |
|
3372 | 3424 | + formatteropts, |
|
3373 | 3425 | ) |
|
3374 | 3426 | def perfvolatilesets(ui, repo, *names, **opts): |
|
3375 | 3427 | """benchmark the computation of various volatile sets |

3376 | 3428 | |

3377 | 3429 | Volatile sets compute elements related to filtering and obsolescence.""" |
|
3378 | 3430 | opts = _byteskwargs(opts) |
|
3379 | 3431 | timer, fm = gettimer(ui, opts) |
|
3380 | 3432 | repo = repo.unfiltered() |
|
3381 | 3433 | |
|
3382 | 3434 | def getobs(name): |
|
3383 | 3435 | def d(): |
|
3384 | 3436 | repo.invalidatevolatilesets() |
|
3385 | 3437 | if opts[b'clear_obsstore']: |
|
3386 | 3438 | clearfilecache(repo, b'obsstore') |
|
3387 | 3439 | obsolete.getrevs(repo, name) |
|
3388 | 3440 | |
|
3389 | 3441 | return d |
|
3390 | 3442 | |
|
3391 | 3443 | allobs = sorted(obsolete.cachefuncs) |
|
3392 | 3444 | if names: |
|
3393 | 3445 | allobs = [n for n in allobs if n in names] |
|
3394 | 3446 | |
|
3395 | 3447 | for name in allobs: |
|
3396 | 3448 | timer(getobs(name), title=name) |
|
3397 | 3449 | |
|
3398 | 3450 | def getfiltered(name): |
|
3399 | 3451 | def d(): |
|
3400 | 3452 | repo.invalidatevolatilesets() |
|
3401 | 3453 | if opts[b'clear_obsstore']: |
|
3402 | 3454 | clearfilecache(repo, b'obsstore') |
|
3403 | 3455 | repoview.filterrevs(repo, name) |
|
3404 | 3456 | |
|
3405 | 3457 | return d |
|
3406 | 3458 | |
|
3407 | 3459 | allfilter = sorted(repoview.filtertable) |
|
3408 | 3460 | if names: |
|
3409 | 3461 | allfilter = [n for n in allfilter if n in names] |
|
3410 | 3462 | |
|
3411 | 3463 | for name in allfilter: |
|
3412 | 3464 | timer(getfiltered(name), title=name) |
|
3413 | 3465 | fm.end() |
|
3414 | 3466 | |
|
3415 | 3467 | |
|
3416 | 3468 | @command( |
|
3417 | 3469 | b'perf::branchmap|perfbranchmap', |
|
3418 | 3470 | [ |
|
3419 | 3471 | (b'f', b'full', False, b'Includes build time of subset'), |
|
3420 | 3472 | ( |
|
3421 | 3473 | b'', |
|
3422 | 3474 | b'clear-revbranch', |
|
3423 | 3475 | False, |
|
3424 | 3476 | b'purge the revbranch cache between computation', |
|
3425 | 3477 | ), |
|
3426 | 3478 | ] |
|
3427 | 3479 | + formatteropts, |
|
3428 | 3480 | ) |
|
3429 | 3481 | def perfbranchmap(ui, repo, *filternames, **opts): |
|
3430 | 3482 | """benchmark the update of a branchmap |
|
3431 | 3483 | |
|
3432 | 3484 | This benchmarks the full repo.branchmap() call with read and write disabled |
|
3433 | 3485 | """ |
|
3434 | 3486 | opts = _byteskwargs(opts) |
|
3435 | 3487 | full = opts.get(b"full", False) |
|
3436 | 3488 | clear_revbranch = opts.get(b"clear_revbranch", False) |
|
3437 | 3489 | timer, fm = gettimer(ui, opts) |
|
3438 | 3490 | |
|
3439 | 3491 | def getbranchmap(filtername): |
|
3440 | 3492 | """generate a benchmark function for the filtername""" |
|
3441 | 3493 | if filtername is None: |
|
3442 | 3494 | view = repo |
|
3443 | 3495 | else: |
|
3444 | 3496 | view = repo.filtered(filtername) |
|
3445 | 3497 | if util.safehasattr(view._branchcaches, '_per_filter'): |
|
3446 | 3498 | filtered = view._branchcaches._per_filter |
|
3447 | 3499 | else: |
|
3448 | 3500 | # older versions |
|
3449 | 3501 | filtered = view._branchcaches |
|
3450 | 3502 | |
|
3451 | 3503 | def d(): |
|
3452 | 3504 | if clear_revbranch: |
|
3453 | 3505 | repo.revbranchcache()._clear() |
|
3454 | 3506 | if full: |
|
3455 | 3507 | view._branchcaches.clear() |
|
3456 | 3508 | else: |
|
3457 | 3509 | filtered.pop(filtername, None) |
|
3458 | 3510 | view.branchmap() |
|
3459 | 3511 | |
|
3460 | 3512 | return d |
|
3461 | 3513 | |
|
3462 | 3514 | # add filter in smaller subset to bigger subset |
|
3463 | 3515 | possiblefilters = set(repoview.filtertable) |
|
3464 | 3516 | if filternames: |
|
3465 | 3517 | possiblefilters &= set(filternames) |
|
3466 | 3518 | subsettable = getbranchmapsubsettable() |
|
3467 | 3519 | allfilters = [] |
|
3468 | 3520 | while possiblefilters: |
|
3469 | 3521 | for name in possiblefilters: |
|
3470 | 3522 | subset = subsettable.get(name) |
|
3471 | 3523 | if subset not in possiblefilters: |
|
3472 | 3524 | break |
|
3473 | 3525 | else: |
|
3474 | 3526 | assert False, b'subset cycle %s!' % possiblefilters |
|
3475 | 3527 | allfilters.append(name) |
|
3476 | 3528 | possiblefilters.remove(name) |
|
3477 | 3529 | |
|
3478 | 3530 | # warm the cache |
|
3479 | 3531 | if not full: |
|
3480 | 3532 | for name in allfilters: |
|
3481 | 3533 | repo.filtered(name).branchmap() |
|
3482 | 3534 | if not filternames or b'unfiltered' in filternames: |
|
3483 | 3535 | # add unfiltered |
|
3484 | 3536 | allfilters.append(None) |
|
3485 | 3537 | |
|
3486 | 3538 | if util.safehasattr(branchmap.branchcache, 'fromfile'): |
|
3487 | 3539 | branchcacheread = safeattrsetter(branchmap.branchcache, b'fromfile') |
|
3488 | 3540 | branchcacheread.set(classmethod(lambda *args: None)) |
|
3489 | 3541 | else: |
|
3490 | 3542 | # older versions |
|
3491 | 3543 | branchcacheread = safeattrsetter(branchmap, b'read') |
|
3492 | 3544 | branchcacheread.set(lambda *args: None) |
|
3493 | 3545 | branchcachewrite = safeattrsetter(branchmap.branchcache, b'write') |
|
3494 | 3546 | branchcachewrite.set(lambda *args: None) |
|
3495 | 3547 | try: |
|
3496 | 3548 | for name in allfilters: |
|
3497 | 3549 | printname = name |
|
3498 | 3550 | if name is None: |
|
3499 | 3551 | printname = b'unfiltered' |
|
3500 | 3552 | timer(getbranchmap(name), title=printname) |
|
3501 | 3553 | finally: |
|
3502 | 3554 | branchcacheread.restore() |
|
3503 | 3555 | branchcachewrite.restore() |
|
3504 | 3556 | fm.end() |
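The for/else loop above is a small topological sort: each pass picks a filter whose subset is no longer pending, and the else clause only fires if every remaining candidate still depends on another (a cycle). The same pattern in isolation (the filter names are examples):

subsettable = {'visible': None, 'served': 'visible', 'immutable': 'served'}

pending = set(subsettable)
ordered = []
while pending:
    for name in pending:
        if subsettable.get(name) not in pending:
            break  # its prerequisite is already ordered (or absent)
    else:
        raise AssertionError('subset cycle %s!' % pending)
    ordered.append(name)
    pending.remove(name)
print(ordered)  # smaller subsets first: ['visible', 'served', 'immutable']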
|
3505 | 3557 | |
|
3506 | 3558 | |
|
3507 | 3559 | @command( |
|
3508 | 3560 | b'perf::branchmapupdate|perfbranchmapupdate', |
|
3509 | 3561 | [ |
|
3510 | 3562 | (b'', b'base', [], b'subset of revision to start from'), |
|
3511 | 3563 | (b'', b'target', [], b'subset of revision to end with'), |
|
3512 | 3564 | (b'', b'clear-caches', False, b'clear cache between each runs'), |
|
3513 | 3565 | ] |
|
3514 | 3566 | + formatteropts, |
|
3515 | 3567 | ) |
|
3516 | 3568 | def perfbranchmapupdate(ui, repo, base=(), target=(), **opts): |
|
3517 | 3569 | """benchmark branchmap update from <base> revs to <target> revs |
|
3518 | 3570 | |
|
3519 | 3571 | If `--clear-caches` is passed, the following items will be reset before |
|
3520 | 3572 | each update: |
|
3521 | 3573 | * the changelog instance and associated indexes |
|
3522 | 3574 | * the rev-branch-cache instance |
|
3523 | 3575 | |
|
3524 | 3576 | Examples: |
|
3525 | 3577 | |
|
3526 | 3578 | # update for the one last revision |
|
3527 | 3579 | $ hg perfbranchmapupdate --base 'not tip' --target 'tip' |
|
3528 | 3580 | |
|
3529 | 3581 | # update for a change coming with a new branch |
|
3530 | 3582 | $ hg perfbranchmapupdate --base 'stable' --target 'default' |
|
3531 | 3583 | """ |
|
3532 | 3584 | from mercurial import branchmap |
|
3533 | 3585 | from mercurial import repoview |
|
3534 | 3586 | |
|
3535 | 3587 | opts = _byteskwargs(opts) |
|
3536 | 3588 | timer, fm = gettimer(ui, opts) |
|
3537 | 3589 | clearcaches = opts[b'clear_caches'] |
|
3538 | 3590 | unfi = repo.unfiltered() |
|
3539 | 3591 | x = [None] # used to pass data between closure |
|
3540 | 3592 | |
|
3541 | 3593 | # we use a `list` here to avoid possible side effect from smartset |
|
3542 | 3594 | baserevs = list(scmutil.revrange(repo, base)) |
|
3543 | 3595 | targetrevs = list(scmutil.revrange(repo, target)) |
|
3544 | 3596 | if not baserevs: |
|
3545 | 3597 | raise error.Abort(b'no revisions selected for --base') |
|
3546 | 3598 | if not targetrevs: |
|
3547 | 3599 | raise error.Abort(b'no revisions selected for --target') |
|
3548 | 3600 | |
|
3549 | 3601 | # make sure the target branchmap also contains the one in the base |
|
3550 | 3602 | targetrevs = list(set(baserevs) | set(targetrevs)) |
|
3551 | 3603 | targetrevs.sort() |
|
3552 | 3604 | |
|
3553 | 3605 | cl = repo.changelog |
|
3554 | 3606 | allbaserevs = list(cl.ancestors(baserevs, inclusive=True)) |
|
3555 | 3607 | allbaserevs.sort() |
|
3556 | 3608 | alltargetrevs = frozenset(cl.ancestors(targetrevs, inclusive=True)) |
|
3557 | 3609 | |
|
3558 | 3610 | newrevs = list(alltargetrevs.difference(allbaserevs)) |
|
3559 | 3611 | newrevs.sort() |
|
3560 | 3612 | |
|
3561 | 3613 | allrevs = frozenset(unfi.changelog.revs()) |
|
3562 | 3614 | basefilterrevs = frozenset(allrevs.difference(allbaserevs)) |
|
3563 | 3615 | targetfilterrevs = frozenset(allrevs.difference(alltargetrevs)) |
|
3564 | 3616 | |
|
3565 | 3617 | def basefilter(repo, visibilityexceptions=None): |
|
3566 | 3618 | return basefilterrevs |
|
3567 | 3619 | |
|
3568 | 3620 | def targetfilter(repo, visibilityexceptions=None): |
|
3569 | 3621 | return targetfilterrevs |
|
3570 | 3622 | |
|
3571 | 3623 | msg = b'benchmark of branchmap with %d revisions with %d new ones\n' |
|
3572 | 3624 | ui.status(msg % (len(allbaserevs), len(newrevs))) |
|
3573 | 3625 | if targetfilterrevs: |
|
3574 | 3626 | msg = b'(%d revisions still filtered)\n' |
|
3575 | 3627 | ui.status(msg % len(targetfilterrevs)) |
|
3576 | 3628 | |
|
3577 | 3629 | try: |
|
3578 | 3630 | repoview.filtertable[b'__perf_branchmap_update_base'] = basefilter |
|
3579 | 3631 | repoview.filtertable[b'__perf_branchmap_update_target'] = targetfilter |
|
3580 | 3632 | |
|
3581 | 3633 | baserepo = repo.filtered(b'__perf_branchmap_update_base') |
|
3582 | 3634 | targetrepo = repo.filtered(b'__perf_branchmap_update_target') |
|
3583 | 3635 | |
|
3584 | 3636 | # try to find an existing branchmap to reuse |
|
3585 | 3637 | subsettable = getbranchmapsubsettable() |
|
3586 | 3638 | candidatefilter = subsettable.get(None) |
|
3587 | 3639 | while candidatefilter is not None: |
|
3588 | 3640 | candidatebm = repo.filtered(candidatefilter).branchmap() |
|
3589 | 3641 | if candidatebm.validfor(baserepo): |
|
3590 | 3642 | filtered = repoview.filterrevs(repo, candidatefilter) |
|
3591 | 3643 | missing = [r for r in allbaserevs if r in filtered] |
|
3592 | 3644 | base = candidatebm.copy() |
|
3593 | 3645 | base.update(baserepo, missing) |
|
3594 | 3646 | break |
|
3595 | 3647 | candidatefilter = subsettable.get(candidatefilter) |
|
3596 | 3648 | else: |
|
3597 | 3649 | # no suitable subset was found |
|
3598 | 3650 | base = branchmap.branchcache() |
|
3599 | 3651 | base.update(baserepo, allbaserevs) |
|
3600 | 3652 | |
|
3601 | 3653 | def setup(): |
|
3602 | 3654 | x[0] = base.copy() |
|
3603 | 3655 | if clearcaches: |
|
3604 | 3656 | unfi._revbranchcache = None |
|
3605 | 3657 | clearchangelog(repo) |
|
3606 | 3658 | |
|
3607 | 3659 | def bench(): |
|
3608 | 3660 | x[0].update(targetrepo, newrevs) |
|
3609 | 3661 | |
|
3610 | 3662 | timer(bench, setup=setup) |
|
3611 | 3663 | fm.end() |
|
3612 | 3664 | finally: |
|
3613 | 3665 | repoview.filtertable.pop(b'__perf_branchmap_update_base', None) |
|
3614 | 3666 | repoview.filtertable.pop(b'__perf_branchmap_update_target', None) |
|
3615 | 3667 | |
|
3616 | 3668 | |
|
3617 | 3669 | @command( |
|
3618 | 3670 | b'perf::branchmapload|perfbranchmapload', |
|
3619 | 3671 | [ |
|
3620 | 3672 | (b'f', b'filter', b'', b'Specify repoview filter'), |
|
3621 | 3673 | (b'', b'list', False, b'List branchmap filter caches'), |
|
3622 | 3674 | (b'', b'clear-revlogs', False, b'refresh changelog and manifest'), |
|
3623 | 3675 | ] |
|
3624 | 3676 | + formatteropts, |
|
3625 | 3677 | ) |
|
3626 | 3678 | def perfbranchmapload(ui, repo, filter=b'', list=False, **opts): |
|
3627 | 3679 | """benchmark reading the branchmap""" |
|
3628 | 3680 | opts = _byteskwargs(opts) |
|
3629 | 3681 | clearrevlogs = opts[b'clear_revlogs'] |
|
3630 | 3682 | |
|
3631 | 3683 | if list: |
|
3632 | 3684 | for name, kind, st in repo.cachevfs.readdir(stat=True): |
|
3633 | 3685 | if name.startswith(b'branch2'): |
|
3634 | 3686 | filtername = name.partition(b'-')[2] or b'unfiltered' |
|
3635 | 3687 | ui.status( |
|
3636 | 3688 | b'%s - %s\n' % (filtername, util.bytecount(st.st_size)) |
|
3637 | 3689 | ) |
|
3638 | 3690 | return |
|
3639 | 3691 | if not filter: |
|
3640 | 3692 | filter = None |
|
3641 | 3693 | subsettable = getbranchmapsubsettable() |
|
3642 | 3694 | if filter is None: |
|
3643 | 3695 | repo = repo.unfiltered() |
|
3644 | 3696 | else: |
|
3645 | 3697 | repo = repoview.repoview(repo, filter) |
|
3646 | 3698 | |
|
3647 | 3699 | repo.branchmap() # make sure we have a relevant, up-to-date branchmap |
|
3648 | 3700 | |
|
3649 | 3701 | try: |
|
3650 | 3702 | fromfile = branchmap.branchcache.fromfile |
|
3651 | 3703 | except AttributeError: |
|
3652 | 3704 | # older versions |
|
3653 | 3705 | fromfile = branchmap.read |
|
3654 | 3706 | |
|
3655 | 3707 | currentfilter = filter |
|
3656 | 3708 | # try once without timer, the filter may not be cached |
|
3657 | 3709 | while fromfile(repo) is None: |
|
3658 | 3710 | currentfilter = subsettable.get(currentfilter) |
|
3659 | 3711 | if currentfilter is None: |
|
3660 | 3712 | raise error.Abort( |
|
3661 | 3713 | b'No branchmap cached for %s repo' % (filter or b'unfiltered') |
|
3662 | 3714 | ) |
|
3663 | 3715 | repo = repo.filtered(currentfilter) |
|
3664 | 3716 | timer, fm = gettimer(ui, opts) |
|
3665 | 3717 | |
|
3666 | 3718 | def setup(): |
|
3667 | 3719 | if clearrevlogs: |
|
3668 | 3720 | clearchangelog(repo) |
|
3669 | 3721 | |
|
3670 | 3722 | def bench(): |
|
3671 | 3723 | fromfile(repo) |
|
3672 | 3724 | |
|
3673 | 3725 | timer(bench, setup=setup) |
|
3674 | 3726 | fm.end() |
|
3675 | 3727 | |
|
3676 | 3728 | |
|
3677 | 3729 | @command(b'perf::loadmarkers|perfloadmarkers') |
|
3678 | 3730 | def perfloadmarkers(ui, repo): |
|
3679 | 3731 | """benchmark the time to parse the on-disk markers for a repo |
|
3680 | 3732 | |
|
3681 | 3733 | Result is the number of markers in the repo.""" |
|
3682 | 3734 | timer, fm = gettimer(ui) |
|
3683 | 3735 | svfs = getsvfs(repo) |
|
3684 | 3736 | timer(lambda: len(obsolete.obsstore(repo, svfs))) |
|
3685 | 3737 | fm.end() |
|
3686 | 3738 | |
|
3687 | 3739 | |
|
3688 | 3740 | @command( |
|
3689 | 3741 | b'perf::lrucachedict|perflrucachedict', |
|
3690 | 3742 | formatteropts |
|
3691 | 3743 | + [ |
|
3692 | 3744 | (b'', b'costlimit', 0, b'maximum total cost of items in cache'), |
|
3693 | 3745 | (b'', b'mincost', 0, b'smallest cost of items in cache'), |
|
3694 | 3746 | (b'', b'maxcost', 100, b'maximum cost of items in cache'), |
|
3695 | 3747 | (b'', b'size', 4, b'size of cache'), |
|
3696 | 3748 | (b'', b'gets', 10000, b'number of key lookups'), |
|
3697 | 3749 | (b'', b'sets', 10000, b'number of key sets'), |
|
3698 | 3750 | (b'', b'mixed', 10000, b'number of mixed mode operations'), |
|
3699 | 3751 | ( |
|
3700 | 3752 | b'', |
|
3701 | 3753 | b'mixedgetfreq', |
|
3702 | 3754 | 50, |
|
3703 | 3755 | b'frequency of get vs set ops in mixed mode', |
|
3704 | 3756 | ), |
|
3705 | 3757 | ], |
|
3706 | 3758 | norepo=True, |
|
3707 | 3759 | ) |
|
3708 | 3760 | def perflrucache( |
|
3709 | 3761 | ui, |
|
3710 | 3762 | mincost=0, |
|
3711 | 3763 | maxcost=100, |
|
3712 | 3764 | costlimit=0, |
|
3713 | 3765 | size=4, |
|
3714 | 3766 | gets=10000, |
|
3715 | 3767 | sets=10000, |
|
3716 | 3768 | mixed=10000, |
|
3717 | 3769 | mixedgetfreq=50, |
|
3718 | 3770 | **opts |
|
3719 | 3771 | ): |
|
3720 | 3772 | opts = _byteskwargs(opts) |
|
3721 | 3773 | |
|
3722 | 3774 | def doinit(): |
|
3723 | 3775 | for i in _xrange(10000): |
|
3724 | 3776 | util.lrucachedict(size) |
|
3725 | 3777 | |
|
3726 | 3778 | costrange = list(range(mincost, maxcost + 1)) |
|
3727 | 3779 | |
|
3728 | 3780 | values = [] |
|
3729 | 3781 | for i in _xrange(size): |
|
3730 | 3782 | values.append(random.randint(0, _maxint)) |
|
3731 | 3783 | |
|
3732 | 3784 | # Get mode fills the cache and tests raw lookup performance with no |
|
3733 | 3785 | # eviction. |
|
3734 | 3786 | getseq = [] |
|
3735 | 3787 | for i in _xrange(gets): |
|
3736 | 3788 | getseq.append(random.choice(values)) |
|
3737 | 3789 | |
|
3738 | 3790 | def dogets(): |
|
3739 | 3791 | d = util.lrucachedict(size) |
|
3740 | 3792 | for v in values: |
|
3741 | 3793 | d[v] = v |
|
3742 | 3794 | for key in getseq: |
|
3743 | 3795 | value = d[key] |
|
3744 | 3796 | value # silence pyflakes warning |
|
3745 | 3797 | |
|
3746 | 3798 | def dogetscost(): |
|
3747 | 3799 | d = util.lrucachedict(size, maxcost=costlimit) |
|
3748 | 3800 | for i, v in enumerate(values): |
|
3749 | 3801 | d.insert(v, v, cost=costs[i]) |
|
3750 | 3802 | for key in getseq: |
|
3751 | 3803 | try: |
|
3752 | 3804 | value = d[key] |
|
3753 | 3805 | value # silence pyflakes warning |
|
3754 | 3806 | except KeyError: |
|
3755 | 3807 | pass |
|
3756 | 3808 | |
|
3757 | 3809 | # Set mode tests insertion speed with cache eviction. |
|
3758 | 3810 | setseq = [] |
|
3759 | 3811 | costs = [] |
|
3760 | 3812 | for i in _xrange(sets): |
|
3761 | 3813 | setseq.append(random.randint(0, _maxint)) |
|
3762 | 3814 | costs.append(random.choice(costrange)) |
|
3763 | 3815 | |
|
3764 | 3816 | def doinserts(): |
|
3765 | 3817 | d = util.lrucachedict(size) |
|
3766 | 3818 | for v in setseq: |
|
3767 | 3819 | d.insert(v, v) |
|
3768 | 3820 | |
|
3769 | 3821 | def doinsertscost(): |
|
3770 | 3822 | d = util.lrucachedict(size, maxcost=costlimit) |
|
3771 | 3823 | for i, v in enumerate(setseq): |
|
3772 | 3824 | d.insert(v, v, cost=costs[i]) |
|
3773 | 3825 | |
|
3774 | 3826 | def dosets(): |
|
3775 | 3827 | d = util.lrucachedict(size) |
|
3776 | 3828 | for v in setseq: |
|
3777 | 3829 | d[v] = v |
|
3778 | 3830 | |
|
3779 | 3831 | # Mixed mode randomly performs gets and sets with eviction. |
|
3780 | 3832 | mixedops = [] |
|
3781 | 3833 | for i in _xrange(mixed): |
|
3782 | 3834 | r = random.randint(0, 100) |
|
3783 | 3835 | if r < mixedgetfreq: |
|
3784 | 3836 | op = 0 |
|
3785 | 3837 | else: |
|
3786 | 3838 | op = 1 |
|
3787 | 3839 | |
|
3788 | 3840 | mixedops.append( |
|
3789 | 3841 | (op, random.randint(0, size * 2), random.choice(costrange)) |
|
3790 | 3842 | ) |
|
3791 | 3843 | |
|
3792 | 3844 | def domixed(): |
|
3793 | 3845 | d = util.lrucachedict(size) |
|
3794 | 3846 | |
|
3795 | 3847 | for op, v, cost in mixedops: |
|
3796 | 3848 | if op == 0: |
|
3797 | 3849 | try: |
|
3798 | 3850 | d[v] |
|
3799 | 3851 | except KeyError: |
|
3800 | 3852 | pass |
|
3801 | 3853 | else: |
|
3802 | 3854 | d[v] = v |
|
3803 | 3855 | |
|
3804 | 3856 | def domixedcost(): |
|
3805 | 3857 | d = util.lrucachedict(size, maxcost=costlimit) |
|
3806 | 3858 | |
|
3807 | 3859 | for op, v, cost in mixedops: |
|
3808 | 3860 | if op == 0: |
|
3809 | 3861 | try: |
|
3810 | 3862 | d[v] |
|
3811 | 3863 | except KeyError: |
|
3812 | 3864 | pass |
|
3813 | 3865 | else: |
|
3814 | 3866 | d.insert(v, v, cost=cost) |
|
3815 | 3867 | |
|
3816 | 3868 | benches = [ |
|
3817 | 3869 | (doinit, b'init'), |
|
3818 | 3870 | ] |
|
3819 | 3871 | |
|
3820 | 3872 | if costlimit: |
|
3821 | 3873 | benches.extend( |
|
3822 | 3874 | [ |
|
3823 | 3875 | (dogetscost, b'gets w/ cost limit'), |
|
3824 | 3876 | (doinsertscost, b'inserts w/ cost limit'), |
|
3825 | 3877 | (domixedcost, b'mixed w/ cost limit'), |
|
3826 | 3878 | ] |
|
3827 | 3879 | ) |
|
3828 | 3880 | else: |
|
3829 | 3881 | benches.extend( |
|
3830 | 3882 | [ |
|
3831 | 3883 | (dogets, b'gets'), |
|
3832 | 3884 | (doinserts, b'inserts'), |
|
3833 | 3885 | (dosets, b'sets'), |
|
3834 | 3886 | (domixed, b'mixed'), |
|
3835 | 3887 | ] |
|
3836 | 3888 | ) |
|
3837 | 3889 | |
|
3838 | 3890 | for fn, title in benches: |
|
3839 | 3891 | timer, fm = gettimer(ui, opts) |
|
3840 | 3892 | timer(fn, title=title) |
|
3841 | 3893 | fm.end() |
|
3842 | 3894 | |
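The benchmark above exercises util.lrucachedict in the two modes its helpers show: plain size-bounded LRU (d[k] = v) and cost-aware eviction (d.insert(k, v, cost=...) with maxcost set). A hedged usage sketch, assuming a Mercurial checkout on the Python path; the API calls mirror those in the benchmark itself:

    from mercurial import util

    d = util.lrucachedict(4)              # at most 4 entries, LRU eviction
    d[b'a'] = 1                           # plain set, as in dosets()
    d.insert(b'b', 2, cost=10)            # attach a cost, as in doinserts()
    try:
        value = d[b'a']                   # hit; KeyError would mean eviction
    except KeyError:
        pass

    dc = util.lrucachedict(4, maxcost=15) # bound the total cost of the cache
    dc.insert(b'x', b'x', cost=10)
    dc.insert(b'y', b'y', cost=10)        # total cost would exceed 15, so the
                                          # least-recently-used entry is evicted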
|
3843 | 3895 | |
|
3844 | 3896 | @command( |
|
3845 | 3897 | b'perf::write|perfwrite', |
|
3846 | 3898 | formatteropts |
|
3847 | 3899 | + [ |
|
3848 | 3900 | (b'', b'write-method', b'write', b'ui write method'), |
|
3849 | 3901 | (b'', b'nlines', 100, b'number of lines'), |
|
3850 | 3902 | (b'', b'nitems', 100, b'number of items (per line)'), |
|
3851 | 3903 | (b'', b'item', b'x', b'item that is written'), |
|
3852 | 3904 | (b'', b'batch-line', None, b'pass whole line to write method at once'), |
|
3853 | 3905 | (b'', b'flush-line', None, b'flush after each line'), |
|
3854 | 3906 | ], |
|
3855 | 3907 | ) |
|
3856 | 3908 | def perfwrite(ui, repo, **opts): |
|
3857 | 3909 | """microbenchmark ui.write (and others)""" |
|
3858 | 3910 | opts = _byteskwargs(opts) |
|
3859 | 3911 | |
|
3860 | 3912 | write = getattr(ui, _sysstr(opts[b'write_method'])) |
|
3861 | 3913 | nlines = int(opts[b'nlines']) |
|
3862 | 3914 | nitems = int(opts[b'nitems']) |
|
3863 | 3915 | item = opts[b'item'] |
|
3864 | 3916 | batch_line = opts.get(b'batch_line') |
|
3865 | 3917 | flush_line = opts.get(b'flush_line') |
|
3866 | 3918 | |
|
3867 | 3919 | if batch_line: |
|
3868 | 3920 | line = item * nitems + b'\n' |
|
3869 | 3921 | |
|
3870 | 3922 | def benchmark(): |
|
3871 | 3923 | for i in pycompat.xrange(nlines): |
|
3872 | 3924 | if batch_line: |
|
3873 | 3925 | write(line) |
|
3874 | 3926 | else: |
|
3875 | 3927 | for i in pycompat.xrange(nitems): |
|
3876 | 3928 | write(item) |
|
3877 | 3929 | write(b'\n') |
|
3878 | 3930 | if flush_line: |
|
3879 | 3931 | ui.flush() |
|
3880 | 3932 | ui.flush() |
|
3881 | 3933 | |
|
3882 | 3934 | timer, fm = gettimer(ui, opts) |
|
3883 | 3935 | timer(benchmark) |
|
3884 | 3936 | fm.end() |
|
3885 | 3937 | |
|
3886 | 3938 | |
|
3887 | 3939 | def uisetup(ui): |
|
3888 | 3940 | if util.safehasattr(cmdutil, b'openrevlog') and not util.safehasattr( |
|
3889 | 3941 | commands, b'debugrevlogopts' |
|
3890 | 3942 | ): |
|
3891 | 3943 | # for "historical portability": |
|
3892 | 3944 | # In this case, Mercurial should be 1.9 (or a79fea6b3e77) - |
|
3893 | 3945 | # 3.7 (or 5606f7d0d063). Therefore, '--dir' option for |
|
3894 | 3946 | # openrevlog() should cause failure, because it has been |
|
3895 | 3947 | # available since 3.5 (or 49c583ca48c4). |
|
3896 | 3948 | def openrevlog(orig, repo, cmd, file_, opts): |
|
3897 | 3949 | if opts.get(b'dir') and not util.safehasattr(repo, b'dirlog'): |
|
3898 | 3950 | raise error.Abort( |
|
3899 | 3951 | b"This version doesn't support --dir option", |
|
3900 | 3952 | hint=b"use 3.5 or later", |
|
3901 | 3953 | ) |
|
3902 | 3954 | return orig(repo, cmd, file_, opts) |
|
3903 | 3955 | |
|
3904 | 3956 | extensions.wrapfunction(cmdutil, b'openrevlog', openrevlog) |
|
3905 | 3957 | |
|
3906 | 3958 | |
|
3907 | 3959 | @command( |
|
3908 | 3960 | b'perf::progress|perfprogress', |
|
3909 | 3961 | formatteropts |
|
3910 | 3962 | + [ |
|
3911 | 3963 | (b'', b'topic', b'topic', b'topic for progress messages'), |
|
3912 | 3964 | (b'c', b'total', 1000000, b'total value we are progressing to'), |
|
3913 | 3965 | ], |
|
3914 | 3966 | norepo=True, |
|
3915 | 3967 | ) |
|
3916 | 3968 | def perfprogress(ui, topic=None, total=None, **opts): |
|
3917 | 3969 | """printing of progress bars""" |
|
3918 | 3970 | opts = _byteskwargs(opts) |
|
3919 | 3971 | |
|
3920 | 3972 | timer, fm = gettimer(ui, opts) |
|
3921 | 3973 | |
|
3922 | 3974 | def doprogress(): |
|
3923 | 3975 | with ui.makeprogress(topic, total=total) as progress: |
|
3924 | 3976 | for i in _xrange(total): |
|
3925 | 3977 | progress.increment() |
|
3926 | 3978 | |
|
3927 | 3979 | timer(doprogress) |
|
3928 | 3980 | fm.end() |
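perfprogress times nothing but progress.increment() itself, so the result isolates the overhead of Mercurial's progress machinery. The measured pattern, extracted as a sketch (a ui object is assumed):

    with ui.makeprogress(b'topic', total=1000000) as progress:
        for i in range(1000000):
            progress.increment()    # one bar update per iteration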
@@ -1,48 +1,57 b'' | |||
|
1 | 1 | #!/usr/bin/env python3 |
|
2 | 2 | # Undump a dump from dumprevlog |
|
3 | 3 | # $ hg init |
|
4 | 4 | # $ undumprevlog < repo.dump |
|
5 | 5 | |
|
6 | 6 | from __future__ import absolute_import, print_function |
|
7 | 7 | |
|
8 | 8 | import sys |
|
9 | 9 | from mercurial.node import bin |
|
10 | 10 | from mercurial import ( |
|
11 | 11 | encoding, |
|
12 | 12 | revlog, |
|
13 | 13 | transaction, |
|
14 | 14 | vfs as vfsmod, |
|
15 | 15 | ) |
|
16 | 16 | from mercurial.utils import procutil |
|
17 | 17 | |
|
18 | from mercurial.revlogutils import ( | |
|
19 | constants as revlog_constants, | |
|
20 | ) | |
|
21 | ||
|
18 | 22 | for fp in (sys.stdin, sys.stdout, sys.stderr): |
|
19 | 23 | procutil.setbinary(fp) |
|
20 | 24 | |
|
21 | 25 | opener = vfsmod.vfs(b'.', False) |
|
22 | 26 | tr = transaction.transaction( |
|
23 | 27 | sys.stderr.write, opener, {b'store': opener}, b"undump.journal" |
|
24 | 28 | ) |
|
25 | 29 | while True: |
|
26 | 30 | l = sys.stdin.readline() |
|
27 | 31 | if not l: |
|
28 | 32 | break |
|
29 | 33 | if l.startswith("file:"): |
|
30 | 34 | f = encoding.strtolocal(l[6:-1]) |
|
31 | r = revlog.revlog(opener, f) | |
|
35 | assert f.endswith(b'.i') | |
|
36 | r = revlog.revlog( | |
|
37 | opener, | |
|
38 | target=(revlog_constants.KIND_OTHER, b'undump-revlog'), | |
|
39 | radix=f[:-2], | |
|
40 | ) | |
|
32 | 41 | procutil.stdout.write(b'%s\n' % f) |
|
33 | 42 | elif l.startswith("node:"): |
|
34 | 43 | n = bin(l[6:-1]) |
|
35 | 44 | elif l.startswith("linkrev:"): |
|
36 | 45 | lr = int(l[9:-1]) |
|
37 | 46 | elif l.startswith("parents:"): |
|
38 | 47 | p = l[9:-1].split() |
|
39 | 48 | p1 = bin(p[0]) |
|
40 | 49 | p2 = bin(p[1]) |
|
41 | 50 | elif l.startswith("length:"): |
|
42 | 51 | length = int(l[8:-1]) |
|
43 | 52 | sys.stdin.readline() # start marker |
|
44 | 53 | d = encoding.strtolocal(sys.stdin.read(length)) |
|
45 | 54 | sys.stdin.readline() # end marker |
|
46 | 55 | r.addrevision(d, tr, lr, p1, p2) |
|
47 | 56 | |
|
48 | 57 | tr.close() |
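The reader loop above implies a simple line-oriented dump format from dumprevlog: a "file:" line opens each revlog, then one record per revision with "node:", "linkrev:", "parents:", and "length:" fields, the payload bracketed by start/end marker lines that the script skips without inspecting. A hedged illustration of one record (field layout inferred from the slicing offsets; the marker text is whatever dumprevlog emits):

    file: data/foo.txt.i
    node: <40 hex digits>
    linkrev: 12
    parents: <40 hex digits> <40 hex digits>
    length: 58
    <start marker line>
    ...58 bytes of revision data...
    <end marker line>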
@@ -1,1166 +1,1165 b'' | |||
|
1 | 1 | # absorb.py |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2016 Facebook, Inc. |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | 8 | """apply working directory changes to changesets (EXPERIMENTAL) |
|
9 | 9 | |
|
10 | 10 | The absorb extension provides a command to use annotate information to |
|
11 | 11 | amend modified chunks into the corresponding non-public changesets. |
|
12 | 12 | |
|
13 | 13 | :: |
|
14 | 14 | |
|
15 | 15 | [absorb] |
|
16 | 16 | # only check 50 recent non-public changesets at most |
|
17 | 17 | max-stack-size = 50 |
|
18 | 18 | # whether to add noise to new commits to avoid obsolescence cycle |
|
19 | 19 | add-noise = 1 |
|
20 | 20 | # make `amend --correlated` a shortcut to the main command |
|
21 | 21 | amend-flag = correlated |
|
22 | 22 | |
|
23 | 23 | [color] |
|
24 | 24 | absorb.description = yellow |
|
25 | 25 | absorb.node = blue bold |
|
26 | 26 | absorb.path = bold |
|
27 | 27 | """ |
|
28 | 28 | |
|
29 | 29 | # TODO: |
|
30 | 30 | # * Rename config items to [commands] namespace |
|
31 | 31 | # * Converge getdraftstack() with other code in core |
|
32 | 32 | # * move many attributes on fixupstate to be private |
|
33 | 33 | |
|
34 | 34 | from __future__ import absolute_import |
|
35 | 35 | |
|
36 | 36 | import collections |
|
37 | 37 | |
|
38 | 38 | from mercurial.i18n import _ |
|
39 | 39 | from mercurial.node import ( |
|
40 | 40 | hex, |
|
41 | nullid, | |
|
42 | 41 | short, |
|
43 | 42 | ) |
|
44 | 43 | from mercurial import ( |
|
45 | 44 | cmdutil, |
|
46 | 45 | commands, |
|
47 | 46 | context, |
|
48 | 47 | crecord, |
|
49 | 48 | error, |
|
50 | 49 | linelog, |
|
51 | 50 | mdiff, |
|
52 | 51 | obsolete, |
|
53 | 52 | patch, |
|
54 | 53 | phases, |
|
55 | 54 | pycompat, |
|
56 | 55 | registrar, |
|
57 | 56 | rewriteutil, |
|
58 | 57 | scmutil, |
|
59 | 58 | util, |
|
60 | 59 | ) |
|
61 | 60 | from mercurial.utils import stringutil |
|
62 | 61 | |
|
63 | 62 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
64 | 63 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
65 | 64 | # be specifying the version(s) of Mercurial they are tested with, or |
|
66 | 65 | # leave the attribute unspecified. |
|
67 | 66 | testedwith = b'ships-with-hg-core' |
|
68 | 67 | |
|
69 | 68 | cmdtable = {} |
|
70 | 69 | command = registrar.command(cmdtable) |
|
71 | 70 | |
|
72 | 71 | configtable = {} |
|
73 | 72 | configitem = registrar.configitem(configtable) |
|
74 | 73 | |
|
75 | 74 | configitem(b'absorb', b'add-noise', default=True) |
|
76 | 75 | configitem(b'absorb', b'amend-flag', default=None) |
|
77 | 76 | configitem(b'absorb', b'max-stack-size', default=50) |
|
78 | 77 | |
|
79 | 78 | colortable = { |
|
80 | 79 | b'absorb.description': b'yellow', |
|
81 | 80 | b'absorb.node': b'blue bold', |
|
82 | 81 | b'absorb.path': b'bold', |
|
83 | 82 | } |
|
84 | 83 | |
|
85 | 84 | defaultdict = collections.defaultdict |
|
86 | 85 | |
|
87 | 86 | |
|
88 | 87 | class nullui(object): |
|
89 | 88 | """blank ui object doing nothing""" |
|
90 | 89 | |
|
91 | 90 | debugflag = False |
|
92 | 91 | verbose = False |
|
93 | 92 | quiet = True |
|
94 | 93 | |
|
95 | 94 | def __getitem__(self, name):
|
96 | 95 | def nullfunc(*args, **kwds): |
|
97 | 96 | return |
|
98 | 97 | |
|
99 | 98 | return nullfunc |
|
100 | 99 | |
|
101 | 100 | |
|
102 | 101 | class emptyfilecontext(object): |
|
103 | 102 | """minimal filecontext representing an empty file""" |
|
104 | 103 | |
|
105 | 104 | def __init__(self, repo): |
|
106 | 105 | self._repo = repo |
|
107 | 106 | |
|
108 | 107 | def data(self): |
|
109 | 108 | return b'' |
|
110 | 109 | |
|
111 | 110 | def node(self): |
|
112 | return nullid | |
|
111 | return self._repo.nullid | |
|
113 | 112 | |
|
114 | 113 | |
|
115 | 114 | def uniq(lst): |
|
116 | 115 | """list -> list. remove duplicated items without changing the order""" |
|
117 | 116 | seen = set() |
|
118 | 117 | result = [] |
|
119 | 118 | for x in lst: |
|
120 | 119 | if x not in seen: |
|
121 | 120 | seen.add(x) |
|
122 | 121 | result.append(x) |
|
123 | 122 | return result |
|
124 | 123 | |
|
125 | 124 | |
|
126 | 125 | def getdraftstack(headctx, limit=None): |
|
127 | 126 | """(ctx, int?) -> [ctx]. get a linear stack of non-public changesets. |
|
128 | 127 | |
|
129 | 128 | changesets are sorted in topo order, oldest first. |
|
130 | 129 | return at most limit items, if limit is a positive number. |
|
131 | 130 | |
|
132 | 131 | merges are considered non-draft as well, i.e. every commit

133 | 132 | returned has one and only one parent.
|
134 | 133 | """ |
|
135 | 134 | ctx = headctx |
|
136 | 135 | result = [] |
|
137 | 136 | while ctx.phase() != phases.public: |
|
138 | 137 | if limit and len(result) >= limit: |
|
139 | 138 | break |
|
140 | 139 | parents = ctx.parents() |
|
141 | 140 | if len(parents) != 1: |
|
142 | 141 | break |
|
143 | 142 | result.append(ctx) |
|
144 | 143 | ctx = parents[0] |
|
145 | 144 | result.reverse() |
|
146 | 145 | return result |
|
147 | 146 | |
|
148 | 147 | |
|
149 | 148 | def getfilestack(stack, path, seenfctxs=None): |
|
150 | 149 | """([ctx], str, set) -> [fctx], {ctx: fctx} |
|
151 | 150 | |
|
152 | 151 | stack is a list of contexts, from old to new. usually they are what |
|
153 | 152 | "getdraftstack" returns. |
|
154 | 153 | |
|
155 | 154 | follows renames, but not copies. |
|
156 | 155 | |
|
157 | 156 | seenfctxs is a set of filecontexts that will be considered "immutable". |
|
158 | 157 | they are usually what this function returned in earlier calls, useful |
|
159 | 158 | to avoid issues that a file was "moved" to multiple places and was then |
|
160 | 159 | modified differently, like: "a" was copied to "b", "a" was also copied to |
|
161 | 160 | "c" and then "a" was deleted, then both "b" and "c" were "moved" from "a" |
|
162 | 161 | and we enforce only one of them to be able to affect "a"'s content. |
|
163 | 162 | |
|
164 | 163 | return an empty list and an empty dict, if the specified path does not |
|
165 | 164 | exist in stack[-1] (the top of the stack). |
|
166 | 165 | |
|
167 | 166 | otherwise, return a list of de-duplicated filecontexts, and the map to |
|
168 | 167 | convert ctx in the stack to fctx, for possible mutable fctxs. the first item |
|
169 | 168 | of the list would be outside the stack and should be considered immutable. |
|
170 | 169 | the remaining items are within the stack. |
|
171 | 170 | |
|
172 | 171 | for example, given the following changelog and corresponding filelog |
|
173 | 172 | revisions: |
|
174 | 173 | |
|
175 | 174 | changelog: 3----4----5----6----7 |
|
176 | 175 | filelog: x 0----1----1----2 (x: no such file yet) |
|
177 | 176 | |
|
178 | 177 | - if stack = [5, 6, 7], returns ([0, 1, 2], {5: 1, 6: 1, 7: 2}) |
|
179 | 178 | - if stack = [3, 4, 5], returns ([e, 0, 1], {4: 0, 5: 1}), where "e" is a |
|
180 | 179 | dummy empty filecontext. |
|
181 | 180 | - if stack = [2], returns ([], {}) |
|
182 | 181 | - if stack = [7], returns ([1, 2], {7: 2}) |
|
183 | 182 | - if stack = [6, 7], returns ([1, 2], {6: 1, 7: 2}), although {6: 1} can be |
|
184 | 183 | removed, since 1 is immutable. |
|
185 | 184 | """ |
|
186 | 185 | if seenfctxs is None: |
|
187 | 186 | seenfctxs = set() |
|
188 | 187 | assert stack |
|
189 | 188 | |
|
190 | 189 | if path not in stack[-1]: |
|
191 | 190 | return [], {} |
|
192 | 191 | |
|
193 | 192 | fctxs = [] |
|
194 | 193 | fctxmap = {} |
|
195 | 194 | |
|
196 | 195 | pctx = stack[0].p1() # the public (immutable) ctx we stop at |
|
197 | 196 | for ctx in reversed(stack): |
|
198 | 197 | if path not in ctx: # the file is added in the next commit |
|
199 | 198 | pctx = ctx |
|
200 | 199 | break |
|
201 | 200 | fctx = ctx[path] |
|
202 | 201 | fctxs.append(fctx) |
|
203 | 202 | if fctx in seenfctxs: # treat fctx as the immutable one |
|
204 | 203 | pctx = None # do not add another immutable fctx |
|
205 | 204 | break |
|
206 | 205 | fctxmap[ctx] = fctx # only for mutable fctxs |
|
207 | 206 | copy = fctx.copysource() |
|
208 | 207 | if copy: |
|
209 | 208 | path = copy # follow rename |
|
210 | 209 | if path in ctx: # but do not follow copy |
|
211 | 210 | pctx = ctx.p1() |
|
212 | 211 | break |
|
213 | 212 | |
|
214 | 213 | if pctx is not None: # need an extra immutable fctx |
|
215 | 214 | if path in pctx: |
|
216 | 215 | fctxs.append(pctx[path]) |
|
217 | 216 | else: |
|
218 | 217 | fctxs.append(emptyfilecontext(pctx.repo())) |
|
219 | 218 | |
|
220 | 219 | fctxs.reverse() |
|
221 | 220 | # note: we rely on a property of hg: filerev is not reused for linear |
|
222 | 221 | # history. i.e. it's impossible to have: |
|
223 | 222 | # changelog: 4----5----6 (linear, no merges) |
|
224 | 223 | # filelog: 1----2----1 |
|
225 | 224 | # ^ reuse filerev (impossible) |
|
226 | 225 | # because parents are part of the hash. if that's not true, we need to |
|
227 | 226 | # remove uniq and find a different way to identify fctxs. |
|
228 | 227 | return uniq(fctxs), fctxmap |
|
229 | 228 | |
|
230 | 229 | |
|
231 | 230 | class overlaystore(patch.filestore): |
|
232 | 231 | """read-only, hybrid store based on a dict and ctx. |
|
233 | 232 | memworkingcopy: {path: content}, overrides file contents. |
|
234 | 233 | """ |
|
235 | 234 | |
|
236 | 235 | def __init__(self, basectx, memworkingcopy): |
|
237 | 236 | self.basectx = basectx |
|
238 | 237 | self.memworkingcopy = memworkingcopy |
|
239 | 238 | |
|
240 | 239 | def getfile(self, path): |
|
241 | 240 | """comply with mercurial.patch.filestore.getfile""" |
|
242 | 241 | if path not in self.basectx: |
|
243 | 242 | return None, None, None |
|
244 | 243 | fctx = self.basectx[path] |
|
245 | 244 | if path in self.memworkingcopy: |
|
246 | 245 | content = self.memworkingcopy[path] |
|
247 | 246 | else: |
|
248 | 247 | content = fctx.data() |
|
249 | 248 | mode = (fctx.islink(), fctx.isexec()) |
|
250 | 249 | copy = fctx.copysource() |
|
251 | 250 | return content, mode, copy |
|
252 | 251 | |
|
253 | 252 | |
|
254 | 253 | def overlaycontext(memworkingcopy, ctx, parents=None, extra=None, desc=None): |
|
255 | 254 | """({path: content}, ctx, (p1node, p2node)?, {}?) -> memctx |
|
256 | 255 | memworkingcopy overrides file contents. |
|
257 | 256 | """ |
|
258 | 257 | # parents must contain 2 items: (node1, node2) |
|
259 | 258 | if parents is None: |
|
260 | 259 | parents = ctx.repo().changelog.parents(ctx.node()) |
|
261 | 260 | if extra is None: |
|
262 | 261 | extra = ctx.extra() |
|
263 | 262 | if desc is None: |
|
264 | 263 | desc = ctx.description() |
|
265 | 264 | date = ctx.date() |
|
266 | 265 | user = ctx.user() |
|
267 | 266 | files = set(ctx.files()).union(memworkingcopy) |
|
268 | 267 | store = overlaystore(ctx, memworkingcopy) |
|
269 | 268 | return context.memctx( |
|
270 | 269 | repo=ctx.repo(), |
|
271 | 270 | parents=parents, |
|
272 | 271 | text=desc, |
|
273 | 272 | files=files, |
|
274 | 273 | filectxfn=store, |
|
275 | 274 | user=user, |
|
276 | 275 | date=date, |
|
277 | 276 | branch=None, |
|
278 | 277 | extra=extra, |
|
279 | 278 | ) |
|
280 | 279 | |
|
281 | 280 | |
|
282 | 281 | class filefixupstate(object): |
|
283 | 282 | """state needed to apply fixups to a single file |
|
284 | 283 | |
|
285 | 284 | internally, it keeps file contents of several revisions and a linelog. |
|
286 | 285 | |
|
287 | 286 | the linelog uses odd revision numbers for original contents (fctxs passed |
|
288 | 287 | to __init__), and even revision numbers for fixups, like: |
|
289 | 288 | |
|
290 | 289 | linelog rev 1: self.fctxs[0] (from an immutable "public" changeset) |
|
291 | 290 | linelog rev 2: fixups made to self.fctxs[0] |
|
292 | 291 | linelog rev 3: self.fctxs[1] (a child of fctxs[0]) |
|
293 | 292 | linelog rev 4: fixups made to self.fctxs[1] |
|
294 | 293 | ... |
|
295 | 294 | |
|
296 | 295 | a typical use is like: |
|
297 | 296 | |
|
298 | 297 | 1. call diffwith, to calculate self.fixups |
|
299 | 298 | 2. (optionally), present self.fixups to the user, or change it |
|
300 | 299 | 3. call apply, to apply changes |
|
301 | 300 | 4. read results from "finalcontents", or call getfinalcontent |
|
302 | 301 | """ |
|
303 | 302 | |
|
304 | 303 | def __init__(self, fctxs, path, ui=None, opts=None): |
|
305 | 304 | """([fctx], str, ui or None) -> None
|
306 | 305 | |
|
307 | 306 | fctxs should be linear, and sorted by topo order - oldest first. |
|
308 | 307 | fctxs[0] will be considered as "immutable" and will not be changed. |
|
309 | 308 | """ |
|
310 | 309 | self.fctxs = fctxs |
|
311 | 310 | self.path = path |
|
312 | 311 | self.ui = ui or nullui() |
|
313 | 312 | self.opts = opts or {} |
|
314 | 313 | |
|
315 | 314 | # following fields are built from fctxs. they exist for perf reason |
|
316 | 315 | self.contents = [f.data() for f in fctxs] |
|
317 | 316 | self.contentlines = pycompat.maplist(mdiff.splitnewlines, self.contents) |
|
318 | 317 | self.linelog = self._buildlinelog() |
|
319 | 318 | if self.ui.debugflag: |
|
320 | 319 | assert self._checkoutlinelog() == self.contents |
|
321 | 320 | |
|
322 | 321 | # following fields will be filled later |
|
323 | 322 | self.chunkstats = [0, 0] # [adopted, total : int] |
|
324 | 323 | self.targetlines = [] # [str] |
|
325 | 324 | self.fixups = [] # [(linelog rev, a1, a2, b1, b2)] |
|
326 | 325 | self.finalcontents = [] # [str] |
|
327 | 326 | self.ctxaffected = set() |
|
328 | 327 | |
|
329 | 328 | def diffwith(self, targetfctx, fm=None): |
|
330 | 329 | """calculate fixups needed by examining the differences between |
|
331 | 330 | self.fctxs[-1] and targetfctx, chunk by chunk. |
|
332 | 331 | |
|
333 | 332 | targetfctx is the target state we move towards. we may or may not be |
|
334 | 333 | able to get there because not all modified chunks can be amended into |
|
335 | 334 | a non-public fctx unambiguously. |
|
336 | 335 | |
|
337 | 336 | call this only once, before apply(). |
|
338 | 337 | |
|
339 | 338 | update self.fixups, self.chunkstats, and self.targetlines. |
|
340 | 339 | """ |
|
341 | 340 | a = self.contents[-1] |
|
342 | 341 | alines = self.contentlines[-1] |
|
343 | 342 | b = targetfctx.data() |
|
344 | 343 | blines = mdiff.splitnewlines(b) |
|
345 | 344 | self.targetlines = blines |
|
346 | 345 | |
|
347 | 346 | self.linelog.annotate(self.linelog.maxrev) |
|
348 | 347 | annotated = self.linelog.annotateresult # [(linelog rev, linenum)] |
|
349 | 348 | assert len(annotated) == len(alines) |
|
350 | 349 | # add a dummy end line to make insertion at the end easier |
|
351 | 350 | if annotated: |
|
352 | 351 | dummyendline = (annotated[-1][0], annotated[-1][1] + 1) |
|
353 | 352 | annotated.append(dummyendline) |
|
354 | 353 | |
|
355 | 354 | # analyse diff blocks |
|
356 | 355 | for chunk in self._alldiffchunks(a, b, alines, blines): |
|
357 | 356 | newfixups = self._analysediffchunk(chunk, annotated) |
|
358 | 357 | self.chunkstats[0] += bool(newfixups) # 1 or 0 |
|
359 | 358 | self.chunkstats[1] += 1 |
|
360 | 359 | self.fixups += newfixups |
|
361 | 360 | if fm is not None: |
|
362 | 361 | self._showchanges(fm, alines, blines, chunk, newfixups) |
|
363 | 362 | |
|
364 | 363 | def apply(self): |
|
365 | 364 | """apply self.fixups. update self.linelog, self.finalcontents. |
|
366 | 365 | |
|
367 | 366 | call this only once, before getfinalcontent(), after diffwith(). |
|
368 | 367 | """ |
|
369 | 368 | # the following is unnecessary, as it's done by "diffwith": |
|
370 | 369 | # self.linelog.annotate(self.linelog.maxrev) |
|
371 | 370 | for rev, a1, a2, b1, b2 in reversed(self.fixups): |
|
372 | 371 | blines = self.targetlines[b1:b2] |
|
373 | 372 | if self.ui.debugflag: |
|
374 | 373 | idx = (max(rev - 1, 0)) // 2 |
|
375 | 374 | self.ui.write( |
|
376 | 375 | _(b'%s: chunk %d:%d -> %d lines\n') |
|
377 | 376 | % (short(self.fctxs[idx].node()), a1, a2, len(blines)) |
|
378 | 377 | ) |
|
379 | 378 | self.linelog.replacelines(rev, a1, a2, b1, b2) |
|
380 | 379 | if self.opts.get(b'edit_lines', False): |
|
381 | 380 | self.finalcontents = self._checkoutlinelogwithedits() |
|
382 | 381 | else: |
|
383 | 382 | self.finalcontents = self._checkoutlinelog() |
|
384 | 383 | |
|
385 | 384 | def getfinalcontent(self, fctx): |
|
386 | 385 | """(fctx) -> str. get modified file content for a given filecontext""" |
|
387 | 386 | idx = self.fctxs.index(fctx) |
|
388 | 387 | return self.finalcontents[idx] |
|
389 | 388 | |
|
390 | 389 | def _analysediffchunk(self, chunk, annotated): |
|
391 | 390 | """analyse a diff chunk and return new fixups found
|
392 | 391 | |
|
393 | 392 | return [] if no lines from the chunk can be safely applied. |
|
394 | 393 | |
|
395 | 394 | the chunk (or lines) cannot be safely applied, if, for example: |
|
396 | 395 | - the modified (deleted) lines belong to a public changeset |
|
397 | 396 | (self.fctxs[0]) |
|
398 | 397 | - the chunk is a pure insertion and the adjacent lines (at most 2 |
|
399 | 398 | lines) belong to different non-public changesets, or do not belong |
|
400 | 399 | to any non-public changesets. |
|
401 | 400 | - the chunk is modifying lines from different changesets. |
|
402 | 401 | in this case, if the number of lines deleted equals to the number |
|
403 | 402 | of lines added, assume it's a simple 1:1 map (could be wrong). |
|
404 | 403 | otherwise, give up. |
|
405 | 404 | - the chunk is modifying lines from a single non-public changeset, |
|
406 | 405 | but other revisions touch the area as well. i.e. the lines are |
|
407 | 406 | not continuous as seen from the linelog. |
|
408 | 407 | """ |
|
409 | 408 | a1, a2, b1, b2 = chunk |
|
410 | 409 | # find involved indexes from annotate result |
|
411 | 410 | involved = annotated[a1:a2] |
|
412 | 411 | if not involved and annotated: # a1 == a2 and a is not empty |
|
413 | 412 | # pure insertion, check nearby lines. ignore lines belonging
|
414 | 413 | # to the public (first) changeset (i.e. annotated[i][0] == 1) |
|
415 | 414 | nearbylinenums = {a2, max(0, a1 - 1)} |
|
416 | 415 | involved = [ |
|
417 | 416 | annotated[i] for i in nearbylinenums if annotated[i][0] != 1 |
|
418 | 417 | ] |
|
419 | 418 | involvedrevs = list({r for r, l in involved}) |
|
420 | 419 | newfixups = [] |
|
421 | 420 | if len(involvedrevs) == 1 and self._iscontinuous(a1, a2 - 1, True): |
|
422 | 421 | # chunk belongs to a single revision |
|
423 | 422 | rev = involvedrevs[0] |
|
424 | 423 | if rev > 1: |
|
425 | 424 | fixuprev = rev + 1 |
|
426 | 425 | newfixups.append((fixuprev, a1, a2, b1, b2)) |
|
427 | 426 | elif a2 - a1 == b2 - b1 or b1 == b2: |
|
428 | 427 | # 1:1 line mapping, or chunk was deleted |
|
429 | 428 | for i in pycompat.xrange(a1, a2): |
|
430 | 429 | rev, linenum = annotated[i] |
|
431 | 430 | if rev > 1: |
|
432 | 431 | if b1 == b2: # deletion, simply remove that single line |
|
433 | 432 | nb1 = nb2 = 0 |
|
434 | 433 | else: # 1:1 line mapping, change the corresponding rev |
|
435 | 434 | nb1 = b1 + i - a1 |
|
436 | 435 | nb2 = nb1 + 1 |
|
437 | 436 | fixuprev = rev + 1 |
|
438 | 437 | newfixups.append((fixuprev, i, i + 1, nb1, nb2)) |
|
439 | 438 | return self._optimizefixups(newfixups) |
|
440 | 439 | |
|
441 | 440 | @staticmethod |
|
442 | 441 | def _alldiffchunks(a, b, alines, blines): |
|
443 | 442 | """like mdiff.allblocks, but only care about differences""" |
|
444 | 443 | blocks = mdiff.allblocks(a, b, lines1=alines, lines2=blines) |
|
445 | 444 | for chunk, btype in blocks: |
|
446 | 445 | if btype != b'!': |
|
447 | 446 | continue |
|
448 | 447 | yield chunk |
|
449 | 448 | |
|
450 | 449 | def _buildlinelog(self): |
|
451 | 450 | """calculate the initial linelog based on self.content{,line}s. |
|
452 | 451 | this is similar to running a partial "annotate". |
|
453 | 452 | """ |
|
454 | 453 | llog = linelog.linelog() |
|
455 | 454 | a, alines = b'', [] |
|
456 | 455 | for i in pycompat.xrange(len(self.contents)): |
|
457 | 456 | b, blines = self.contents[i], self.contentlines[i] |
|
458 | 457 | llrev = i * 2 + 1 |
|
459 | 458 | chunks = self._alldiffchunks(a, b, alines, blines) |
|
460 | 459 | for a1, a2, b1, b2 in reversed(list(chunks)): |
|
461 | 460 | llog.replacelines(llrev, a1, a2, b1, b2) |
|
462 | 461 | a, alines = b, blines |
|
463 | 462 | return llog |
|
464 | 463 | |
|
465 | 464 | def _checkoutlinelog(self): |
|
466 | 465 | """() -> [str]. check out file contents from linelog""" |
|
467 | 466 | contents = [] |
|
468 | 467 | for i in pycompat.xrange(len(self.contents)): |
|
469 | 468 | rev = (i + 1) * 2 |
|
470 | 469 | self.linelog.annotate(rev) |
|
471 | 470 | content = b''.join(map(self._getline, self.linelog.annotateresult)) |
|
472 | 471 | contents.append(content) |
|
473 | 472 | return contents |
|
474 | 473 | |
|
475 | 474 | def _checkoutlinelogwithedits(self): |
|
476 | 475 | """() -> [str]. prompt all lines for edit""" |
|
477 | 476 | alllines = self.linelog.getalllines() |
|
478 | 477 | # header |
|
479 | 478 | editortext = ( |
|
480 | 479 | _( |
|
481 | 480 | b'HG: editing %s\nHG: "y" means the line to the right ' |
|
482 | 481 | b'exists in the changeset to the top\nHG:\n' |
|
483 | 482 | ) |
|
484 | 483 | % self.fctxs[-1].path() |
|
485 | 484 | ) |
|
486 | 485 | # [(idx, fctx)]. hide the dummy emptyfilecontext |
|
487 | 486 | visiblefctxs = [ |
|
488 | 487 | (i, f) |
|
489 | 488 | for i, f in enumerate(self.fctxs) |
|
490 | 489 | if not isinstance(f, emptyfilecontext) |
|
491 | 490 | ] |
|
492 | 491 | for i, (j, f) in enumerate(visiblefctxs): |
|
493 | 492 | editortext += _(b'HG: %s/%s %s %s\n') % ( |
|
494 | 493 | b'|' * i, |
|
495 | 494 | b'-' * (len(visiblefctxs) - i + 1), |
|
496 | 495 | short(f.node()), |
|
497 | 496 | f.description().split(b'\n', 1)[0], |
|
498 | 497 | ) |
|
499 | 498 | editortext += _(b'HG: %s\n') % (b'|' * len(visiblefctxs)) |
|
500 | 499 | # figure out the lifetime of a line, this is relatively inefficient, |
|
501 | 500 | # but probably fine |
|
502 | 501 | lineset = defaultdict(lambda: set()) # {(llrev, linenum): {llrev}} |
|
503 | 502 | for i, f in visiblefctxs: |
|
504 | 503 | self.linelog.annotate((i + 1) * 2) |
|
505 | 504 | for l in self.linelog.annotateresult: |
|
506 | 505 | lineset[l].add(i) |
|
507 | 506 | # append lines |
|
508 | 507 | for l in alllines: |
|
509 | 508 | editortext += b' %s : %s' % ( |
|
510 | 509 | b''.join( |
|
511 | 510 | [ |
|
512 | 511 | (b'y' if i in lineset[l] else b' ') |
|
513 | 512 | for i, _f in visiblefctxs |
|
514 | 513 | ] |
|
515 | 514 | ), |
|
516 | 515 | self._getline(l), |
|
517 | 516 | ) |
|
518 | 517 | # run editor |
|
519 | 518 | editedtext = self.ui.edit(editortext, b'', action=b'absorb') |
|
520 | 519 | if not editedtext: |
|
521 | 520 | raise error.InputError(_(b'empty editor text')) |
|
522 | 521 | # parse edited result |
|
523 | 522 | contents = [b''] * len(self.fctxs) |
|
524 | 523 | leftpadpos = 4 |
|
525 | 524 | colonpos = leftpadpos + len(visiblefctxs) + 1 |
|
526 | 525 | for l in mdiff.splitnewlines(editedtext): |
|
527 | 526 | if l.startswith(b'HG:'): |
|
528 | 527 | continue |
|
529 | 528 | if l[colonpos - 1 : colonpos + 2] != b' : ': |
|
530 | 529 | raise error.InputError(_(b'malformed line: %s') % l) |
|
531 | 530 | linecontent = l[colonpos + 2 :] |
|
532 | 531 | for i, ch in enumerate( |
|
533 | 532 | pycompat.bytestr(l[leftpadpos : colonpos - 1]) |
|
534 | 533 | ): |
|
535 | 534 | if ch == b'y': |
|
536 | 535 | contents[visiblefctxs[i][0]] += linecontent |
|
537 | 536 | # chunkstats is hard to calculate if anything changes, therefore |
|
538 | 537 | # set them to just a simple value (1, 1). |
|
539 | 538 | if editedtext != editortext: |
|
540 | 539 | self.chunkstats = [1, 1] |
|
541 | 540 | return contents |
|
542 | 541 | |
|
543 | 542 | def _getline(self, lineinfo): |
|
544 | 543 | """((rev, linenum)) -> str. convert rev+line number to line content""" |
|
545 | 544 | rev, linenum = lineinfo |
|
546 | 545 | if rev & 1: # odd: original line taken from fctxs |
|
547 | 546 | return self.contentlines[rev // 2][linenum] |
|
548 | 547 | else: # even: fixup line from targetfctx |
|
549 | 548 | return self.targetlines[linenum] |
|
550 | 549 | |
|
551 | 550 | def _iscontinuous(self, a1, a2, closedinterval=False): |
|
552 | 551 | """(a1, a2 : int) -> bool |
|
553 | 552 | |
|
554 | 553 | check if these lines are continuous. i.e. no other insertions or |
|
555 | 554 | deletions (from other revisions) among these lines. |
|
556 | 555 | |
|
557 | 556 | closedinterval decides whether a2 should be included or not. i.e. is |
|
558 | 557 | it [a1, a2), or [a1, a2] ? |
|
559 | 558 | """ |
|
560 | 559 | if a1 >= a2: |
|
561 | 560 | return True |
|
562 | 561 | llog = self.linelog |
|
563 | 562 | offset1 = llog.getoffset(a1) |
|
564 | 563 | offset2 = llog.getoffset(a2) + int(closedinterval) |
|
565 | 564 | linesinbetween = llog.getalllines(offset1, offset2) |
|
566 | 565 | return len(linesinbetween) == a2 - a1 + int(closedinterval) |
|
567 | 566 | |
|
568 | 567 | def _optimizefixups(self, fixups): |
|
569 | 568 | """[(rev, a1, a2, b1, b2)] -> [(rev, a1, a2, b1, b2)]. |
|
570 | 569 | merge adjacent fixups to make them less fragmented. |
|
571 | 570 | """ |
|
572 | 571 | result = [] |
|
573 | 572 | pcurrentchunk = [[-1, -1, -1, -1, -1]] |
|
574 | 573 | |
|
575 | 574 | def pushchunk(): |
|
576 | 575 | if pcurrentchunk[0][0] != -1: |
|
577 | 576 | result.append(tuple(pcurrentchunk[0])) |
|
578 | 577 | |
|
579 | 578 | for i, chunk in enumerate(fixups): |
|
580 | 579 | rev, a1, a2, b1, b2 = chunk |
|
581 | 580 | lastrev = pcurrentchunk[0][0] |
|
582 | 581 | lasta2 = pcurrentchunk[0][2] |
|
583 | 582 | lastb2 = pcurrentchunk[0][4] |
|
584 | 583 | if ( |
|
585 | 584 | a1 == lasta2 |
|
586 | 585 | and b1 == lastb2 |
|
587 | 586 | and rev == lastrev |
|
588 | 587 | and self._iscontinuous(max(a1 - 1, 0), a1) |
|
589 | 588 | ): |
|
590 | 589 | # merge into currentchunk |
|
591 | 590 | pcurrentchunk[0][2] = a2 |
|
592 | 591 | pcurrentchunk[0][4] = b2 |
|
593 | 592 | else: |
|
594 | 593 | pushchunk() |
|
595 | 594 | pcurrentchunk[0] = list(chunk) |
|
596 | 595 | pushchunk() |
|
597 | 596 | return result |
|
598 | 597 | |
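_optimizefixups exists because the 1:1 branch of _analysediffchunk emits one fixup per line; adjacent ones for the same linelog revision can be fused. A hedged worked example (continuity in the linelog assumed):

    # three per-line fixups for linelog rev 4, covering a-lines 3..5
    # and b-lines 7..9:
    fixups = [(4, 3, 4, 7, 8), (4, 4, 5, 8, 9), (4, 5, 6, 9, 10)]
    # each chunk starts where the previous one ends (a1 == lasta2,
    # b1 == lastb2, same rev), so they merge into a single chunk:
    #   [(4, 3, 6, 7, 10)]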
|
599 | 598 | def _showchanges(self, fm, alines, blines, chunk, fixups): |
|
600 | 599 | def trim(line): |
|
601 | 600 | if line.endswith(b'\n'): |
|
602 | 601 | line = line[:-1] |
|
603 | 602 | return line |
|
604 | 603 | |
|
605 | 604 | # this is not optimized for perf but _showchanges only gets executed |
|
606 | 605 | # with an extra command-line flag. |
|
607 | 606 | a1, a2, b1, b2 = chunk |
|
608 | 607 | aidxs, bidxs = [0] * (a2 - a1), [0] * (b2 - b1) |
|
609 | 608 | for idx, fa1, fa2, fb1, fb2 in fixups: |
|
610 | 609 | for i in pycompat.xrange(fa1, fa2): |
|
611 | 610 | aidxs[i - a1] = (max(idx, 1) - 1) // 2 |
|
612 | 611 | for i in pycompat.xrange(fb1, fb2): |
|
613 | 612 | bidxs[i - b1] = (max(idx, 1) - 1) // 2 |
|
614 | 613 | |
|
615 | 614 | fm.startitem() |
|
616 | 615 | fm.write( |
|
617 | 616 | b'hunk', |
|
618 | 617 | b' %s\n', |
|
619 | 618 | b'@@ -%d,%d +%d,%d @@' % (a1, a2 - a1, b1, b2 - b1), |
|
620 | 619 | label=b'diff.hunk', |
|
621 | 620 | ) |
|
622 | 621 | fm.data(path=self.path, linetype=b'hunk') |
|
623 | 622 | |
|
624 | 623 | def writeline(idx, diffchar, line, linetype, linelabel): |
|
625 | 624 | fm.startitem() |
|
626 | 625 | node = b'' |
|
627 | 626 | if idx: |
|
628 | 627 | ctx = self.fctxs[idx] |
|
629 | 628 | fm.context(fctx=ctx) |
|
630 | 629 | node = ctx.hex() |
|
631 | 630 | self.ctxaffected.add(ctx.changectx()) |
|
632 | 631 | fm.write(b'node', b'%-7.7s ', node, label=b'absorb.node') |
|
633 | 632 | fm.write( |
|
634 | 633 | b'diffchar ' + linetype, |
|
635 | 634 | b'%s%s\n', |
|
636 | 635 | diffchar, |
|
637 | 636 | line, |
|
638 | 637 | label=linelabel, |
|
639 | 638 | ) |
|
640 | 639 | fm.data(path=self.path, linetype=linetype) |
|
641 | 640 | |
|
642 | 641 | for i in pycompat.xrange(a1, a2): |
|
643 | 642 | writeline( |
|
644 | 643 | aidxs[i - a1], |
|
645 | 644 | b'-', |
|
646 | 645 | trim(alines[i]), |
|
647 | 646 | b'deleted', |
|
648 | 647 | b'diff.deleted', |
|
649 | 648 | ) |
|
650 | 649 | for i in pycompat.xrange(b1, b2): |
|
651 | 650 | writeline( |
|
652 | 651 | bidxs[i - b1], |
|
653 | 652 | b'+', |
|
654 | 653 | trim(blines[i]), |
|
655 | 654 | b'inserted', |
|
656 | 655 | b'diff.inserted', |
|
657 | 656 | ) |
|
658 | 657 | |
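That closes filefixupstate. Its docstring near the top of the class names a four-step flow (diffwith, optional fixup editing, apply, getfinalcontent); a hedged driver sketch of that flow, with fctxs and targetfctx assumed to come from getfilestack and the working copy respectively:

    state = filefixupstate(fctxs, path, ui=ui)
    state.diffwith(targetfctx)        # 1. compute state.fixups
    # 2. optionally inspect or prune state.fixups here
    state.apply()                     # 3. fold the fixups into the linelog
    newdata = state.getfinalcontent(fctxs[-1])   # 4. read the final content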
|
659 | 658 | |
|
660 | 659 | class fixupstate(object): |
|
661 | 660 | """state needed to run absorb |
|
662 | 661 | |
|
663 | 662 | internally, it keeps paths and filefixupstates. |
|
664 | 663 | |
|
665 | 664 | a typical use is like filefixupstate's:
|
666 | 665 | |
|
667 | 666 | 1. call diffwith, to calculate fixups |
|
668 | 667 | 2. (optionally), present fixups to the user, or edit fixups |
|
669 | 668 | 3. call apply, to apply changes to memory |
|
670 | 669 | 4. call commit, to commit changes to hg database |
|
671 | 670 | """ |
|
672 | 671 | |
|
673 | 672 | def __init__(self, stack, ui=None, opts=None): |
|
674 | 673 | """([ctx], ui or None) -> None |
|
675 | 674 | |
|
676 | 675 | stack: should be linear, and sorted by topo order - oldest first. |
|
677 | 676 | all commits in stack are considered mutable. |
|
678 | 677 | """ |
|
679 | 678 | assert stack |
|
680 | 679 | self.ui = ui or nullui() |
|
681 | 680 | self.opts = opts or {} |
|
682 | 681 | self.stack = stack |
|
683 | 682 | self.repo = stack[-1].repo().unfiltered() |
|
684 | 683 | |
|
685 | 684 | # following fields will be filled later |
|
686 | 685 | self.paths = [] # [str] |
|
687 | 686 | self.status = None # ctx.status output |
|
688 | 687 | self.fctxmap = {} # {path: {ctx: fctx}} |
|
689 | 688 | self.fixupmap = {} # {path: filefixupstate} |
|
690 | 689 | self.replacemap = {} # {oldnode: newnode or None} |
|
691 | 690 | self.finalnode = None # head after all fixups |
|
692 | 691 | self.ctxaffected = set() # ctx that will be absorbed into |
|
693 | 692 | |
|
694 | 693 | def diffwith(self, targetctx, match=None, fm=None): |
|
695 | 694 | """diff and prepare fixups. update self.fixupmap, self.paths""" |
|
696 | 695 | # only care about modified files |
|
697 | 696 | self.status = self.stack[-1].status(targetctx, match) |
|
698 | 697 | self.paths = [] |
|
699 | 698 | # but if --edit-lines is used, the user may want to edit files |
|
700 | 699 | # even if they are not modified |
|
701 | 700 | editopt = self.opts.get(b'edit_lines') |
|
702 | 701 | if not self.status.modified and editopt and match: |
|
703 | 702 | interestingpaths = match.files() |
|
704 | 703 | else: |
|
705 | 704 | interestingpaths = self.status.modified |
|
706 | 705 | # prepare the filefixupstate |
|
707 | 706 | seenfctxs = set() |
|
708 | 707 | # sorting is necessary to eliminate ambiguity for the "double move" |
|
709 | 708 | # case: "hg cp A B; hg cp A C; hg rm A", then only "B" can affect "A". |
|
710 | 709 | for path in sorted(interestingpaths): |
|
711 | 710 | self.ui.debug(b'calculating fixups for %s\n' % path) |
|
712 | 711 | targetfctx = targetctx[path] |
|
713 | 712 | fctxs, ctx2fctx = getfilestack(self.stack, path, seenfctxs) |
|
714 | 713 | # ignore symbolic links or binary, or unchanged files |
|
715 | 714 | if any( |
|
716 | 715 | f.islink() or stringutil.binary(f.data()) |
|
717 | 716 | for f in [targetfctx] + fctxs |
|
718 | 717 | if not isinstance(f, emptyfilecontext) |
|
719 | 718 | ): |
|
720 | 719 | continue |
|
721 | 720 | if targetfctx.data() == fctxs[-1].data() and not editopt: |
|
722 | 721 | continue |
|
723 | 722 | seenfctxs.update(fctxs[1:]) |
|
724 | 723 | self.fctxmap[path] = ctx2fctx |
|
725 | 724 | fstate = filefixupstate(fctxs, path, ui=self.ui, opts=self.opts) |
|
726 | 725 | if fm is not None: |
|
727 | 726 | fm.startitem() |
|
728 | 727 | fm.plain(b'showing changes for ') |
|
729 | 728 | fm.write(b'path', b'%s\n', path, label=b'absorb.path') |
|
730 | 729 | fm.data(linetype=b'path') |
|
731 | 730 | fstate.diffwith(targetfctx, fm) |
|
732 | 731 | self.fixupmap[path] = fstate |
|
733 | 732 | self.paths.append(path) |
|
734 | 733 | self.ctxaffected.update(fstate.ctxaffected) |
|
735 | 734 | |
|
736 | 735 | def apply(self): |
|
737 | 736 | """apply fixups to individual filefixupstates""" |
|
738 | 737 | for path, state in pycompat.iteritems(self.fixupmap): |
|
739 | 738 | if self.ui.debugflag: |
|
740 | 739 | self.ui.write(_(b'applying fixups to %s\n') % path) |
|
741 | 740 | state.apply() |
|
742 | 741 | |
|
743 | 742 | @property |
|
744 | 743 | def chunkstats(self): |
|
745 | 744 | """-> {path: chunkstats}. collect chunkstats from filefixupstates""" |
|
746 | 745 | return { |
|
747 | 746 | path: state.chunkstats |
|
748 | 747 | for path, state in pycompat.iteritems(self.fixupmap) |
|
749 | 748 | } |
|
750 | 749 | |
|
751 | 750 | def commit(self): |
|
752 | 751 | """commit changes. update self.finalnode, self.replacemap""" |
|
753 | 752 | with self.repo.transaction(b'absorb') as tr: |
|
754 | 753 | self._commitstack() |
|
755 | 754 | self._movebookmarks(tr) |
|
756 | 755 | if self.repo[b'.'].node() in self.replacemap: |
|
757 | 756 | self._moveworkingdirectoryparent() |
|
758 | 757 | self._cleanupoldcommits() |
|
759 | 758 | return self.finalnode |
|
760 | 759 | |
|
761 | 760 | def printchunkstats(self): |
|
762 | 761 | """print things like '1 of 2 chunk(s) applied'""" |
|
763 | 762 | ui = self.ui |
|
764 | 763 | chunkstats = self.chunkstats |
|
765 | 764 | if ui.verbose: |
|
766 | 765 | # chunkstats for each file |
|
767 | 766 | for path, stat in pycompat.iteritems(chunkstats): |
|
768 | 767 | if stat[0]: |
|
769 | 768 | ui.write( |
|
770 | 769 | _(b'%s: %d of %d chunk(s) applied\n') |
|
771 | 770 | % (path, stat[0], stat[1]) |
|
772 | 771 | ) |
|
773 | 772 | elif not ui.quiet: |
|
774 | 773 | # a summary for all files |
|
775 | 774 | stats = chunkstats.values() |
|
776 | 775 | applied, total = (sum(s[i] for s in stats) for i in (0, 1)) |
|
777 | 776 | ui.write(_(b'%d of %d chunk(s) applied\n') % (applied, total)) |
|
778 | 777 | |
|
779 | 778 | def _commitstack(self): |
|
780 | 779 | """make new commits. update self.finalnode, self.replacemap. |
|
781 | 780 | it is split from "commit" to avoid too much indentation.
|
782 | 781 | """ |
|
783 | 782 | # last node (20-char) committed by us |
|
784 | 783 | lastcommitted = None |
|
785 | 784 | # p1 which overrides the parent of the next commit, "None" means use |
|
786 | 785 | # the original parent unchanged |
|
787 | 786 | nextp1 = None |
|
788 | 787 | for ctx in self.stack: |
|
789 | 788 | memworkingcopy = self._getnewfilecontents(ctx) |
|
790 | 789 | if not memworkingcopy and not lastcommitted: |
|
791 | 790 | # nothing changed, nothing committed
|
792 | 791 | nextp1 = ctx |
|
793 | 792 | continue |
|
794 | 793 | willbecomenoop = ctx.files() and self._willbecomenoop( |
|
795 | 794 | memworkingcopy, ctx, nextp1 |
|
796 | 795 | ) |
|
797 | 796 | if self.skip_empty_successor and willbecomenoop: |
|
798 | 797 | # changeset is no longer necessary |
|
799 | 798 | self.replacemap[ctx.node()] = None |
|
800 | 799 | msg = _(b'became empty and was dropped') |
|
801 | 800 | else: |
|
802 | 801 | # changeset needs re-commit |
|
803 | 802 | nodestr = self._commitsingle(memworkingcopy, ctx, p1=nextp1) |
|
804 | 803 | lastcommitted = self.repo[nodestr] |
|
805 | 804 | nextp1 = lastcommitted |
|
806 | 805 | self.replacemap[ctx.node()] = lastcommitted.node() |
|
807 | 806 | if memworkingcopy: |
|
808 | 807 | if willbecomenoop: |
|
809 | 808 | msg = _(b'%d file(s) changed, became empty as %s') |
|
810 | 809 | else: |
|
811 | 810 | msg = _(b'%d file(s) changed, became %s') |
|
812 | 811 | msg = msg % ( |
|
813 | 812 | len(memworkingcopy), |
|
814 | 813 | self._ctx2str(lastcommitted), |
|
815 | 814 | ) |
|
816 | 815 | else: |
|
817 | 816 | msg = _(b'became %s') % self._ctx2str(lastcommitted) |
|
818 | 817 | if self.ui.verbose and msg: |
|
819 | 818 | self.ui.write(_(b'%s: %s\n') % (self._ctx2str(ctx), msg)) |
|
820 | 819 | self.finalnode = lastcommitted and lastcommitted.node() |
|
821 | 820 | |
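The nextp1 threading in _commitstack is the heart of the stack rewrite: each changeset is re-committed on top of the previous rewrite result, and untouched changesets merely move the pointer. A reduced sketch of the control flow, with the empty-successor dropping omitted (contents_for and commit_clone are hypothetical stand-ins for _getnewfilecontents and _commitsingle):

    last_committed = None
    nextp1 = None                    # None: keep the original parent
    for ctx in stack:
        overrides = contents_for(ctx)
        if not overrides and not last_committed:
            nextp1 = ctx             # nothing rewritten yet; ctx stays as-is
            continue
        last_committed = commit_clone(ctx, overrides, p1=nextp1)
        nextp1 = last_committed      # children re-parent onto the rewrite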
|
822 | 821 | def _ctx2str(self, ctx): |
|
823 | 822 | if self.ui.debugflag: |
|
824 | 823 | return b'%d:%s' % (ctx.rev(), ctx.hex()) |
|
825 | 824 | else: |
|
826 | 825 | return b'%d:%s' % (ctx.rev(), short(ctx.node())) |
|
827 | 826 | |
|
828 | 827 | def _getnewfilecontents(self, ctx): |
|
829 | 828 | """(ctx) -> {path: str} |
|
830 | 829 | |
|
831 | 830 | fetch file contents from filefixupstates. |
|
832 | 831 | return the working copy overrides - files different from ctx. |
|
833 | 832 | """ |
|
834 | 833 | result = {} |
|
835 | 834 | for path in self.paths: |
|
836 | 835 | ctx2fctx = self.fctxmap[path] # {ctx: fctx} |
|
837 | 836 | if ctx not in ctx2fctx: |
|
838 | 837 | continue |
|
839 | 838 | fctx = ctx2fctx[ctx] |
|
840 | 839 | content = fctx.data() |
|
841 | 840 | newcontent = self.fixupmap[path].getfinalcontent(fctx) |
|
842 | 841 | if content != newcontent: |
|
843 | 842 | result[fctx.path()] = newcontent |
|
844 | 843 | return result |
|
845 | 844 | |
|
846 | 845 | def _movebookmarks(self, tr): |
|
847 | 846 | repo = self.repo |
|
848 | 847 | needupdate = [ |
|
849 | 848 | (name, self.replacemap[hsh]) |
|
850 | 849 | for name, hsh in pycompat.iteritems(repo._bookmarks) |
|
851 | 850 | if hsh in self.replacemap |
|
852 | 851 | ] |
|
853 | 852 | changes = [] |
|
854 | 853 | for name, hsh in needupdate: |
|
855 | 854 | if hsh: |
|
856 | 855 | changes.append((name, hsh)) |
|
857 | 856 | if self.ui.verbose: |
|
858 | 857 | self.ui.write( |
|
859 | 858 | _(b'moving bookmark %s to %s\n') % (name, hex(hsh)) |
|
860 | 859 | ) |
|
861 | 860 | else: |
|
862 | 861 | changes.append((name, None)) |
|
863 | 862 | if self.ui.verbose: |
|
864 | 863 | self.ui.write(_(b'deleting bookmark %s\n') % name) |
|
865 | 864 | repo._bookmarks.applychanges(repo, tr, changes) |
|
866 | 865 | |
|
867 | 866 | def _moveworkingdirectoryparent(self): |
|
868 | 867 | if not self.finalnode: |
|
869 | 868 | # Find the latest not-{obsoleted,stripped} parent. |
|
870 | 869 | revs = self.repo.revs(b'max(::. - %ln)', self.replacemap.keys()) |
|
871 | 870 | ctx = self.repo[revs.first()] |
|
872 | 871 | self.finalnode = ctx.node() |
|
873 | 872 | else: |
|
874 | 873 | ctx = self.repo[self.finalnode] |
|
875 | 874 | |
|
876 | 875 | dirstate = self.repo.dirstate |
|
877 | 876 | # dirstate.rebuild invalidates fsmonitorstate, causing "hg status" to |
|
878 | 877 | # be slow. in absorb's case, no need to invalidate fsmonitorstate. |
|
879 | 878 | noop = lambda: 0 |
|
880 | 879 | restore = noop |
|
881 | 880 | if util.safehasattr(dirstate, '_fsmonitorstate'): |
|
882 | 881 | bak = dirstate._fsmonitorstate.invalidate |
|
883 | 882 | |
|
884 | 883 | def restore(): |
|
885 | 884 | dirstate._fsmonitorstate.invalidate = bak |
|
886 | 885 | |
|
887 | 886 | dirstate._fsmonitorstate.invalidate = noop |
|
888 | 887 | try: |
|
889 | 888 | with dirstate.parentchange(): |
|
890 | 889 | dirstate.rebuild(ctx.node(), ctx.manifest(), self.paths) |
|
891 | 890 | finally: |
|
892 | 891 | restore() |
|
893 | 892 | |
|
894 | 893 | @staticmethod |
|
895 | 894 | def _willbecomenoop(memworkingcopy, ctx, pctx=None): |
|
896 | 895 | """({path: content}, ctx, ctx) -> bool. test if a commit will be noop |
|
897 | 896 | |
|
898 | 897 | if it will become an empty commit (does not change anything, after the |
|
899 | 898 | memworkingcopy overrides), return True. otherwise return False. |
|
900 | 899 | """ |
|
901 | 900 | if not pctx: |
|
902 | 901 | parents = ctx.parents() |
|
903 | 902 | if len(parents) != 1: |
|
904 | 903 | return False |
|
905 | 904 | pctx = parents[0] |
|
906 | 905 | if ctx.branch() != pctx.branch(): |
|
907 | 906 | return False |
|
908 | 907 | if ctx.extra().get(b'close'): |
|
909 | 908 | return False |
|
910 | 909 | # ctx changes more files (not a subset of memworkingcopy) |
|
911 | 910 | if not set(ctx.files()).issubset(set(memworkingcopy)): |
|
912 | 911 | return False |
|
913 | 912 | for path, content in pycompat.iteritems(memworkingcopy): |
|
914 | 913 | if path not in pctx or path not in ctx: |
|
915 | 914 | return False |
|
916 | 915 | fctx = ctx[path] |
|
917 | 916 | pfctx = pctx[path] |
|
918 | 917 | if pfctx.flags() != fctx.flags(): |
|
919 | 918 | return False |
|
920 | 919 | if pfctx.data() != content: |
|
921 | 920 | return False |
|
922 | 921 | return True |
|
923 | 922 | |
|
924 | 923 | def _commitsingle(self, memworkingcopy, ctx, p1=None): |
|
925 | 924 | """(ctx, {path: content}, node) -> node. make a single commit |
|
926 | 925 | |
|
927 | 926 | the commit is a clone from ctx, with a (optionally) different p1, and |
|
928 | 927 | different file contents replaced by memworkingcopy. |
|
929 | 928 | """ |
|
930 | parents = p1 and (p1, nullid) | |
|
929 | parents = p1 and (p1, self.repo.nullid) | |
|
931 | 930 | extra = ctx.extra() |
|
932 | 931 | if self._useobsolete and self.ui.configbool(b'absorb', b'add-noise'): |
|
933 | 932 | extra[b'absorb_source'] = ctx.hex() |
|
934 | 933 | |
|
935 | 934 | desc = rewriteutil.update_hash_refs( |
|
936 | 935 | ctx.repo(), |
|
937 | 936 | ctx.description(), |
|
938 | 937 | { |
|
939 | 938 | oldnode: [newnode] |
|
940 | 939 | for oldnode, newnode in self.replacemap.items() |
|
941 | 940 | }, |
|
942 | 941 | ) |
|
943 | 942 | mctx = overlaycontext( |
|
944 | 943 | memworkingcopy, ctx, parents, extra=extra, desc=desc |
|
945 | 944 | ) |
|
946 | 945 | return mctx.commit() |
|
947 | 946 | |
|
948 | 947 | @util.propertycache |
|
949 | 948 | def _useobsolete(self): |
|
950 | 949 | """() -> bool""" |
|
951 | 950 | return obsolete.isenabled(self.repo, obsolete.createmarkersopt) |
|
952 | 951 | |
|
953 | 952 | def _cleanupoldcommits(self): |
|
954 | 953 | replacements = { |
|
955 | 954 | k: ([v] if v is not None else []) |
|
956 | 955 | for k, v in pycompat.iteritems(self.replacemap) |
|
957 | 956 | } |
|
958 | 957 | if replacements: |
|
959 | 958 | scmutil.cleanupnodes( |
|
960 | 959 | self.repo, replacements, operation=b'absorb', fixphase=True |
|
961 | 960 | ) |
|
962 | 961 | |
|
963 | 962 | @util.propertycache |
|
964 | 963 | def skip_empty_successor(self): |
|
965 | 964 | return rewriteutil.skip_empty_successor(self.ui, b'absorb') |
|
966 | 965 | |
|
967 | 966 | |
|
968 | 967 | def _parsechunk(hunk): |
|
969 | 968 | """(crecord.uihunk or patch.recordhunk) -> (path, (a1, a2, [bline]))""" |
|
970 | 969 | if type(hunk) not in (crecord.uihunk, patch.recordhunk): |
|
971 | 970 | return None, None |
|
972 | 971 | path = hunk.header.filename() |
|
973 | 972 | a1 = hunk.fromline + len(hunk.before) - 1 |
|
974 | 973 | # remove before and after context |
|
975 | 974 | hunk.before = hunk.after = [] |
|
976 | 975 | buf = util.stringio() |
|
977 | 976 | hunk.write(buf) |
|
978 | 977 | patchlines = mdiff.splitnewlines(buf.getvalue()) |
|
979 | 978 | # hunk.prettystr() will update hunk.removed |
|
980 | 979 | a2 = a1 + hunk.removed |
|
981 | 980 | blines = [l[1:] for l in patchlines[1:] if not l.startswith(b'-')] |
|
982 | 981 | return path, (a1, a2, blines) |
|
983 | 982 | |
|
984 | 983 | |
|
985 | 984 | def overlaydiffcontext(ctx, chunks): |
|
986 | 985 | """(ctx, [crecord.uihunk]) -> memctx |
|
987 | 986 | |
|
988 | 987 | return a memctx with some [1] patches (chunks) applied to ctx. |
|
989 | 988 | [1]: modifications are handled. renames, mode changes, etc. are ignored. |
|
990 | 989 | """ |
|
991 | 990 | # sadly the applying-patch logic is hardly reusable, and messy: |
|
992 | 991 | # 1. the core logic "_applydiff" is too heavy - it writes .rej files, it |
|
993 | 992 | # needs a file stream of a patch and will re-parse it, while we have |
|
994 | 993 | # structured hunk objects at hand. |
|
995 | 994 | # 2. a lot of different implementations about "chunk" (patch.hunk, |
|
996 | 995 | # patch.recordhunk, crecord.uihunk) |
|
997 | 996 | # as we only care about applying changes to modified files, no mode |
|
998 | 997 | # change, no binary diff, and no renames, it's probably okay to |
|
999 | 998 | # re-invent the logic using much simpler code here. |
|
1000 | 999 | memworkingcopy = {} # {path: content} |
|
1001 | 1000 | patchmap = defaultdict(lambda: []) # {path: [(a1, a2, [bline])]} |
|
1002 | 1001 | for path, info in map(_parsechunk, chunks): |
|
1003 | 1002 | if not path or not info: |
|
1004 | 1003 | continue |
|
1005 | 1004 | patchmap[path].append(info) |
|
1006 | 1005 | for path, patches in pycompat.iteritems(patchmap): |
|
1007 | 1006 | if path not in ctx or not patches: |
|
1008 | 1007 | continue |
|
1009 | 1008 | patches.sort(reverse=True) |
|
1010 | 1009 | lines = mdiff.splitnewlines(ctx[path].data()) |
|
1011 | 1010 | for a1, a2, blines in patches: |
|
1012 | 1011 | lines[a1:a2] = blines |
|
1013 | 1012 | memworkingcopy[path] = b''.join(lines) |
|
1014 | 1013 | return overlaycontext(memworkingcopy, ctx) |
|
1015 | 1014 | |
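The splice loop in overlaydiffcontext applies hunks bottom-up (patches.sort(reverse=True)) so that earlier offsets remain valid while later lines are being replaced. A self-contained worked example of that step:

    lines = [b'a\n', b'b\n', b'c\n', b'd\n']
    patches = [(0, 1, [b'A\n']), (2, 4, [b'C\n'])]   # (a1, a2, blines)
    patches.sort(reverse=True)           # apply the bottom hunk first
    for a1, a2, blines in patches:
        lines[a1:a2] = blines
    # lines is now [b'A\n', b'b\n', b'C\n']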
|
1016 | 1015 | |
|
1017 | 1016 | def absorb(ui, repo, stack=None, targetctx=None, pats=None, opts=None): |
|
1018 | 1017 | """pick fixup chunks from targetctx, apply them to stack. |
|
1019 | 1018 | |
|
1020 | 1019 | if targetctx is None, the working copy context will be used. |
|
1021 | 1020 | if stack is None, the current draft stack will be used. |
|
1022 | 1021 | return fixupstate. |
|
1023 | 1022 | """ |
|
1024 | 1023 | if stack is None: |
|
1025 | 1024 | limit = ui.configint(b'absorb', b'max-stack-size') |
|
1026 | 1025 | headctx = repo[b'.'] |
|
1027 | 1026 | if len(headctx.parents()) > 1: |
|
1028 | 1027 | raise error.InputError(_(b'cannot absorb into a merge')) |
|
1029 | 1028 | stack = getdraftstack(headctx, limit) |
|
1030 | 1029 | if limit and len(stack) >= limit: |
|
1031 | 1030 | ui.warn( |
|
1032 | 1031 | _( |
|
1033 | 1032 | b'absorb: only the recent %d changesets will ' |
|
1034 | 1033 | b'be analysed\n' |
|
1035 | 1034 | ) |
|
1036 | 1035 | % limit |
|
1037 | 1036 | ) |
|
1038 | 1037 | if not stack: |
|
1039 | 1038 | raise error.InputError(_(b'no mutable changeset to change')) |
|
1040 | 1039 | if targetctx is None: # default to working copy |
|
1041 | 1040 | targetctx = repo[None] |
|
1042 | 1041 | if pats is None: |
|
1043 | 1042 | pats = () |
|
1044 | 1043 | if opts is None: |
|
1045 | 1044 | opts = {} |
|
1046 | 1045 | state = fixupstate(stack, ui=ui, opts=opts) |
|
1047 | 1046 | matcher = scmutil.match(targetctx, pats, opts) |
|
1048 | 1047 | if opts.get(b'interactive'): |
|
1049 | 1048 | diff = patch.diff(repo, stack[-1].node(), targetctx.node(), matcher) |
|
1050 | 1049 | origchunks = patch.parsepatch(diff) |
|
1051 | 1050 | chunks = cmdutil.recordfilter(ui, origchunks, matcher)[0] |
|
1052 | 1051 | targetctx = overlaydiffcontext(stack[-1], chunks) |
|
1053 | 1052 | fm = None |
|
1054 | 1053 | if opts.get(b'print_changes') or not opts.get(b'apply_changes'): |
|
1055 | 1054 | fm = ui.formatter(b'absorb', opts) |
|
1056 | 1055 | state.diffwith(targetctx, matcher, fm) |
|
1057 | 1056 | if fm is not None: |
|
1058 | 1057 | fm.startitem() |
|
1059 | 1058 | fm.write( |
|
1060 | 1059 | b"count", b"\n%d changesets affected\n", len(state.ctxaffected) |
|
1061 | 1060 | ) |
|
1062 | 1061 | fm.data(linetype=b'summary') |
|
1063 | 1062 | for ctx in reversed(stack): |
|
1064 | 1063 | if ctx not in state.ctxaffected: |
|
1065 | 1064 | continue |
|
1066 | 1065 | fm.startitem() |
|
1067 | 1066 | fm.context(ctx=ctx) |
|
1068 | 1067 | fm.data(linetype=b'changeset') |
|
1069 | 1068 | fm.write(b'node', b'%-7.7s ', ctx.hex(), label=b'absorb.node') |
|
1070 | 1069 | descfirstline = ctx.description().splitlines()[0] |
|
1071 | 1070 | fm.write( |
|
1072 | 1071 | b'descfirstline', |
|
1073 | 1072 | b'%s\n', |
|
1074 | 1073 | descfirstline, |
|
1075 | 1074 | label=b'absorb.description', |
|
1076 | 1075 | ) |
|
1077 | 1076 | fm.end() |
|
1078 | 1077 | if not opts.get(b'dry_run'): |
|
1079 | 1078 | if ( |
|
1080 | 1079 | not opts.get(b'apply_changes') |
|
1081 | 1080 | and state.ctxaffected |
|
1082 | 1081 | and ui.promptchoice( |
|
1083 | 1082 | b"apply changes (y/N)? $$ &Yes $$ &No", default=1 |
|
1084 | 1083 | ) |
|
1085 | 1084 | ): |
|
1086 | 1085 | raise error.CanceledError(_(b'absorb cancelled\n')) |
|
1087 | 1086 | |
|
1088 | 1087 | state.apply() |
|
1089 | 1088 | if state.commit(): |
|
1090 | 1089 | state.printchunkstats() |
|
1091 | 1090 | elif not ui.quiet: |
|
1092 | 1091 | ui.write(_(b'nothing applied\n')) |
|
1093 | 1092 | return state |
|
1094 | 1093 | |
|
1095 | 1094 | |
|
1096 | 1095 | @command( |
|
1097 | 1096 | b'absorb', |
|
1098 | 1097 | [ |
|
1099 | 1098 | ( |
|
1100 | 1099 | b'a', |
|
1101 | 1100 | b'apply-changes', |
|
1102 | 1101 | None, |
|
1103 | 1102 | _(b'apply changes without prompting for confirmation'), |
|
1104 | 1103 | ), |
|
1105 | 1104 | ( |
|
1106 | 1105 | b'p', |
|
1107 | 1106 | b'print-changes', |
|
1108 | 1107 | None, |
|
1109 | 1108 | _(b'always print which changesets are modified by which changes'), |
|
1110 | 1109 | ), |
|
1111 | 1110 | ( |
|
1112 | 1111 | b'i', |
|
1113 | 1112 | b'interactive', |
|
1114 | 1113 | None, |
|
1115 | 1114 | _(b'interactively select which chunks to apply'), |
|
1116 | 1115 | ), |
|
1117 | 1116 | ( |
|
1118 | 1117 | b'e', |
|
1119 | 1118 | b'edit-lines', |
|
1120 | 1119 | None, |
|
1121 | 1120 | _( |
|
1122 | 1121 | b'edit what lines belong to which changesets before commit ' |
|
1123 | 1122 | b'(EXPERIMENTAL)' |
|
1124 | 1123 | ), |
|
1125 | 1124 | ), |
|
1126 | 1125 | ] |
|
1127 | 1126 | + commands.dryrunopts |
|
1128 | 1127 | + commands.templateopts |
|
1129 | 1128 | + commands.walkopts, |
|
1130 | 1129 | _(b'hg absorb [OPTION] [FILE]...'), |
|
1131 | 1130 | helpcategory=command.CATEGORY_COMMITTING, |
|
1132 | 1131 | helpbasic=True, |
|
1133 | 1132 | ) |
|
1134 | 1133 | def absorbcmd(ui, repo, *pats, **opts): |
|
1135 | 1134 | """incorporate corrections into the stack of draft changesets |
|
1136 | 1135 | |
|
1137 | 1136 | absorb analyzes each change in your working directory and attempts to |
|
1138 | 1137 | amend the changed lines into the changesets in your stack that first |
|
1139 | 1138 | introduced those lines. |
|
1140 | 1139 | |
|
1141 | 1140 | If absorb cannot find an unambiguous changeset to amend for a change, |
|
1142 | 1141 | that change will be left in the working directory, untouched. It can be 
|
1143 | 1142 | observed by :hg:`status` or :hg:`diff` afterwards. In other words, |
|
1144 | 1143 | absorb does not write to the working directory. |
|
1145 | 1144 | |
|
1146 | 1145 | Changesets outside the revset `::. and not public() and not merge()` will |
|
1147 | 1146 | not be changed. |
|
1148 | 1147 | |
|
1149 | 1148 | Changesets that become empty after applying the changes will be deleted. |
|
1150 | 1149 | |
|
1151 | 1150 | By default, absorb will show what it plans to do and prompt for |
|
1152 | 1151 | confirmation. If you are confident that the changes will be absorbed |
|
1153 | 1152 | to the correct place, run :hg:`absorb -a` to apply the changes |
|
1154 | 1153 | immediately. |
|
1155 | 1154 | |
|
1156 | 1155 | Returns 0 on success, 1 if all chunks were ignored and nothing amended. |
|
1157 | 1156 | """ |
|
1158 | 1157 | opts = pycompat.byteskwargs(opts) |
|
1159 | 1158 | |
|
1160 | 1159 | with repo.wlock(), repo.lock(): |
|
1161 | 1160 | if not opts[b'dry_run']: |
|
1162 | 1161 | cmdutil.checkunfinished(repo) |
|
1163 | 1162 | |
|
1164 | 1163 | state = absorb(ui, repo, pats=pats, opts=opts) |
|
1165 | 1164 | if sum(s[0] for s in state.chunkstats.values()) == 0: |
|
1166 | 1165 | return 1 |
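
A hedged sketch of the exit-code check above, assuming (as printchunkstats suggests) that chunkstats maps each path to an [applied, total] pair; the command returns 1 only when no chunk was applied anywhere.

    # Hypothetical stats: two files, three of four chunks applied overall.
    chunkstats = {b'a.py': [2, 3], b'b.py': [1, 1]}
    exitcode = 1 if sum(s[0] for s in chunkstats.values()) == 0 else 0
    assert exitcode == 0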
@@ -1,76 +1,74 @@
|
1 | 1 | # amend.py - provide the amend command |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2017 Facebook, Inc. |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | """provide the amend command (EXPERIMENTAL) |
|
8 | 8 | |
|
9 | 9 | This extension provides an ``amend`` command that is similar to |
|
10 | 10 | ``commit --amend`` but does not prompt an editor. |
|
11 | 11 | """ |
|
12 | 12 | |
|
13 | 13 | from __future__ import absolute_import |
|
14 | 14 | |
|
15 | 15 | from mercurial.i18n import _ |
|
16 | 16 | from mercurial import ( |
|
17 | 17 | cmdutil, |
|
18 | 18 | commands, |
|
19 | pycompat, | |
|
20 | 19 | registrar, |
|
21 | 20 | ) |
|
22 | 21 | |
|
23 | 22 | # Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for |
|
24 | 23 | # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should |
|
25 | 24 | # be specifying the version(s) of Mercurial they are tested with, or |
|
26 | 25 | # leave the attribute unspecified. |
|
27 | 26 | testedwith = b'ships-with-hg-core' |
|
28 | 27 | |
|
29 | 28 | cmdtable = {} |
|
30 | 29 | command = registrar.command(cmdtable) |
|
31 | 30 | |
|
32 | 31 | |
|
33 | 32 | @command( |
|
34 | 33 | b'amend', |
|
35 | 34 | [ |
|
36 | 35 | ( |
|
37 | 36 | b'A', |
|
38 | 37 | b'addremove', |
|
39 | 38 | None, |
|
40 | 39 | _(b'mark new/missing files as added/removed before committing'), |
|
41 | 40 | ), |
|
42 | 41 | (b'e', b'edit', None, _(b'invoke editor on commit messages')), |
|
43 | 42 | (b'i', b'interactive', None, _(b'use interactive mode')), |
|
44 | 43 | ( |
|
45 | 44 | b'', |
|
46 | 45 | b'close-branch', |
|
47 | 46 | None, |
|
48 | 47 | _(b'mark a branch as closed, hiding it from the branch list'), |
|
49 | 48 | ), |
|
50 | 49 | (b's', b'secret', None, _(b'use the secret phase for committing')), |
|
51 | 50 | (b'n', b'note', b'', _(b'store a note on the amend')), |
|
52 | 51 | ] |
|
53 | 52 | + cmdutil.walkopts |
|
54 | 53 | + cmdutil.commitopts |
|
55 | 54 | + cmdutil.commitopts2 |
|
56 | 55 | + cmdutil.commitopts3, |
|
57 | 56 | _(b'[OPTION]... [FILE]...'), |
|
58 | 57 | helpcategory=command.CATEGORY_COMMITTING, |
|
59 | 58 | inferrepo=True, |
|
60 | 59 | ) |
|
61 | 60 | def amend(ui, repo, *pats, **opts): |
|
62 | 61 | """amend the working copy parent with all or specified outstanding changes |
|
63 | 62 | |
|
64 | 63 | Similar to :hg:`commit --amend`, but reuse the commit message without |
|
65 | 64 | invoking editor, unless ``--edit`` was set. |
|
66 | 65 | |
|
67 | 66 | See :hg:`help commit` for more details. |
|
68 | 67 | """ |
|
69 | opts = pycompat.byteskwargs(opts) | |
|
70 | cmdutil.checknotesize(ui, opts) | |
|
68 | cmdutil.check_note_size(opts) | |
|
71 | 69 | |
|
72 | 70 | with repo.wlock(), repo.lock(): |
|
73 | if not opts.get(b'logfile'): | 

74 | opts[b'message'] = opts.get(b'message') or repo[b'.'].description() | 

75 | opts[b'amend'] = True | 

76 | return commands._docommit(ui, repo, *pats, **pycompat.strkwargs(opts)) | 

71 | if not opts.get('logfile'): | |
|
72 | opts['message'] = opts.get('message') or repo[b'.'].description() | |
|
73 | opts['amend'] = True | |
|
74 | return commands._docommit(ui, repo, *pats, **opts) |
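
What the removed round-trip did, sketched with hypothetical stand-ins for pycompat.byteskwargs/strkwargs (not hg's implementations): **opts arrives with native str keys, was converted to bytes keys for internal use, then converted back before calling commands._docommit; dropping both steps keeps the keys native throughout.

    def byteskwargs(d):
        # Stand-in: str keys -> bytes keys.
        return {k.encode('latin-1'): v for k, v in d.items()}

    def strkwargs(d):
        # Stand-in: bytes keys -> str keys.
        return {k.decode('latin-1'): v for k, v in d.items()}

    opts = {'amend': True, 'message': ''}
    assert strkwargs(byteskwargs(opts)) == opts  # the round-trip was an identity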
@@ -1,335 +1,339 @@
|
1 | 1 | # bzr.py - bzr support for the convert extension |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | |
|
8 | # This module is for handling 'bzr', that was formerly known as Bazaar-NG; | |
|
9 | # it cannot access 'bar' repositories, but they were never used very much | |
|
8 | # This module is for handling Breezy imports or `brz`, but it's also compatible | |
|
9 | # with Bazaar or `bzr`, that was formerly known as Bazaar-NG; | |
|
10 | # it cannot access `bar` repositories, but they were never used very much. | |
|
10 | 11 | from __future__ import absolute_import |
|
11 | 12 | |
|
12 | 13 | import os |
|
13 | 14 | |
|
14 | 15 | from mercurial.i18n import _ |
|
15 | 16 | from mercurial import ( |
|
16 | 17 | demandimport, |
|
17 | 18 | error, |
|
18 | 19 | pycompat, |
|
20 | util, | |
|
19 | 21 | ) |
|
20 | 22 | from . import common |
|
21 | 23 | |
|
24 | ||
|
22 | 25 | # these do not work with demandimport, blacklist |
|
23 | 26 | demandimport.IGNORES.update( |
|
24 | 27 | [ |
|
25 | b'bzrlib.transactions', | 

26 | b'bzrlib.urlutils', | 

28 | b'breezy.transactions', | |
|
29 | b'breezy.urlutils', | |
|
27 | 30 | b'ElementPath', |
|
28 | 31 | ] |
|
29 | 32 | ) |
|
30 | 33 | |
|
31 | 34 | try: |
|
32 | 35 | # bazaar imports |
|
33 | import bzrlib.bzrdir | 

34 | import bzrlib.errors | 

35 | import bzrlib.revision | 

36 | import bzrlib.revisionspec | 

36 | import breezy.bzr.bzrdir | |
|
37 | import breezy.errors | |
|
38 | import breezy.revision | |
|
39 | import breezy.revisionspec | |
|
37 | 40 | |
|
38 | bzrdir = bzrlib.bzrdir | 

39 | errors = bzrlib.errors | 

40 | revision = bzrlib.revision | 

41 | revisionspec = bzrlib.revisionspec | 

41 | bzrdir = breezy.bzr.bzrdir | |
|
42 | errors = breezy.errors | |
|
43 | revision = breezy.revision | |
|
44 | revisionspec = breezy.revisionspec | |
|
42 | 45 | revisionspec.RevisionSpec |
|
43 | 46 | except ImportError: |
|
44 | 47 | pass |
|
45 | 48 | |
|
46 | supportedkinds = (b'file', b'symlink') | 

49 | supportedkinds = ('file', 'symlink') | |
|
47 | 50 | |
|
48 | 51 | |
|
49 | 52 | class bzr_source(common.converter_source): |
|
50 | 53 | """Reads Bazaar repositories by using the Bazaar Python libraries""" |
|
51 | 54 | |
|
52 | 55 | def __init__(self, ui, repotype, path, revs=None): |
|
53 | 56 | super(bzr_source, self).__init__(ui, repotype, path, revs=revs) |
|
54 | 57 | |
|
55 | 58 | if not os.path.exists(os.path.join(path, b'.bzr')): |
|
56 | 59 | raise common.NoRepo( |
|
57 | 60 | _(b'%s does not look like a Bazaar repository') % path |
|
58 | 61 | ) |
|
59 | 62 | |
|
60 | 63 | try: |
|
61 | # access bzrlib stuff | 

64 | # access breezy stuff | |
|
62 | 65 | bzrdir |
|
63 | 66 | except NameError: |
|
64 | 67 | raise common.NoRepo(_(b'Bazaar modules could not be loaded')) |
|
65 | 68 | |
|
66 | path = os.path.abspath(path) | 

69 | path = util.abspath(path) | |
|
67 | 70 | self._checkrepotype(path) |
|
68 | 71 | try: |
|
69 | self.sourcerepo = bzrdir.BzrDir.open(path).open_repository() | 

72 | bzr_dir = bzrdir.BzrDir.open(path.decode()) | |
|
73 | self.sourcerepo = bzr_dir.open_repository() | |
|
70 | 74 | except errors.NoRepositoryPresent: |
|
71 | 75 | raise common.NoRepo( |
|
72 | 76 | _(b'%s does not look like a Bazaar repository') % path |
|
73 | 77 | ) |
|
74 | 78 | self._parentids = {} |
|
75 | 79 | self._saverev = ui.configbool(b'convert', b'bzr.saverev') |
|
76 | 80 | |
|
77 | 81 | def _checkrepotype(self, path): |
|
78 | 82 | # Lightweight checkouts detection is informational but probably |
|
79 | 83 | # fragile at API level. It should not terminate the conversion. |
|
80 | 84 | try: |
|
81 | dir = bzrdir.BzrDir.open_containing(path)[0] | |
|
85 | dir = bzrdir.BzrDir.open_containing(path.decode())[0] | |
|
82 | 86 | try: |
|
83 | 87 | tree = dir.open_workingtree(recommend_upgrade=False) |
|
84 | 88 | branch = tree.branch |
|
85 | 89 | except (errors.NoWorkingTree, errors.NotLocalUrl): |
|
86 | 90 | tree = None |
|
87 | 91 | branch = dir.open_branch() |
|
88 | 92 | if ( |
|
89 | 93 | tree is not None |
|
90 | and tree.bzrdir.root_transport.base | 

91 | != branch.bzrdir.root_transport.base | 

94 | and tree.controldir.root_transport.base | |
|
95 | != branch.controldir.root_transport.base | |
|
92 | 96 | ): |
|
93 | 97 | self.ui.warn( |
|
94 | 98 | _( |
|
95 | 99 | b'warning: lightweight checkouts may cause ' |
|
96 | 100 | b'conversion failures, try with a regular ' |
|
97 | 101 | b'branch instead.\n' |
|
98 | 102 | ) |
|
99 | 103 | ) |
|
100 | 104 | except Exception: |
|
101 | 105 | self.ui.note(_(b'bzr source type could not be determined\n')) |
|
102 | 106 | |
|
103 | 107 | def before(self): |
|
104 | 108 | """Before the conversion begins, acquire a read lock |
|
105 | 109 | for all the operations that might need it. Fortunately |
|
106 | 110 | read locks don't block other reads or writes to the |
|
107 | 111 | repository, so this shouldn't have any impact on the usage of |
|
108 | 112 | the source repository. |
|
109 | 113 | |
|
110 | 114 | The alternative would be locking on every operation that |
|
111 | 115 | needs locks (there are currently two: getting the file and |
|
112 | 116 | getting the parent map) and releasing immediately after, |
|
113 | 117 | but this approach can take even 40% longer.""" |
|
114 | 118 | self.sourcerepo.lock_read() |
|
115 | 119 | |
|
116 | 120 | def after(self): |
|
117 | 121 | self.sourcerepo.unlock() |
|
118 | 122 | |
|
119 | 123 | def _bzrbranches(self): |
|
120 | 124 | return self.sourcerepo.find_branches(using=True) |
|
121 | 125 | |
|
122 | 126 | def getheads(self): |
|
123 | 127 | if not self.revs: |
|
124 | 128 | # Set using=True to avoid nested repositories (see issue3254) |
|
125 | 129 | heads = sorted([b.last_revision() for b in self._bzrbranches()]) |
|
126 | 130 | else: |
|
127 | 131 | revid = None |
|
128 | 132 | for branch in self._bzrbranches(): |
|
129 | 133 | try: |
|
130 | r = revisionspec.RevisionSpec.from_string(self.revs[0]) | 

134 | revspec = self.revs[0].decode() | |
|
135 | r = revisionspec.RevisionSpec.from_string(revspec) | |
|
131 | 136 | info = r.in_history(branch) |
|
132 | 137 | except errors.BzrError: |
|
133 | 138 | pass |
|
134 | 139 | revid = info.rev_id |
|
135 | 140 | if revid is None: |
|
136 | 141 | raise error.Abort( |
|
137 | 142 | _(b'%s is not a valid revision') % self.revs[0] |
|
138 | 143 | ) |
|
139 | 144 | heads = [revid] |
|
140 | 145 | # Empty repositories return 'null:', which cannot be retrieved |
|
141 | 146 | heads = [h for h in heads if h != b'null:'] |
|
142 | 147 | return heads |
|
143 | 148 | |
|
144 | 149 | def getfile(self, name, rev): |
|
150 | name = name.decode() | |
|
145 | 151 | revtree = self.sourcerepo.revision_tree(rev) |
|
146 | fileid = revtree.path2id(name.decode(self.encoding or b'utf-8')) | |
|
147 | kind = None | |
|
148 | if fileid is not None: | |
|
149 | kind = revtree.kind(fileid) | |
|
152 | ||
|
153 | try: | |
|
154 | kind = revtree.kind(name) | |
|
155 | except breezy.errors.NoSuchFile: | |
|
156 | return None, None | |
|
150 | 157 | if kind not in supportedkinds: |
|
151 | 158 | # the file is not available anymore - was deleted |
|
152 | 159 | return None, None |
|
153 | mode = self._modecache[(name, rev)] | |
|
154 | if kind == b'symlink': | 

155 | target = revtree.get_symlink_target(fileid) | 

160 | mode = self._modecache[(name.encode(), rev)] | |
|
161 | if kind == 'symlink': | |
|
162 | target = revtree.get_symlink_target(name) | |
|
156 | 163 | if target is None: |
|
157 | 164 | raise error.Abort( |
|
158 | 165 | _(b'%s.%s symlink has no target') % (name, rev) |
|
159 | 166 | ) |
|
160 | return target, mode | |
|
167 | return target.encode(), mode | |
|
161 | 168 | else: |
|
162 | sio = revtree.get_file(fileid) | 

169 | sio = revtree.get_file(name) | |
|
163 | 170 | return sio.read(), mode |
|
164 | 171 | |
|
165 | 172 | def getchanges(self, version, full): |
|
166 | 173 | if full: |
|
167 | 174 | raise error.Abort(_(b"convert from cvs does not support --full")) |
|
168 | 175 | self._modecache = {} |
|
169 | 176 | self._revtree = self.sourcerepo.revision_tree(version) |
|
170 | 177 | # get the parentids from the cache |
|
171 | 178 | parentids = self._parentids.pop(version) |
|
172 | 179 | # only diff against first parent id |
|
173 | 180 | prevtree = self.sourcerepo.revision_tree(parentids[0]) |
|
174 | 181 | files, changes = self._gettreechanges(self._revtree, prevtree) |
|
175 | 182 | return files, changes, set() |
|
176 | 183 | |
|
177 | 184 | def getcommit(self, version): |
|
178 | 185 | rev = self.sourcerepo.get_revision(version) |
|
179 | 186 | # populate parent id cache |
|
180 | 187 | if not rev.parent_ids: |
|
181 | 188 | parents = [] |
|
182 | 189 | self._parentids[version] = (revision.NULL_REVISION,) |
|
183 | 190 | else: |
|
184 | 191 | parents = self._filterghosts(rev.parent_ids) |
|
185 | 192 | self._parentids[version] = parents |
|
186 | 193 | |
|
187 | branch = self.recode(rev.properties.get(b'branch-nick', u'default')) | 

188 | if branch == b'trunk': | 

189 | branch = b'default' | 

194 | branch = rev.properties.get('branch-nick', 'default') | |
|
195 | if branch == 'trunk': | |
|
196 | branch = 'default' | |
|
190 | 197 | return common.commit( |
|
191 | 198 | parents=parents, |
|
192 | 199 | date=b'%d %d' % (rev.timestamp, -rev.timezone), |
|
193 | 200 | author=self.recode(rev.committer), |
|
194 | 201 | desc=self.recode(rev.message), |
|
195 | branch=branch, | |
|
202 | branch=branch.encode('utf8'), | |
|
196 | 203 | rev=version, |
|
197 | 204 | saverev=self._saverev, |
|
198 | 205 | ) |
|
199 | 206 | |
|
200 | 207 | def gettags(self): |
|
201 | 208 | bytetags = {} |
|
202 | 209 | for branch in self._bzrbranches(): |
|
203 | 210 | if not branch.supports_tags(): |
|
204 | 211 | return {} |
|
205 | 212 | tagdict = branch.tags.get_tag_dict() |
|
206 | 213 | for name, rev in pycompat.iteritems(tagdict): |
|
207 | 214 | bytetags[self.recode(name)] = rev |
|
208 | 215 | return bytetags |
|
209 | 216 | |
|
210 | 217 | def getchangedfiles(self, rev, i): |
|
211 | 218 | self._modecache = {} |
|
212 | 219 | curtree = self.sourcerepo.revision_tree(rev) |
|
213 | 220 | if i is not None: |
|
214 | 221 | parentid = self._parentids[rev][i] |
|
215 | 222 | else: |
|
216 | 223 | # no parent id, get the empty revision |
|
217 | 224 | parentid = revision.NULL_REVISION |
|
218 | 225 | |
|
219 | 226 | prevtree = self.sourcerepo.revision_tree(parentid) |
|
220 | 227 | changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]] |
|
221 | 228 | return changes |
|
222 | 229 | |
|
223 | 230 | def _gettreechanges(self, current, origin): |
|
224 | 231 | revid = current._revision_id |
|
225 | 232 | changes = [] |
|
226 | 233 | renames = {} |
|
227 | 234 | seen = set() |
|
228 | 235 | |
|
229 | 236 | # Fall back to the deprecated attribute for legacy installations. |
|
230 | 237 | try: |
|
231 | 238 | inventory = origin.root_inventory |
|
232 | 239 | except AttributeError: |
|
233 | 240 | inventory = origin.inventory |
|
234 | 241 | |
|
235 | 242 | # Process the entries by reverse lexicographic name order to |
|
236 | 243 | # handle nested renames correctly, most specific first. |
|
244 | ||
|
245 | def key(c): | |
|
246 | return c.path[0] or c.path[1] or "" | |
|
247 | ||
|
237 | 248 | curchanges = sorted( |
|
238 | 249 | current.iter_changes(origin), |
|
239 | key=lambda c: c[1][0] or c[1][1], | |
|
250 | key=key, | |
|
240 | 251 | reverse=True, |
|
241 | 252 | ) |
|
242 | for ( | |
|
243 | fileid, | |
|
244 | paths, | |
|
245 | changed_content, | 

246 | versioned, | |
|
247 | parent, | |
|
248 | name, | |
|
249 | kind, | |
|
250 | executable, | |
|
251 | ) in curchanges: | |
|
252 | ||
|
253 | for change in curchanges: | |
|
254 | paths = change.path | |
|
255 | kind = change.kind | |
|
256 | executable = change.executable | |
|
253 | 257 | if paths[0] == u'' or paths[1] == u'': |
|
254 | 258 | # ignore changes to tree root |
|
255 | 259 | continue |
|
256 | 260 | |
|
257 | 261 | # bazaar tracks directories, mercurial does not, so |
|
258 | 262 | # we have to rename the directory contents |
|
259 | if kind[1] == b'directory': | 

260 | if kind[0] not in (None, b'directory'): | 

263 | if kind[1] == 'directory': | |
|
264 | if kind[0] not in (None, 'directory'): | |
|
261 | 265 | # Replacing 'something' with a directory, record it |
|
262 | 266 | # so it can be removed. |
|
263 | 267 | changes.append((self.recode(paths[0]), revid)) |
|
264 | 268 | |
|
265 | if kind[0] == b'directory' and None not in paths: | 

269 | if kind[0] == 'directory' and None not in paths: | |
|
266 | 270 | renaming = paths[0] != paths[1] |
|
267 | 271 | # neither an add nor a delete - a move 
|
268 | 272 | # rename all directory contents manually |
|
269 | 273 | subdir = inventory.path2id(paths[0]) |
|
270 | 274 | # get all child-entries of the directory |
|
271 | 275 | for name, entry in inventory.iter_entries(subdir): |
|
272 | 276 | # hg does not track directory renames |
|
273 | if entry.kind == b'directory': | 

277 | if entry.kind == 'directory': | |
|
274 | 278 | continue |
|
275 | frompath = self.recode(paths[0] + b'/' + name) | 

279 | frompath = self.recode(paths[0] + '/' + name) | |
|
276 | 280 | if frompath in seen: |
|
277 | 281 | # Already handled by a more specific change entry |
|
278 | 282 | # This is important when you have: |
|
279 | 283 | # a => b |
|
280 | 284 | # a/c => a/c |
|
281 | 285 | # Here a/c must not be renamed into b/c |
|
282 | 286 | continue |
|
283 | 287 | seen.add(frompath) |
|
284 | 288 | if not renaming: |
|
285 | 289 | continue |
|
286 | topath = self.recode(paths[1] + b'/' + name) | 

290 | topath = self.recode(paths[1] + '/' + name) | |
|
287 | 291 | # register the files as changed |
|
288 | 292 | changes.append((frompath, revid)) |
|
289 | 293 | changes.append((topath, revid)) |
|
290 | 294 | # add to mode cache |
|
291 | 295 | mode = ( |
|
292 | 296 | (entry.executable and b'x') |
|
293 | or (entry.kind == b'symlink' and b's') | 

297 | or (entry.kind == 'symlink' and b's') | |
|
294 | 298 | or b'' |
|
295 | 299 | ) |
|
296 | 300 | self._modecache[(topath, revid)] = mode |
|
297 | 301 | # register the change as move |
|
298 | 302 | renames[topath] = frompath |
|
299 | 303 | |
|
300 | 304 | # no further changes, go to the next change |
|
301 | 305 | continue |
|
302 | 306 | |
|
303 | 307 | # we got unicode paths, need to convert them |
|
304 | 308 | path, topath = paths |
|
305 | 309 | if path is not None: |
|
306 | 310 | path = self.recode(path) |
|
307 | 311 | if topath is not None: |
|
308 | 312 | topath = self.recode(topath) |
|
309 | 313 | seen.add(path or topath) |
|
310 | 314 | |
|
311 | 315 | if topath is None: |
|
312 | 316 | # file deleted |
|
313 | 317 | changes.append((path, revid)) |
|
314 | 318 | continue |
|
315 | 319 | |
|
316 | 320 | # renamed |
|
317 | 321 | if path and path != topath: |
|
318 | 322 | renames[topath] = path |
|
319 | 323 | changes.append((path, revid)) |
|
320 | 324 | |
|
321 | 325 | # populate the mode cache |
|
322 | 326 | kind, executable = [e[1] for e in (kind, executable)] |
|
323 | mode = (executable and b'x') or (kind == b'symlink' and b'l') or b'' | 

327 | mode = (executable and b'x') or (kind == 'symlink' and b'l') or b'' | |
|
324 | 328 | self._modecache[(topath, revid)] = mode |
|
325 | 329 | changes.append((topath, revid)) |
|
326 | 330 | |
|
327 | 331 | return changes, renames |
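
A small illustration of why the sort key above uses reverse lexicographic path order (hypothetical paths): with a directory rename a => b next to the more specific entry a/c => a/c, the specific entry sorts first and is recorded in `seen`, so a/c is not wrongly renamed to b/c.

    changes = [("a", "b"), ("a/c", "a/c")]
    ordered = sorted(changes, key=lambda c: c[0] or c[1], reverse=True)
    assert ordered == [("a/c", "a/c"), ("a", "b")]  # most specific first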
|
328 | 332 | |
|
329 | 333 | def _filterghosts(self, ids): |
|
330 | 334 | """Filters out ghost revisions which hg does not support, see |
|
331 | 335 | <http://bazaar-vcs.org/GhostRevision> |
|
332 | 336 | """ |
|
333 | 337 | parentmap = self.sourcerepo.get_parent_map(ids) |
|
334 | 338 | parents = tuple([parent for parent in ids if parent in parentmap]) |
|
335 | 339 | return parents |
@@ -1,531 +1,532 @@
|
1 | 1 | # git.py - git support for the convert extension |
|
2 | 2 | # |
|
3 | 3 | # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others |
|
4 | 4 | # |
|
5 | 5 | # This software may be used and distributed according to the terms of the |
|
6 | 6 | # GNU General Public License version 2 or any later version. |
|
7 | 7 | from __future__ import absolute_import |
|
8 | 8 | |
|
9 | 9 | import os |
|
10 | 10 | |
|
11 | 11 | from mercurial.i18n import _ |
|
12 | from mercurial.node import nullhex | 

12 | from mercurial.node import sha1nodeconstants | |
|
13 | 13 | from mercurial import ( |
|
14 | 14 | config, |
|
15 | 15 | error, |
|
16 | 16 | pycompat, |
|
17 | util, | |
|
17 | 18 | ) |
|
18 | 19 | |
|
19 | 20 | from . import common |
|
20 | 21 | |
|
21 | 22 | |
|
22 | 23 | class submodule(object): |
|
23 | 24 | def __init__(self, path, node, url): |
|
24 | 25 | self.path = path |
|
25 | 26 | self.node = node |
|
26 | 27 | self.url = url |
|
27 | 28 | |
|
28 | 29 | def hgsub(self): |
|
29 | 30 | return b"%s = [git]%s" % (self.path, self.url) |
|
30 | 31 | |
|
31 | 32 | def hgsubstate(self): |
|
32 | 33 | return b"%s %s" % (self.node, self.path) |
|
33 | 34 | |
|
34 | 35 | |
|
35 | 36 | # Keys in extra fields that should not be copied if the user requests. |
|
36 | 37 | bannedextrakeys = { |
|
37 | 38 | # Git commit object built-ins. |
|
38 | 39 | b'tree', |
|
39 | 40 | b'parent', |
|
40 | 41 | b'author', |
|
41 | 42 | b'committer', |
|
42 | 43 | # Mercurial built-ins. |
|
43 | 44 | b'branch', |
|
44 | 45 | b'close', |
|
45 | 46 | } |
|
46 | 47 | |
|
47 | 48 | |
|
48 | 49 | class convert_git(common.converter_source, common.commandline): |
|
49 | 50 | # Windows does not support GIT_DIR= construct while other systems |
|
50 | 51 | # cannot remove environment variable. Just assume none have |
|
51 | 52 | # both issues. |
|
52 | 53 | |
|
53 | 54 | def _gitcmd(self, cmd, *args, **kwargs): |
|
54 | 55 | return cmd(b'--git-dir=%s' % self.path, *args, **kwargs) |
|
55 | 56 | |
|
56 | 57 | def gitrun0(self, *args, **kwargs): |
|
57 | 58 | return self._gitcmd(self.run0, *args, **kwargs) |
|
58 | 59 | |
|
59 | 60 | def gitrun(self, *args, **kwargs): |
|
60 | 61 | return self._gitcmd(self.run, *args, **kwargs) |
|
61 | 62 | |
|
62 | 63 | def gitrunlines0(self, *args, **kwargs): |
|
63 | 64 | return self._gitcmd(self.runlines0, *args, **kwargs) |
|
64 | 65 | |
|
65 | 66 | def gitrunlines(self, *args, **kwargs): |
|
66 | 67 | return self._gitcmd(self.runlines, *args, **kwargs) |
|
67 | 68 | |
|
68 | 69 | def gitpipe(self, *args, **kwargs): |
|
69 | 70 | return self._gitcmd(self._run3, *args, **kwargs) |
|
70 | 71 | |
|
71 | 72 | def __init__(self, ui, repotype, path, revs=None): |
|
72 | 73 | super(convert_git, self).__init__(ui, repotype, path, revs=revs) |
|
73 | 74 | common.commandline.__init__(self, ui, b'git') |
|
74 | 75 | |
|
75 | 76 | # Pass an absolute path to git to prevent from ever being interpreted |
|
76 | 77 | # as a URL |
|
77 | path = os.path.abspath(path) | 

78 | path = util.abspath(path) | |
|
78 | 79 | |
|
79 | 80 | if os.path.isdir(path + b"/.git"): |
|
80 | 81 | path += b"/.git" |
|
81 | 82 | if not os.path.exists(path + b"/objects"): |
|
82 | 83 | raise common.NoRepo( |
|
83 | 84 | _(b"%s does not look like a Git repository") % path |
|
84 | 85 | ) |
|
85 | 86 | |
|
86 | 87 | # The default value (50) is based on the default for 'git diff'. |
|
87 | 88 | similarity = ui.configint(b'convert', b'git.similarity') |
|
88 | 89 | if similarity < 0 or similarity > 100: |
|
89 | 90 | raise error.Abort(_(b'similarity must be between 0 and 100')) |
|
90 | 91 | if similarity > 0: |
|
91 | 92 | self.simopt = [b'-C%d%%' % similarity] |
|
92 | 93 | findcopiesharder = ui.configbool( |
|
93 | 94 | b'convert', b'git.findcopiesharder' |
|
94 | 95 | ) |
|
95 | 96 | if findcopiesharder: |
|
96 | 97 | self.simopt.append(b'--find-copies-harder') |
|
97 | 98 | |
|
98 | 99 | renamelimit = ui.configint(b'convert', b'git.renamelimit') |
|
99 | 100 | self.simopt.append(b'-l%d' % renamelimit) |
|
100 | 101 | else: |
|
101 | 102 | self.simopt = [] |
|
102 | 103 | |
|
103 | 104 | common.checktool(b'git', b'git') |
|
104 | 105 | |
|
105 | 106 | self.path = path |
|
106 | 107 | self.submodules = [] |
|
107 | 108 | |
|
108 | 109 | self.catfilepipe = self.gitpipe(b'cat-file', b'--batch') |
|
109 | 110 | |
|
110 | 111 | self.copyextrakeys = self.ui.configlist(b'convert', b'git.extrakeys') |
|
111 | 112 | banned = set(self.copyextrakeys) & bannedextrakeys |
|
112 | 113 | if banned: |
|
113 | 114 | raise error.Abort( |
|
114 | 115 | _(b'copying of extra key is forbidden: %s') |
|
115 | 116 | % _(b', ').join(sorted(banned)) |
|
116 | 117 | ) |
|
117 | 118 | |
|
118 | 119 | committeractions = self.ui.configlist( |
|
119 | 120 | b'convert', b'git.committeractions' |
|
120 | 121 | ) |
|
121 | 122 | |
|
122 | 123 | messagedifferent = None |
|
123 | 124 | messagealways = None |
|
124 | 125 | for a in committeractions: |
|
125 | 126 | if a.startswith((b'messagedifferent', b'messagealways')): |
|
126 | 127 | k = a |
|
127 | 128 | v = None |
|
128 | 129 | if b'=' in a: |
|
129 | 130 | k, v = a.split(b'=', 1) |
|
130 | 131 | |
|
131 | 132 | if k == b'messagedifferent': |
|
132 | 133 | messagedifferent = v or b'committer:' |
|
133 | 134 | elif k == b'messagealways': |
|
134 | 135 | messagealways = v or b'committer:' |
|
135 | 136 | |
|
136 | 137 | if messagedifferent and messagealways: |
|
137 | 138 | raise error.Abort( |
|
138 | 139 | _( |
|
139 | 140 | b'committeractions cannot define both ' |
|
140 | 141 | b'messagedifferent and messagealways' |
|
141 | 142 | ) |
|
142 | 143 | ) |
|
143 | 144 | |
|
144 | 145 | dropcommitter = b'dropcommitter' in committeractions |
|
145 | 146 | replaceauthor = b'replaceauthor' in committeractions |
|
146 | 147 | |
|
147 | 148 | if dropcommitter and replaceauthor: |
|
148 | 149 | raise error.Abort( |
|
149 | 150 | _( |
|
150 | 151 | b'committeractions cannot define both ' |
|
151 | 152 | b'dropcommitter and replaceauthor' |
|
152 | 153 | ) |
|
153 | 154 | ) |
|
154 | 155 | |
|
155 | 156 | if dropcommitter and messagealways: |
|
156 | 157 | raise error.Abort( |
|
157 | 158 | _( |
|
158 | 159 | b'committeractions cannot define both ' |
|
159 | 160 | b'dropcommitter and messagealways' |
|
160 | 161 | ) |
|
161 | 162 | ) |
|
162 | 163 | |
|
163 | 164 | if not messagedifferent and not messagealways: |
|
164 | 165 | messagedifferent = b'committer:' |
|
165 | 166 | |
|
166 | 167 | self.committeractions = { |
|
167 | 168 | b'dropcommitter': dropcommitter, |
|
168 | 169 | b'replaceauthor': replaceauthor, |
|
169 | 170 | b'messagedifferent': messagedifferent, |
|
170 | 171 | b'messagealways': messagealways, |
|
171 | 172 | } |
|
172 | 173 | |
|
173 | 174 | def after(self): |
|
174 | 175 | for f in self.catfilepipe: |
|
175 | 176 | f.close() |
|
176 | 177 | |
|
177 | 178 | def getheads(self): |
|
178 | 179 | if not self.revs: |
|
179 | 180 | output, status = self.gitrun( |
|
180 | 181 | b'rev-parse', b'--branches', b'--remotes' |
|
181 | 182 | ) |
|
182 | 183 | heads = output.splitlines() |
|
183 | 184 | if status: |
|
184 | 185 | raise error.Abort(_(b'cannot retrieve git heads')) |
|
185 | 186 | else: |
|
186 | 187 | heads = [] |
|
187 | 188 | for rev in self.revs: |
|
188 | 189 | rawhead, ret = self.gitrun(b'rev-parse', b'--verify', rev) |
|
189 | 190 | heads.append(rawhead[:-1]) |
|
190 | 191 | if ret: |
|
191 | 192 | raise error.Abort(_(b'cannot retrieve git head "%s"') % rev) |
|
192 | 193 | return heads |
|
193 | 194 | |
|
194 | 195 | def catfile(self, rev, ftype): |
|
195 | if rev == nullhex: | |
|
196 | if rev == sha1nodeconstants.nullhex: | |
|
196 | 197 | raise IOError |
|
197 | 198 | self.catfilepipe[0].write(rev + b'\n') |
|
198 | 199 | self.catfilepipe[0].flush() |
|
199 | 200 | info = self.catfilepipe[1].readline().split() |
|
200 | 201 | if info[1] != ftype: |
|
201 | 202 | raise error.Abort( |
|
202 | 203 | _(b'cannot read %r object at %s') |
|
203 | 204 | % (pycompat.bytestr(ftype), rev) |
|
204 | 205 | ) |
|
205 | 206 | size = int(info[2]) |
|
206 | 207 | data = self.catfilepipe[1].read(size) |
|
207 | 208 | if len(data) < size: |
|
208 | 209 | raise error.Abort( |
|
209 | 210 | _(b'cannot read %r object at %s: unexpected size') |
|
210 | 211 | % (ftype, rev) |
|
211 | 212 | ) |
|
212 | 213 | # read the trailing newline |
|
213 | 214 | self.catfilepipe[1].read(1) |
|
214 | 215 | return data |
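
The framing catfile parses is git's cat-file --batch protocol: a "<sha> <type> <size>" header line, <size> bytes of payload, then one trailing newline. A sketch against a canned response (hypothetical object id, standing in for the pipe):

    import io

    resp = io.BytesIO(b'deadbeef blob 5\nhello\n')
    info = resp.readline().split()  # [b'deadbeef', b'blob', b'5']
    assert info[1] == b'blob'
    data = resp.read(int(info[2]))  # exactly <size> payload bytes
    resp.read(1)                    # consume the trailing newline
    assert data == b'hello'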
|
215 | 216 | |
|
216 | 217 | def getfile(self, name, rev): |
|
217 | if rev == nullhex: | |
|
218 | if rev == sha1nodeconstants.nullhex: | |
|
218 | 219 | return None, None |
|
219 | 220 | if name == b'.hgsub': |
|
220 | 221 | data = b'\n'.join([m.hgsub() for m in self.submoditer()]) |
|
221 | 222 | mode = b'' |
|
222 | 223 | elif name == b'.hgsubstate': |
|
223 | 224 | data = b'\n'.join([m.hgsubstate() for m in self.submoditer()]) |
|
224 | 225 | mode = b'' |
|
225 | 226 | else: |
|
226 | 227 | data = self.catfile(rev, b"blob") |
|
227 | 228 | mode = self.modecache[(name, rev)] |
|
228 | 229 | return data, mode |
|
229 | 230 | |
|
230 | 231 | def submoditer(self): |
|
231 | null = nullhex | |
|
232 | null = sha1nodeconstants.nullhex | |
|
232 | 233 | for m in sorted(self.submodules, key=lambda p: p.path): |
|
233 | 234 | if m.node != null: |
|
234 | 235 | yield m |
|
235 | 236 | |
|
236 | 237 | def parsegitmodules(self, content): |
|
237 | 238 | """Parse the formatted .gitmodules file, example file format: |
|
238 | 239 | [submodule "sub"]\n |
|
239 | 240 | \tpath = sub\n |
|
240 | 241 | \turl = git://giturl\n |
|
241 | 242 | """ |
|
242 | 243 | self.submodules = [] |
|
243 | 244 | c = config.config() |
|
244 | 245 | # Each item in .gitmodules starts with whitespace that can't be parsed 
|
245 | 246 | c.parse( |
|
246 | 247 | b'.gitmodules', |
|
247 | 248 | b'\n'.join(line.strip() for line in content.split(b'\n')), |
|
248 | 249 | ) |
|
249 | 250 | for sec in c.sections(): |
|
250 | 251 | # turn the config object into a real dict |
|
251 | 252 | s = dict(c.items(sec)) |
|
252 | 253 | if b'url' in s and b'path' in s: |
|
253 | 254 | self.submodules.append(submodule(s[b'path'], b'', s[b'url'])) |
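
A sketch of the whitespace normalization parsegitmodules performs before handing the content to Mercurial's config parser, using a hypothetical .gitmodules payload in the format the docstring describes:

    content = b'[submodule "sub"]\n\tpath = sub\n\turl = git://giturl\n'
    stripped = b'\n'.join(line.strip() for line in content.split(b'\n'))
    assert stripped == b'[submodule "sub"]\npath = sub\nurl = git://giturl\n'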
|
254 | 255 | |
|
255 | 256 | def retrievegitmodules(self, version): |
|
256 | 257 | modules, ret = self.gitrun( |
|
257 | 258 | b'show', b'%s:%s' % (version, b'.gitmodules') |
|
258 | 259 | ) |
|
259 | 260 | if ret: |
|
260 | 261 | # This can happen if a file is in the repo that has permissions |
|
261 | 262 | # 160000, but there is no .gitmodules file. |
|
262 | 263 | self.ui.warn( |
|
263 | 264 | _(b"warning: cannot read submodules config file in %s\n") |
|
264 | 265 | % version |
|
265 | 266 | ) |
|
266 | 267 | return |
|
267 | 268 | |
|
268 | 269 | try: |
|
269 | 270 | self.parsegitmodules(modules) |
|
270 | 271 | except error.ParseError: |
|
271 | 272 | self.ui.warn( |
|
272 | 273 | _(b"warning: unable to parse .gitmodules in %s\n") % version |
|
273 | 274 | ) |
|
274 | 275 | return |
|
275 | 276 | |
|
276 | 277 | for m in self.submodules: |
|
277 | 278 | node, ret = self.gitrun(b'rev-parse', b'%s:%s' % (version, m.path)) |
|
278 | 279 | if ret: |
|
279 | 280 | continue |
|
280 | 281 | m.node = node.strip() |
|
281 | 282 | |
|
282 | 283 | def getchanges(self, version, full): |
|
283 | 284 | if full: |
|
284 | 285 | raise error.Abort(_(b"convert from git does not support --full")) |
|
285 | 286 | self.modecache = {} |
|
286 | 287 | cmd = ( |
|
287 | 288 | [b'diff-tree', b'-z', b'--root', b'-m', b'-r'] |
|
288 | 289 | + self.simopt |
|
289 | 290 | + [version] |
|
290 | 291 | ) |
|
291 | 292 | output, status = self.gitrun(*cmd) |
|
292 | 293 | if status: |
|
293 | 294 | raise error.Abort(_(b'cannot read changes in %s') % version) |
|
294 | 295 | changes = [] |
|
295 | 296 | copies = {} |
|
296 | 297 | seen = set() |
|
297 | 298 | entry = None |
|
298 | 299 | subexists = [False] |
|
299 | 300 | subdeleted = [False] |
|
300 | 301 | difftree = output.split(b'\x00') |
|
301 | 302 | lcount = len(difftree) |
|
302 | 303 | i = 0 |
|
303 | 304 | |
|
304 | 305 | skipsubmodules = self.ui.configbool(b'convert', b'git.skipsubmodules') |
|
305 | 306 | |
|
306 | 307 | def add(entry, f, isdest): |
|
307 | 308 | seen.add(f) |
|
308 | 309 | h = entry[3] |
|
309 | 310 | p = entry[1] == b"100755" |
|
310 | 311 | s = entry[1] == b"120000" |
|
311 | 312 | renamesource = not isdest and entry[4][0] == b'R' |
|
312 | 313 | |
|
313 | 314 | if f == b'.gitmodules': |
|
314 | 315 | if skipsubmodules: |
|
315 | 316 | return |
|
316 | 317 | |
|
317 | 318 | subexists[0] = True |
|
318 | 319 | if entry[4] == b'D' or renamesource: |
|
319 | 320 | subdeleted[0] = True |
|
320 | changes.append((b'.hgsub', nullhex)) | |
|
321 | changes.append((b'.hgsub', sha1nodeconstants.nullhex)) | |
|
321 | 322 | else: |
|
322 | 323 | changes.append((b'.hgsub', b'')) |
|
323 | 324 | elif entry[1] == b'160000' or entry[0] == b':160000': |
|
324 | 325 | if not skipsubmodules: |
|
325 | 326 | subexists[0] = True |
|
326 | 327 | else: |
|
327 | 328 | if renamesource: |
|
328 | h = nullhex | |
|
329 | h = sha1nodeconstants.nullhex | |
|
329 | 330 | self.modecache[(f, h)] = (p and b"x") or (s and b"l") or b"" |
|
330 | 331 | changes.append((f, h)) |
|
331 | 332 | |
|
332 | 333 | while i < lcount: |
|
333 | 334 | l = difftree[i] |
|
334 | 335 | i += 1 |
|
335 | 336 | if not entry: |
|
336 | 337 | if not l.startswith(b':'): |
|
337 | 338 | continue |
|
338 | 339 | entry = tuple(pycompat.bytestr(p) for p in l.split()) |
|
339 | 340 | continue |
|
340 | 341 | f = l |
|
341 | 342 | if entry[4][0] == b'C': |
|
342 | 343 | copysrc = f |
|
343 | 344 | copydest = difftree[i] |
|
344 | 345 | i += 1 |
|
345 | 346 | f = copydest |
|
346 | 347 | copies[copydest] = copysrc |
|
347 | 348 | if f not in seen: |
|
348 | 349 | add(entry, f, False) |
|
349 | 350 | # A file can be copied multiple times, or modified and copied |
|
350 | 351 | # simultaneously. So f can be repeated even if fdest isn't. |
|
351 | 352 | if entry[4][0] == b'R': |
|
352 | 353 | # rename: next line is the destination |
|
353 | 354 | fdest = difftree[i] |
|
354 | 355 | i += 1 |
|
355 | 356 | if fdest not in seen: |
|
356 | 357 | add(entry, fdest, True) |
|
357 | 358 | # .gitmodules isn't imported at all, so it being copied to |
|
358 | 359 | # and fro doesn't really make sense |
|
359 | 360 | if f != b'.gitmodules' and fdest != b'.gitmodules': |
|
360 | 361 | copies[fdest] = f |
|
361 | 362 | entry = None |
|
362 | 363 | |
|
363 | 364 | if subexists[0]: |
|
364 | 365 | if subdeleted[0]: |
|
365 | changes.append((b'.hgsubstate', nullhex)) | |
|
366 | changes.append((b'.hgsubstate', sha1nodeconstants.nullhex)) | |
|
366 | 367 | else: |
|
367 | 368 | self.retrievegitmodules(version) |
|
368 | 369 | changes.append((b'.hgsubstate', b'')) |
|
369 | 370 | return (changes, copies, set()) |
|
370 | 371 | |
|
371 | 372 | def getcommit(self, version): |
|
372 | 373 | c = self.catfile(version, b"commit") # read the commit hash |
|
373 | 374 | end = c.find(b"\n\n") |
|
374 | 375 | message = c[end + 2 :] |
|
375 | 376 | message = self.recode(message) |
|
376 | 377 | l = c[:end].splitlines() |
|
377 | 378 | parents = [] |
|
378 | 379 | author = committer = None |
|
379 | 380 | extra = {} |
|
380 | 381 | for e in l[1:]: |
|
381 | 382 | n, v = e.split(b" ", 1) |
|
382 | 383 | if n == b"author": |
|
383 | 384 | p = v.split() |
|
384 | 385 | tm, tz = p[-2:] |
|
385 | 386 | author = b" ".join(p[:-2]) |
|
386 | 387 | if author[0] == b"<": |
|
387 | 388 | author = author[1:-1] |
|
388 | 389 | author = self.recode(author) |
|
389 | 390 | if n == b"committer": |
|
390 | 391 | p = v.split() |
|
391 | 392 | tm, tz = p[-2:] |
|
392 | 393 | committer = b" ".join(p[:-2]) |
|
393 | 394 | if committer[0] == b"<": |
|
394 | 395 | committer = committer[1:-1] |
|
395 | 396 | committer = self.recode(committer) |
|
396 | 397 | if n == b"parent": |
|
397 | 398 | parents.append(v) |
|
398 | 399 | if n in self.copyextrakeys: |
|
399 | 400 | extra[n] = v |
|
400 | 401 | |
|
401 | 402 | if self.committeractions[b'dropcommitter']: |
|
402 | 403 | committer = None |
|
403 | 404 | elif self.committeractions[b'replaceauthor']: |
|
404 | 405 | author = committer |
|
405 | 406 | |
|
406 | 407 | if committer: |
|
407 | 408 | messagealways = self.committeractions[b'messagealways'] |
|
408 | 409 | messagedifferent = self.committeractions[b'messagedifferent'] |
|
409 | 410 | if messagealways: |
|
410 | 411 | message += b'\n%s %s\n' % (messagealways, committer) |
|
411 | 412 | elif messagedifferent and author != committer: |
|
412 | 413 | message += b'\n%s %s\n' % (messagedifferent, committer) |
|
413 | 414 | |
|
414 | 415 | tzs, tzh, tzm = tz[-5:-4] + b"1", tz[-4:-2], tz[-2:] |
|
415 | 416 | tz = -int(tzs) * (int(tzh) * 3600 + int(tzm)) |
|
416 | 417 | date = tm + b" " + (b"%d" % tz) |
|
417 | 418 | saverev = self.ui.configbool(b'convert', b'git.saverev') |
|
418 | 419 | |
|
419 | 420 | c = common.commit( |
|
420 | 421 | parents=parents, |
|
421 | 422 | date=date, |
|
422 | 423 | author=author, |
|
423 | 424 | desc=message, |
|
424 | 425 | rev=version, |
|
425 | 426 | extra=extra, |
|
426 | 427 | saverev=saverev, |
|
427 | 428 | ) |
|
428 | 429 | return c |
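
Tracing the zone arithmetic in getcommit above with a hypothetical git timezone string (note that int() accepts bytes on Python 3): the sign character becomes a +1/-1 multiplier, and the result is negated into the offset stored next to the unix time.

    tz = b"+0530"
    tzs, tzh, tzm = tz[-5:-4] + b"1", tz[-4:-2], tz[-2:]
    assert (tzs, tzh, tzm) == (b"+1", b"05", b"30")
    offset = -int(tzs) * (int(tzh) * 3600 + int(tzm))
    assert offset == -18030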
|
429 | 430 | |
|
430 | 431 | def numcommits(self): |
|
431 | 432 | output, ret = self.gitrunlines(b'rev-list', b'--all') |
|
432 | 433 | if ret: |
|
433 | 434 | raise error.Abort( |
|
434 | 435 | _(b'cannot retrieve number of commits in %s') % self.path |
|
435 | 436 | ) |
|
436 | 437 | return len(output) |
|
437 | 438 | |
|
438 | 439 | def gettags(self): |
|
439 | 440 | tags = {} |
|
440 | 441 | alltags = {} |
|
441 | 442 | output, status = self.gitrunlines(b'ls-remote', b'--tags', self.path) |
|
442 | 443 | |
|
443 | 444 | if status: |
|
444 | 445 | raise error.Abort(_(b'cannot read tags from %s') % self.path) |
|
445 | 446 | prefix = b'refs/tags/' |
|
446 | 447 | |
|
447 | 448 | # Build complete list of tags, both annotated and bare ones |
|
448 | 449 | for line in output: |
|
449 | 450 | line = line.strip() |
|
450 | 451 | if line.startswith(b"error:") or line.startswith(b"fatal:"): |
|
451 | 452 | raise error.Abort(_(b'cannot read tags from %s') % self.path) |
|
452 | 453 | node, tag = line.split(None, 1) |
|
453 | 454 | if not tag.startswith(prefix): |
|
454 | 455 | continue |
|
455 | 456 | alltags[tag[len(prefix) :]] = node |
|
456 | 457 | |
|
457 | 458 | # Filter out tag objects for annotated tag refs |
|
458 | 459 | for tag in alltags: |
|
459 | 460 | if tag.endswith(b'^{}'): |
|
460 | 461 | tags[tag[:-3]] = alltags[tag] |
|
461 | 462 | else: |
|
462 | 463 | if tag + b'^{}' in alltags: |
|
463 | 464 | continue |
|
464 | 465 | else: |
|
465 | 466 | tags[tag] = alltags[tag] |
|
466 | 467 | |
|
467 | 468 | return tags |
|
468 | 469 | |
|
469 | 470 | def getchangedfiles(self, version, i): |
|
470 | 471 | changes = [] |
|
471 | 472 | if i is None: |
|
472 | 473 | output, status = self.gitrunlines( |
|
473 | 474 | b'diff-tree', b'--root', b'-m', b'-r', version |
|
474 | 475 | ) |
|
475 | 476 | if status: |
|
476 | 477 | raise error.Abort(_(b'cannot read changes in %s') % version) |
|
477 | 478 | for l in output: |
|
478 | 479 | if b"\t" not in l: |
|
479 | 480 | continue |
|
480 | 481 | m, f = l[:-1].split(b"\t") |
|
481 | 482 | changes.append(f) |
|
482 | 483 | else: |
|
483 | 484 | output, status = self.gitrunlines( |
|
484 | 485 | b'diff-tree', |
|
485 | 486 | b'--name-only', |
|
486 | 487 | b'--root', |
|
487 | 488 | b'-r', |
|
488 | 489 | version, |
|
489 | 490 | b'%s^%d' % (version, i + 1), |
|
490 | 491 | b'--', |
|
491 | 492 | ) |
|
492 | 493 | if status: |
|
493 | 494 | raise error.Abort(_(b'cannot read changes in %s') % version) |
|
494 | 495 | changes = [f.rstrip(b'\n') for f in output] |
|
495 | 496 | |
|
496 | 497 | return changes |
|
497 | 498 | |
|
498 | 499 | def getbookmarks(self): |
|
499 | 500 | bookmarks = {} |
|
500 | 501 | |
|
501 | 502 | # Handle local and remote branches |
|
502 | 503 | remoteprefix = self.ui.config(b'convert', b'git.remoteprefix') |
|
503 | 504 | reftypes = [ |
|
504 | 505 | # (git prefix, hg prefix) |
|
505 | 506 | (b'refs/remotes/origin/', remoteprefix + b'/'), |
|
506 | 507 | (b'refs/heads/', b''), |
|
507 | 508 | ] |
|
508 | 509 | |
|
509 | 510 | exclude = { |
|
510 | 511 | b'refs/remotes/origin/HEAD', |
|
511 | 512 | } |
|
512 | 513 | |
|
513 | 514 | try: |
|
514 | 515 | output, status = self.gitrunlines(b'show-ref') |
|
515 | 516 | for line in output: |
|
516 | 517 | line = line.strip() |
|
517 | 518 | rev, name = line.split(None, 1) |
|
518 | 519 | # Process each type of branch |
|
519 | 520 | for gitprefix, hgprefix in reftypes: |
|
520 | 521 | if not name.startswith(gitprefix) or name in exclude: |
|
521 | 522 | continue |
|
522 | 523 | name = b'%s%s' % (hgprefix, name[len(gitprefix) :]) |
|
523 | 524 | bookmarks[name] = rev |
|
524 | 525 | except Exception: |
|
525 | 526 | pass |
|
526 | 527 | |
|
527 | 528 | return bookmarks |
|
528 | 529 | |
|
529 | 530 | def checkrevformat(self, revstr, mapname=b'splicemap'): |
|
530 | 531 | """git revision string is a 40 byte hex""" |
|
531 | 532 | self.checkhexformat(revstr, mapname) |
|
1 | NO CONTENT: file copied from mercurial/dirstate.py to mercurial/dirstatemap.py | |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: modified file | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: file was removed | |
The requested commit or file is too big and content was truncated. Show full diff |
|
1 | NO CONTENT: file was removed | |
The requested commit or file is too big and content was truncated. Show full diff |