##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r66:64866b2d merge stable
parent child Browse files
Show More
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.3.1
2 current_version = 4.4.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.3.1
12 state = in_progress
13 version = 4.4.0
16 14
@@ -1,471 +1,471 b''
1 1 {
2 2 Beaker = super.buildPythonPackage {
3 3 name = "Beaker-1.7.0";
4 4 buildInputs = with self; [];
5 5 doCheck = false;
6 6 propagatedBuildInputs = with self; [];
7 7 src = fetchurl {
8 8 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
9 9 md5 = "386be3f7fe427358881eee4622b428b3";
10 10 };
11 11 meta = {
12 12 license = [ pkgs.lib.licenses.bsdOriginal ];
13 13 };
14 14 };
15 15 Jinja2 = super.buildPythonPackage {
16 16 name = "Jinja2-2.8";
17 17 buildInputs = with self; [];
18 18 doCheck = false;
19 19 propagatedBuildInputs = with self; [MarkupSafe];
20 20 src = fetchurl {
21 21 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
22 22 md5 = "edb51693fe22c53cee5403775c71a99e";
23 23 };
24 24 meta = {
25 25 license = [ pkgs.lib.licenses.bsdOriginal ];
26 26 };
27 27 };
28 28 Mako = super.buildPythonPackage {
29 29 name = "Mako-1.0.4";
30 30 buildInputs = with self; [];
31 31 doCheck = false;
32 32 propagatedBuildInputs = with self; [MarkupSafe];
33 33 src = fetchurl {
34 34 url = "https://pypi.python.org/packages/7a/ae/925434246ee90b42e8ef57d3b30a0ab7caf9a2de3e449b876c56dcb48155/Mako-1.0.4.tar.gz";
35 35 md5 = "c5fc31a323dd4990683d2f2da02d4e20";
36 36 };
37 37 meta = {
38 38 license = [ pkgs.lib.licenses.mit ];
39 39 };
40 40 };
41 41 MarkupSafe = super.buildPythonPackage {
42 42 name = "MarkupSafe-0.23";
43 43 buildInputs = with self; [];
44 44 doCheck = false;
45 45 propagatedBuildInputs = with self; [];
46 46 src = fetchurl {
47 47 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
48 48 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
49 49 };
50 50 meta = {
51 51 license = [ pkgs.lib.licenses.bsdOriginal ];
52 52 };
53 53 };
54 54 PasteDeploy = super.buildPythonPackage {
55 55 name = "PasteDeploy-1.5.2";
56 56 buildInputs = with self; [];
57 57 doCheck = false;
58 58 propagatedBuildInputs = with self; [];
59 59 src = fetchurl {
60 60 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
61 61 md5 = "352b7205c78c8de4987578d19431af3b";
62 62 };
63 63 meta = {
64 64 license = [ pkgs.lib.licenses.mit ];
65 65 };
66 66 };
67 67 Pyro4 = super.buildPythonPackage {
68 68 name = "Pyro4-4.41";
69 69 buildInputs = with self; [];
70 70 doCheck = false;
71 71 propagatedBuildInputs = with self; [serpent];
72 72 src = fetchurl {
73 73 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
74 74 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
75 75 };
76 76 meta = {
77 77 license = [ pkgs.lib.licenses.mit ];
78 78 };
79 79 };
80 80 WebOb = super.buildPythonPackage {
81 81 name = "WebOb-1.3.1";
82 82 buildInputs = with self; [];
83 83 doCheck = false;
84 84 propagatedBuildInputs = with self; [];
85 85 src = fetchurl {
86 86 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
87 87 md5 = "20918251c5726956ba8fef22d1556177";
88 88 };
89 89 meta = {
90 90 license = [ pkgs.lib.licenses.mit ];
91 91 };
92 92 };
93 93 WebTest = super.buildPythonPackage {
94 94 name = "WebTest-1.4.3";
95 95 buildInputs = with self; [];
96 96 doCheck = false;
97 97 propagatedBuildInputs = with self; [WebOb];
98 98 src = fetchurl {
99 99 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
100 100 md5 = "631ce728bed92c681a4020a36adbc353";
101 101 };
102 102 meta = {
103 103 license = [ pkgs.lib.licenses.mit ];
104 104 };
105 105 };
106 106 configobj = super.buildPythonPackage {
107 107 name = "configobj-5.0.6";
108 108 buildInputs = with self; [];
109 109 doCheck = false;
110 110 propagatedBuildInputs = with self; [six];
111 111 src = fetchurl {
112 112 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
113 113 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
114 114 };
115 115 meta = {
116 116 license = [ pkgs.lib.licenses.bsdOriginal ];
117 117 };
118 118 };
119 119 dulwich = super.buildPythonPackage {
120 120 name = "dulwich-0.13.0";
121 121 buildInputs = with self; [];
122 122 doCheck = false;
123 123 propagatedBuildInputs = with self; [];
124 124 src = fetchurl {
125 125 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
126 126 md5 = "6dede0626657c2bd08f48ca1221eea91";
127 127 };
128 128 meta = {
129 129 license = [ pkgs.lib.licenses.gpl2Plus ];
130 130 };
131 131 };
132 132 greenlet = super.buildPythonPackage {
133 133 name = "greenlet-0.4.7";
134 134 buildInputs = with self; [];
135 135 doCheck = false;
136 136 propagatedBuildInputs = with self; [];
137 137 src = fetchurl {
138 138 url = "https://pypi.python.org/packages/7a/9f/a1a0d9bdf3203ae1502c5a8434fe89d323599d78a106985bc327351a69d4/greenlet-0.4.7.zip";
139 139 md5 = "c2333a8ff30fa75c5d5ec0e67b461086";
140 140 };
141 141 meta = {
142 142 license = [ pkgs.lib.licenses.mit ];
143 143 };
144 144 };
145 145 gunicorn = super.buildPythonPackage {
146 146 name = "gunicorn-19.6.0";
147 147 buildInputs = with self; [];
148 148 doCheck = false;
149 149 propagatedBuildInputs = with self; [];
150 150 src = fetchurl {
151 151 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
152 152 md5 = "338e5e8a83ea0f0625f768dba4597530";
153 153 };
154 154 meta = {
155 155 license = [ pkgs.lib.licenses.mit ];
156 156 };
157 157 };
158 158 hgsubversion = super.buildPythonPackage {
159 159 name = "hgsubversion-1.8.6";
160 160 buildInputs = with self; [];
161 161 doCheck = false;
162 162 propagatedBuildInputs = with self; [mercurial subvertpy];
163 163 src = fetchurl {
164 164 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
165 165 md5 = "9310cb266031cf8d0779885782a84a5b";
166 166 };
167 167 meta = {
168 168 license = [ pkgs.lib.licenses.gpl1 ];
169 169 };
170 170 };
171 171 infrae.cache = super.buildPythonPackage {
172 172 name = "infrae.cache-1.0.1";
173 173 buildInputs = with self; [];
174 174 doCheck = false;
175 175 propagatedBuildInputs = with self; [Beaker repoze.lru];
176 176 src = fetchurl {
177 177 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
178 178 md5 = "b09076a766747e6ed2a755cc62088e32";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.zpt21 ];
182 182 };
183 183 };
184 184 mercurial = super.buildPythonPackage {
185 185 name = "mercurial-3.8.4";
186 186 buildInputs = with self; [];
187 187 doCheck = false;
188 188 propagatedBuildInputs = with self; [];
189 189 src = fetchurl {
190 190 url = "https://pypi.python.org/packages/bc/16/b66eef0b70ee2b4ebb8e76622fe21bbed834606dd8c1bd30d6936ebf6f45/mercurial-3.8.4.tar.gz";
191 191 md5 = "cec2c3db688cb87142809089c6ae13e9";
192 192 };
193 193 meta = {
194 194 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
195 195 };
196 196 };
197 197 mock = super.buildPythonPackage {
198 198 name = "mock-1.0.1";
199 199 buildInputs = with self; [];
200 200 doCheck = false;
201 201 propagatedBuildInputs = with self; [];
202 202 src = fetchurl {
203 203 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
204 204 md5 = "869f08d003c289a97c1a6610faf5e913";
205 205 };
206 206 meta = {
207 207 license = [ pkgs.lib.licenses.bsdOriginal ];
208 208 };
209 209 };
210 210 msgpack-python = super.buildPythonPackage {
211 211 name = "msgpack-python-0.4.6";
212 212 buildInputs = with self; [];
213 213 doCheck = false;
214 214 propagatedBuildInputs = with self; [];
215 215 src = fetchurl {
216 216 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
217 217 md5 = "8b317669314cf1bc881716cccdaccb30";
218 218 };
219 219 meta = {
220 220 license = [ pkgs.lib.licenses.asl20 ];
221 221 };
222 222 };
223 223 py = super.buildPythonPackage {
224 224 name = "py-1.4.29";
225 225 buildInputs = with self; [];
226 226 doCheck = false;
227 227 propagatedBuildInputs = with self; [];
228 228 src = fetchurl {
229 229 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
230 230 md5 = "c28e0accba523a29b35a48bb703fb96c";
231 231 };
232 232 meta = {
233 233 license = [ pkgs.lib.licenses.mit ];
234 234 };
235 235 };
236 236 pyramid = super.buildPythonPackage {
237 237 name = "pyramid-1.6.1";
238 238 buildInputs = with self; [];
239 239 doCheck = false;
240 240 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
241 241 src = fetchurl {
242 242 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
243 243 md5 = "b18688ff3cc33efdbb098a35b45dd122";
244 244 };
245 245 meta = {
246 246 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
247 247 };
248 248 };
249 249 pyramid-jinja2 = super.buildPythonPackage {
250 250 name = "pyramid-jinja2-2.5";
251 251 buildInputs = with self; [];
252 252 doCheck = false;
253 253 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
254 254 src = fetchurl {
255 255 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
256 256 md5 = "07cb6547204ac5e6f0b22a954ccee928";
257 257 };
258 258 meta = {
259 259 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
260 260 };
261 261 };
262 262 pyramid-mako = super.buildPythonPackage {
263 263 name = "pyramid-mako-1.0.2";
264 264 buildInputs = with self; [];
265 265 doCheck = false;
266 266 propagatedBuildInputs = with self; [pyramid Mako];
267 267 src = fetchurl {
268 268 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
269 269 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
270 270 };
271 271 meta = {
272 272 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
273 273 };
274 274 };
275 275 pytest = super.buildPythonPackage {
276 276 name = "pytest-2.8.5";
277 277 buildInputs = with self; [];
278 278 doCheck = false;
279 279 propagatedBuildInputs = with self; [py];
280 280 src = fetchurl {
281 281 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
282 282 md5 = "8493b06f700862f1294298d6c1b715a9";
283 283 };
284 284 meta = {
285 285 license = [ pkgs.lib.licenses.mit ];
286 286 };
287 287 };
288 288 repoze.lru = super.buildPythonPackage {
289 289 name = "repoze.lru-0.6";
290 290 buildInputs = with self; [];
291 291 doCheck = false;
292 292 propagatedBuildInputs = with self; [];
293 293 src = fetchurl {
294 294 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
295 295 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
296 296 };
297 297 meta = {
298 298 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
299 299 };
300 300 };
301 301 rhodecode-vcsserver = super.buildPythonPackage {
302 name = "rhodecode-vcsserver-4.3.1";
302 name = "rhodecode-vcsserver-4.4.0";
303 303 buildInputs = with self; [mock pytest WebTest];
304 304 doCheck = true;
305 305 propagatedBuildInputs = with self; [configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid Pyro4 simplejson subprocess32 waitress WebOb];
306 306 src = ./.;
307 307 meta = {
308 308 license = [ pkgs.lib.licenses.gpl3 { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
309 309 };
310 310 };
311 311 serpent = super.buildPythonPackage {
312 312 name = "serpent-1.12";
313 313 buildInputs = with self; [];
314 314 doCheck = false;
315 315 propagatedBuildInputs = with self; [];
316 316 src = fetchurl {
317 317 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
318 318 md5 = "05869ac7b062828b34f8f927f0457b65";
319 319 };
320 320 meta = {
321 321 license = [ pkgs.lib.licenses.mit ];
322 322 };
323 323 };
324 324 setuptools = super.buildPythonPackage {
325 325 name = "setuptools-20.8.1";
326 326 buildInputs = with self; [];
327 327 doCheck = false;
328 328 propagatedBuildInputs = with self; [];
329 329 src = fetchurl {
330 330 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
331 331 md5 = "fe58a5cac0df20bb83942b252a4b0543";
332 332 };
333 333 meta = {
334 334 license = [ pkgs.lib.licenses.mit ];
335 335 };
336 336 };
337 337 simplejson = super.buildPythonPackage {
338 338 name = "simplejson-3.7.2";
339 339 buildInputs = with self; [];
340 340 doCheck = false;
341 341 propagatedBuildInputs = with self; [];
342 342 src = fetchurl {
343 343 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
344 344 md5 = "a5fc7d05d4cb38492285553def5d4b46";
345 345 };
346 346 meta = {
347 347 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
348 348 };
349 349 };
350 350 six = super.buildPythonPackage {
351 351 name = "six-1.9.0";
352 352 buildInputs = with self; [];
353 353 doCheck = false;
354 354 propagatedBuildInputs = with self; [];
355 355 src = fetchurl {
356 356 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
357 357 md5 = "476881ef4012262dfc8adc645ee786c4";
358 358 };
359 359 meta = {
360 360 license = [ pkgs.lib.licenses.mit ];
361 361 };
362 362 };
363 363 subprocess32 = super.buildPythonPackage {
364 364 name = "subprocess32-3.2.6";
365 365 buildInputs = with self; [];
366 366 doCheck = false;
367 367 propagatedBuildInputs = with self; [];
368 368 src = fetchurl {
369 369 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
370 370 md5 = "754c5ab9f533e764f931136974b618f1";
371 371 };
372 372 meta = {
373 373 license = [ pkgs.lib.licenses.psfl ];
374 374 };
375 375 };
376 376 subvertpy = super.buildPythonPackage {
377 377 name = "subvertpy-0.9.3";
378 378 buildInputs = with self; [];
379 379 doCheck = false;
380 380 propagatedBuildInputs = with self; [];
381 381 src = fetchurl {
382 382 url = "https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz";
383 383 md5 = "7b745a47128050ea5a73efcd913ec1cf";
384 384 };
385 385 meta = {
386 386 license = [ pkgs.lib.licenses.lgpl21Plus ];
387 387 };
388 388 };
389 389 translationstring = super.buildPythonPackage {
390 390 name = "translationstring-1.3";
391 391 buildInputs = with self; [];
392 392 doCheck = false;
393 393 propagatedBuildInputs = with self; [];
394 394 src = fetchurl {
395 395 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
396 396 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
397 397 };
398 398 meta = {
399 399 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
400 400 };
401 401 };
402 402 venusian = super.buildPythonPackage {
403 403 name = "venusian-1.0";
404 404 buildInputs = with self; [];
405 405 doCheck = false;
406 406 propagatedBuildInputs = with self; [];
407 407 src = fetchurl {
408 408 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
409 409 md5 = "dccf2eafb7113759d60c86faf5538756";
410 410 };
411 411 meta = {
412 412 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
413 413 };
414 414 };
415 415 waitress = super.buildPythonPackage {
416 416 name = "waitress-0.8.9";
417 417 buildInputs = with self; [];
418 418 doCheck = false;
419 419 propagatedBuildInputs = with self; [setuptools];
420 420 src = fetchurl {
421 421 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
422 422 md5 = "da3f2e62b3676be5dd630703a68e2a04";
423 423 };
424 424 meta = {
425 425 license = [ pkgs.lib.licenses.zpt21 ];
426 426 };
427 427 };
428 428 wheel = super.buildPythonPackage {
429 429 name = "wheel-0.29.0";
430 430 buildInputs = with self; [];
431 431 doCheck = false;
432 432 propagatedBuildInputs = with self; [];
433 433 src = fetchurl {
434 434 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
435 435 md5 = "555a67e4507cedee23a0deb9651e452f";
436 436 };
437 437 meta = {
438 438 license = [ pkgs.lib.licenses.mit ];
439 439 };
440 440 };
441 441 zope.deprecation = super.buildPythonPackage {
442 442 name = "zope.deprecation-4.1.1";
443 443 buildInputs = with self; [];
444 444 doCheck = false;
445 445 propagatedBuildInputs = with self; [setuptools];
446 446 src = fetchurl {
447 447 url = "https://pypi.python.org/packages/c5/c9/e760f131fcde817da6c186a3f4952b8f206b7eeb269bb6f0836c715c5f20/zope.deprecation-4.1.1.tar.gz";
448 448 md5 = "ce261b9384066f7e13b63525778430cb";
449 449 };
450 450 meta = {
451 451 license = [ pkgs.lib.licenses.zpt21 ];
452 452 };
453 453 };
454 454 zope.interface = super.buildPythonPackage {
455 455 name = "zope.interface-4.1.3";
456 456 buildInputs = with self; [];
457 457 doCheck = false;
458 458 propagatedBuildInputs = with self; [setuptools];
459 459 src = fetchurl {
460 460 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
461 461 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
462 462 };
463 463 meta = {
464 464 license = [ pkgs.lib.licenses.zpt21 ];
465 465 };
466 466 };
467 467
468 468 ### Test requirements
469 469
470 470
471 471 }
@@ -1,1 +1,1 b''
1 4.3.1 No newline at end of file
1 4.4.0 No newline at end of file
@@ -1,588 +1,575 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import urllib
24 24 import urllib2
25 25 from functools import wraps
26 26
27 27 from dulwich import index, objects
28 28 from dulwich.client import HttpGitClient, LocalGitClient
29 29 from dulwich.errors import (
30 30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 31 MissingCommitError, ObjectMissing, HangupException,
32 32 UnexpectedCommandError)
33 33 from dulwich.repo import Repo as DulwichRepo, Tag
34 34 from dulwich.server import update_server_info
35 35
36 36 from vcsserver import exceptions, settings, subprocessio
37 37 from vcsserver.utils import safe_str
38 38 from vcsserver.base import RepoFactory
39 39 from vcsserver.hgcompat import (
40 40 hg_url, httpbasicauthhandler, httpdigestauthhandler)
41 41
42 42
43 43 DIR_STAT = stat.S_IFDIR
44 44 FILE_MODE = stat.S_IFMT
45 45 GIT_LINK = objects.S_IFGITLINK
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def reraise_safe_exceptions(func):
51 51 """Converts Dulwich exceptions to something neutral."""
52 52 @wraps(func)
53 53 def wrapper(*args, **kwargs):
54 54 try:
55 55 return func(*args, **kwargs)
56 56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 57 ObjectMissing) as e:
58 58 raise exceptions.LookupException(e.message)
59 59 except (HangupException, UnexpectedCommandError) as e:
60 60 raise exceptions.VcsException(e.message)
61 61 return wrapper
62 62
63 63
64 64 class Repo(DulwichRepo):
65 65 """
66 66 A wrapper for dulwich Repo class.
67 67
68 68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 69 "Too many open files" error. We need to close all opened file descriptors
70 70 once the repo object is destroyed.
71 71
72 72 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 73 to 0.12.0 +
74 74 """
75 75 def __del__(self):
76 76 if hasattr(self, 'object_store'):
77 77 self.close()
78 78
79 79
80 80 class GitFactory(RepoFactory):
81 81
82 82 def _create_repo(self, wire, create):
83 83 repo_path = str_to_dulwich(wire['path'])
84 84 return Repo(repo_path)
85 85
86 86
87 87 class GitRemote(object):
88 88
89 89 def __init__(self, factory):
90 90 self._factory = factory
91 91
92 92 self._bulk_methods = {
93 93 "author": self.commit_attribute,
94 94 "date": self.get_object_attrs,
95 95 "message": self.commit_attribute,
96 96 "parents": self.commit_attribute,
97 97 "_commit": self.revision,
98 98 }
99 99
100 100 def _assign_ref(self, wire, ref, commit_id):
101 101 repo = self._factory.repo(wire)
102 102 repo[ref] = commit_id
103 103
104 104 @reraise_safe_exceptions
105 105 def add_object(self, wire, content):
106 106 repo = self._factory.repo(wire)
107 107 blob = objects.Blob()
108 108 blob.set_raw_string(content)
109 109 repo.object_store.add_object(blob)
110 110 return blob.id
111 111
112 112 @reraise_safe_exceptions
113 113 def assert_correct_path(self, wire):
114 114 try:
115 115 self._factory.repo(wire)
116 116 except NotGitRepository as e:
117 117 # Exception can contain unicode which we convert
118 118 raise exceptions.AbortException(repr(e))
119 119
120 120 @reraise_safe_exceptions
121 121 def bare(self, wire):
122 122 repo = self._factory.repo(wire)
123 123 return repo.bare
124 124
125 125 @reraise_safe_exceptions
126 126 def blob_as_pretty_string(self, wire, sha):
127 127 repo = self._factory.repo(wire)
128 128 return repo[sha].as_pretty_string()
129 129
130 130 @reraise_safe_exceptions
131 131 def blob_raw_length(self, wire, sha):
132 132 repo = self._factory.repo(wire)
133 133 blob = repo[sha]
134 134 return blob.raw_length()
135 135
136 136 @reraise_safe_exceptions
137 137 def bulk_request(self, wire, rev, pre_load):
138 138 result = {}
139 139 for attr in pre_load:
140 140 try:
141 141 method = self._bulk_methods[attr]
142 142 args = [wire, rev]
143 143 if attr == "date":
144 144 args.extend(["commit_time", "commit_timezone"])
145 145 elif attr in ["author", "message", "parents"]:
146 146 args.append(attr)
147 147 result[attr] = method(*args)
148 148 except KeyError:
149 149 raise exceptions.VcsException(
150 150 "Unknown bulk attribute: %s" % attr)
151 151 return result
152 152
153 153 def _build_opener(self, url):
154 154 handlers = []
155 155 url_obj = hg_url(url)
156 156 _, authinfo = url_obj.authinfo()
157 157
158 158 if authinfo:
159 159 # create a password manager
160 160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 161 passmgr.add_password(*authinfo)
162 162
163 163 handlers.extend((httpbasicauthhandler(passmgr),
164 164 httpdigestauthhandler(passmgr)))
165 165
166 166 return urllib2.build_opener(*handlers)
167 167
168 168 @reraise_safe_exceptions
169 169 def check_url(self, url, config):
170 170 url_obj = hg_url(url)
171 171 test_uri, _ = url_obj.authinfo()
172 172 url_obj.passwd = '*****'
173 173 cleaned_uri = str(url_obj)
174 174
175 175 if not test_uri.endswith('info/refs'):
176 176 test_uri = test_uri.rstrip('/') + '/info/refs'
177 177
178 178 o = self._build_opener(url)
179 179 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
180 180
181 181 q = {"service": 'git-upload-pack'}
182 182 qs = '?%s' % urllib.urlencode(q)
183 183 cu = "%s%s" % (test_uri, qs)
184 184 req = urllib2.Request(cu, None, {})
185 185
186 186 try:
187 187 resp = o.open(req)
188 188 if resp.code != 200:
189 189 raise Exception('Return Code is not 200')
190 190 except Exception as e:
191 191 # means it cannot be cloned
192 192 raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
193 193
194 194 # now detect if it's proper git repo
195 195 gitdata = resp.read()
196 196 if 'service=git-upload-pack' in gitdata:
197 197 pass
198 198 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
199 199 # old style git can return some other format !
200 200 pass
201 201 else:
202 202 raise urllib2.URLError(
203 203 "url [%s] does not look like an git" % (cleaned_uri,))
204 204
205 205 return True
206 206
207 207 @reraise_safe_exceptions
208 208 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
209 209 remote_refs = self.fetch(wire, url, apply_refs=False)
210 210 repo = self._factory.repo(wire)
211 211 if isinstance(valid_refs, list):
212 212 valid_refs = tuple(valid_refs)
213 213
214 214 for k in remote_refs:
215 215 # only parse heads/tags and skip so called deferred tags
216 216 if k.startswith(valid_refs) and not k.endswith(deferred):
217 217 repo[k] = remote_refs[k]
218 218
219 219 if update_after_clone:
220 220 # we want to checkout HEAD
221 221 repo["HEAD"] = remote_refs["HEAD"]
222 222 index.build_index_from_tree(repo.path, repo.index_path(),
223 223 repo.object_store, repo["HEAD"].tree)
224 224
225 225 # TODO: this is quite complex, check if that can be simplified
226 226 @reraise_safe_exceptions
227 227 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
228 228 repo = self._factory.repo(wire)
229 229 object_store = repo.object_store
230 230
231 231 # Create tree and populates it with blobs
232 232 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
233 233
234 234 for node in updated:
235 235 # Compute subdirs if needed
236 236 dirpath, nodename = vcspath.split(node['path'])
237 237 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
238 238 parent = commit_tree
239 239 ancestors = [('', parent)]
240 240
241 241 # Tries to dig for the deepest existing tree
242 242 while dirnames:
243 243 curdir = dirnames.pop(0)
244 244 try:
245 245 dir_id = parent[curdir][1]
246 246 except KeyError:
247 247 # put curdir back into dirnames and stops
248 248 dirnames.insert(0, curdir)
249 249 break
250 250 else:
251 251 # If found, updates parent
252 252 parent = repo[dir_id]
253 253 ancestors.append((curdir, parent))
254 254 # Now parent is deepest existing tree and we need to create
255 255 # subtrees for dirnames (in reverse order)
256 256 # [this only applies for nodes from added]
257 257 new_trees = []
258 258
259 259 blob = objects.Blob.from_string(node['content'])
260 260
261 261 if dirnames:
262 262 # If there are trees which should be created we need to build
263 263 # them now (in reverse order)
264 264 reversed_dirnames = list(reversed(dirnames))
265 265 curtree = objects.Tree()
266 266 curtree[node['node_path']] = node['mode'], blob.id
267 267 new_trees.append(curtree)
268 268 for dirname in reversed_dirnames[:-1]:
269 269 newtree = objects.Tree()
270 270 newtree[dirname] = (DIR_STAT, curtree.id)
271 271 new_trees.append(newtree)
272 272 curtree = newtree
273 273 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
274 274 else:
275 275 parent.add(
276 276 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
277 277
278 278 new_trees.append(parent)
279 279 # Update ancestors
280 280 reversed_ancestors = reversed(
281 281 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
282 282 for parent, tree, path in reversed_ancestors:
283 283 parent[path] = (DIR_STAT, tree.id)
284 284 object_store.add_object(tree)
285 285
286 286 object_store.add_object(blob)
287 287 for tree in new_trees:
288 288 object_store.add_object(tree)
289 289
290 290 for node_path in removed:
291 291 paths = node_path.split('/')
292 292 tree = commit_tree
293 293 trees = [tree]
294 294 # Traverse deep into the forest...
295 295 for path in paths:
296 296 try:
297 297 obj = repo[tree[path][1]]
298 298 if isinstance(obj, objects.Tree):
299 299 trees.append(obj)
300 300 tree = obj
301 301 except KeyError:
302 302 break
303 303 # Cut down the blob and all rotten trees on the way back...
304 304 for path, tree in reversed(zip(paths, trees)):
305 305 del tree[path]
306 306 if tree:
307 307 # This tree still has elements - don't remove it or any
308 308 # of it's parents
309 309 break
310 310
311 311 object_store.add_object(commit_tree)
312 312
313 313 # Create commit
314 314 commit = objects.Commit()
315 315 commit.tree = commit_tree.id
316 316 for k, v in commit_data.iteritems():
317 317 setattr(commit, k, v)
318 318 object_store.add_object(commit)
319 319
320 320 ref = 'refs/heads/%s' % branch
321 321 repo.refs[ref] = commit.id
322 322
323 323 return commit.id
324 324
    @reraise_safe_exceptions
    def fetch(self, wire, url, apply_refs=True, refs=None):
        """
        Fetch from ``url`` into the repository described by ``wire``.

        :param apply_refs: when True, write the fetched refs into the local
            repo and return None; when False, return the remote refs mapping
            without touching local refs.
        :param refs: optional list of ref names restricting what is fetched.
        """
        # Local paths (not a URL and not the special 'default' alias) go
        # through dulwich's LocalGitClient.
        if url != 'default' and '://' not in url:
            client = LocalGitClient(url)
        else:
            # NOTE(review): hg_url is mercurial's URL parser - presumably
            # reused here only to split out auth info; confirm.
            url_obj = hg_url(url)
            o = self._build_opener(url)
            url, _ = url_obj.authinfo()
            client = HttpGitClient(base_url=url, opener=o)
        repo = self._factory.repo(wire)

        determine_wants = repo.object_store.determine_wants_all
        if refs:
            # Restrict the fetch to objects reachable from the requested refs.
            def determine_wants_requested(references):
                return [references[r] for r in references if r in refs]
            determine_wants = determine_wants_requested

        try:
            remote_refs = client.fetch(
                path=url, target=repo, determine_wants=determine_wants)
        except NotGitRepository:
            log.warning(
                'Trying to fetch from "%s" failed, not a Git repository.', url)
            raise exceptions.AbortException()

        # mikhail: client.fetch() returns all the remote refs, but fetches only
        # refs filtered by `determine_wants` function. We need to filter result
        # as well
        if refs:
            remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

        if apply_refs:
            # TODO: johbo: Needs proper test coverage with a git repository
            # that contains a tag object, so that we would end up with
            # a peeled ref at this point.
            PEELED_REF_MARKER = '^{}'
            for k in remote_refs:
                if k.endswith(PEELED_REF_MARKER):
                    log.info("Skipping peeled reference %s", k)
                    continue
                repo[k] = remote_refs[k]

            if refs:
                # mikhail: explicitly set the head to the last ref.
                repo['HEAD'] = remote_refs[refs[-1]]

            # TODO: mikhail: should we return remote_refs here to be
            # consistent?
        else:
            return remote_refs
375 375
376 376 @reraise_safe_exceptions
377 377 def get_remote_refs(self, wire, url):
378 378 repo = Repo(url)
379 379 return repo.get_refs()
380 380
381 381 @reraise_safe_exceptions
382 382 def get_description(self, wire):
383 383 repo = self._factory.repo(wire)
384 384 return repo.get_description()
385 385
386 386 @reraise_safe_exceptions
387 387 def get_file_history(self, wire, file_path, commit_id, limit):
388 388 repo = self._factory.repo(wire)
389 389 include = [commit_id]
390 390 paths = [file_path]
391 391
392 392 walker = repo.get_walker(include, paths=paths, max_entries=limit)
393 393 return [x.commit.id for x in walker]
394 394
    @reraise_safe_exceptions
    def get_missing_revs(self, wire, rev1, rev2, path2):
        """
        Return commit ids reachable from ``rev2`` but not from ``rev1``,
        after cross-fetching objects between this repo and ``path2``.
        """
        repo = self._factory.repo(wire)
        # Pull objects from the other repository into ours ...
        LocalGitClient(thin_packs=False).fetch(path2, repo)

        # ... and ours into the other one, so the walker can resolve both revs.
        wire_remote = wire.copy()
        wire_remote['path'] = path2
        repo_remote = self._factory.repo(wire_remote)
        LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)

        revs = [
            x.commit.id
            for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
        return revs
409 409
410 410 @reraise_safe_exceptions
411 411 def get_object(self, wire, sha):
412 412 repo = self._factory.repo(wire)
413 413 obj = repo.get_object(sha)
414 414 commit_id = obj.id
415 415
416 416 if isinstance(obj, Tag):
417 417 commit_id = obj.object[1]
418 418
419 419 return {
420 420 'id': obj.id,
421 421 'type': obj.type_name,
422 422 'commit_id': commit_id
423 423 }
424 424
425 425 @reraise_safe_exceptions
426 426 def get_object_attrs(self, wire, sha, *attrs):
427 427 repo = self._factory.repo(wire)
428 428 obj = repo.get_object(sha)
429 429 return list(getattr(obj, a) for a in attrs)
430 430
431 431 @reraise_safe_exceptions
432 def get_refs(self, wire, keys=None):
433 # FIXME(skreft): this method is affected by bug
434 # http://bugs.rhodecode.com/issues/298.
435 # Basically, it will overwrite previously computed references if
436 # there's another one with the same name and given the order of
437 # repo.get_refs() is not guaranteed, the output of this method is not
438 # stable either.
432 def get_refs(self, wire):
439 433 repo = self._factory.repo(wire)
440 refs = repo.get_refs()
441 if keys is None:
442 return refs
443 434
444 _refs = {}
445 for ref, sha in refs.iteritems():
446 for k, type_ in keys:
447 if ref.startswith(k):
448 _key = ref[len(k):]
449 if type_ == 'T':
450 sha = repo.get_object(sha).id
451 _refs[_key] = [sha, type_]
452 break
453 return _refs
435 repo.refs._peeled_refs
436 result = {}
437 for ref, sha in repo.refs.as_dict().items():
438 peeled_sha = repo.refs._peeled_refs.get(ref, sha)
439 result[ref] = peeled_sha
440 return result
454 441
    @reraise_safe_exceptions
    def get_refs_path(self, wire):
        """Return the filesystem path of the refs container."""
        repo = self._factory.repo(wire)
        return repo.refs.path

    @reraise_safe_exceptions
    def head(self, wire):
        """Return the sha the repository HEAD points at."""
        repo = self._factory.repo(wire)
        return repo.head()

    @reraise_safe_exceptions
    def init(self, wire):
        """Initialize a new non-bare repository at wire['path']."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init(repo_path)

    @reraise_safe_exceptions
    def init_bare(self, wire):
        """Initialize a new bare repository at wire['path']."""
        repo_path = str_to_dulwich(wire['path'])
        self.repo = Repo.init_bare(repo_path)

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """Return a dict with the object's id and, when present, its tree."""
        repo = self._factory.repo(wire)
        obj = repo[rev]
        obj_data = {
            'id': obj.id,
        }
        try:
            # Only commit objects carry a tree; skip it for tags/blobs.
            obj_data['tree'] = obj.tree
        except AttributeError:
            pass
        return obj_data

    @reraise_safe_exceptions
    def commit_attribute(self, wire, rev, attr):
        """Return a single attribute of the object at ``rev``."""
        repo = self._factory.repo(wire)
        obj = repo[rev]
        return getattr(obj, attr)

    @reraise_safe_exceptions
    def set_refs(self, wire, key, value):
        """Point ref ``key`` at sha ``value``."""
        repo = self._factory.repo(wire)
        repo.refs[key] = value

    @reraise_safe_exceptions
    def remove_ref(self, wire, key):
        """Delete ref ``key`` from the repository."""
        repo = self._factory.repo(wire)
        del repo.refs[key]
503 490
504 491 @reraise_safe_exceptions
505 492 def tree_changes(self, wire, source_id, target_id):
506 493 repo = self._factory.repo(wire)
507 494 source = repo[source_id].tree if source_id else None
508 495 target = repo[target_id].tree
509 496 result = repo.object_store.tree_changes(source, target)
510 497 return list(result)
511 498
512 499 @reraise_safe_exceptions
513 500 def tree_items(self, wire, tree_id):
514 501 repo = self._factory.repo(wire)
515 502 tree = repo[tree_id]
516 503
517 504 result = []
518 505 for item in tree.iteritems():
519 506 item_sha = item.sha
520 507 item_mode = item.mode
521 508
522 509 if FILE_MODE(item_mode) == GIT_LINK:
523 510 item_type = "link"
524 511 else:
525 512 item_type = repo[item_sha].type_name
526 513
527 514 result.append((item.path, item_mode, item_sha, item_type))
528 515 return result
529 516
    @reraise_safe_exceptions
    def update_server_info(self, wire):
        """Regenerate the auxiliary files needed for dumb-HTTP serving."""
        repo = self._factory.repo(wire)
        update_server_info(repo)

    @reraise_safe_exceptions
    def discover_git_version(self):
        """Return the raw output of ``git --version``."""
        stdout, _ = self.run_git_command(
            {}, ['--version'], _bare=True, _safe=True)
        return stdout
540 527
    @reraise_safe_exceptions
    def run_git_command(self, wire, cmd, **opts):
        """
        Run the git executable with ``cmd`` arguments and return
        ``(stdout, stderr)``.

        Special keyword options (removed before being passed on):
          _bare - skip the default ``-c core.quotepath=false`` options.
          _safe - on failure return ('', err) instead of raising.
          extra_env - dict merged into the subprocess environment.
        """
        path = wire.get('path', None)

        if path and os.path.isdir(path):
            opts['cwd'] = path

        if '_bare' in opts:
            _copts = []
            del opts['_bare']
        else:
            _copts = ['-c', 'core.quotepath=false', ]
        safe_call = False
        if '_safe' in opts:
            # no exc on failure
            del opts['_safe']
            safe_call = True

        gitenv = os.environ.copy()
        gitenv.update(opts.pop('extra_env', {}))
        # need to clean fix GIT_DIR !
        if 'GIT_DIR' in gitenv:
            del gitenv['GIT_DIR']
        # Keep the call independent of the user's global git config.
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'

        cmd = [settings.GIT_EXECUTABLE] + _copts + cmd

        try:
            _opts = {'env': gitenv, 'shell': False}
            # Remaining opts are forwarded verbatim to the chunker.
            _opts.update(opts)
            p = subprocessio.SubprocessIOChunker(cmd, **_opts)

            return ''.join(p), ''.join(p.error)
        except (EnvironmentError, OSError) as err:
            tb_err = ("Couldn't run git command (%s).\n"
                      "Original error was:%s\n" % (cmd, err))
            log.exception(tb_err)
            if safe_call:
                return '', err
            else:
                raise exceptions.VcsException(tb_err)
582 569
583 570
def str_to_dulwich(value):
    """
    Dulwich 0.10.1a requires `unicode` objects to be passed in.
    """
    # Decode with the wire encoding configured for the whole service.
    return value.decode(settings.WIRE_ENCODING)
@@ -1,700 +1,707 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import sys
22 22 import urllib
23 23 import urllib2
24 24
25 25 from hgext import largefiles, rebase
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex, hg_url,
34 34 httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepository,
35 35 match, memctx, exchange, memfilectx, nullrev, patch, peer, revrange, ui,
36 36 Abort, LookupError, RepoError, RepoLookupError, InterventionRequired,
37 37 RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
def make_ui_from_config(repo_config):
    """
    Build a fresh mercurial ``ui`` object from ``(section, option, value)``
    triples, with service-wide defaults applied on top.
    """
    baseui = ui.ui()

    # clean the baseui object
    baseui._ocfg = hgconfig.config()
    baseui._ucfg = hgconfig.config()
    baseui._tcfg = hgconfig.config()

    for section, option, value in repo_config:
        baseui.setconfig(section, option, value)

    # make our hgweb quiet so it doesn't print output
    baseui.setconfig('ui', 'quiet', 'true')

    # force mercurial to only use 1 thread, otherwise it may try to set a
    # signal in a non-main thread, thus generating a ValueError.
    baseui.setconfig('worker', 'numcpus', 1)

    # If there is no config for the largefiles extension, we explicitly disable
    # it here. This overrides settings from repositories hgrc file. Recent
    # mercurial versions enable largefiles in hgrc on clone from largefile
    # repo.
    if not baseui.hasconfig('extensions', 'largefiles'):
        log.debug('Explicitly disable largefiles extension for repo.')
        baseui.setconfig('extensions', 'largefiles', '!')

    return baseui
69 69
70 70
def reraise_safe_exceptions(func):
    """Decorator for converting mercurial exceptions to something neutral."""
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        # NOTE(review): the clause order appears deliberate - more specific
        # mercurial errors (e.g. RepoLookupError) are listed before their
        # presumed bases (RepoError/LookupError); confirm against hgcompat
        # before reordering.
        except (Abort, InterventionRequired):
            raise_from_original(exceptions.AbortException)
        except RepoLookupError:
            raise_from_original(exceptions.LookupException)
        except RequirementError:
            raise_from_original(exceptions.RequirementException)
        except RepoError:
            raise_from_original(exceptions.VcsException)
        except LookupError:
            raise_from_original(exceptions.LookupException)
        except Exception as e:
            # Anything without a _vcs_kind marker is unexpected - log it.
            if not hasattr(e, '_vcs_kind'):
                log.exception("Unhandled exception in hg remote call")
                raise_from_original(exceptions.UnhandledException)
            raise
    return wrapper
92 92
93 93
def raise_from_original(new_type):
    """
    Raise a new exception type with original args and traceback.
    """
    _, original, traceback = sys.exc_info()
    try:
        # Python 2 three-expression raise: re-raise under the new type with
        # the original traceback attached.
        raise new_type(*original.args), None, traceback
    finally:
        # Drop the traceback reference to avoid a reference cycle.
        del traceback
103 103
104 104
class MercurialFactory(RepoFactory):
    """Factory building ``localrepository`` objects from a wire dict."""

    def _create_config(self, config, hooks=True):
        """
        Build a mercurial ui from ``(section, option, value)`` triples.
        When ``hooks`` is False, RhodeCode's push/pull hooks are removed
        from the configuration first.
        """
        if not hooks:
            hooks_to_clean = frozenset((
                'changegroup.repo_size', 'preoutgoing.pre_pull',
                'outgoing.pull_logger', 'prechangegroup.pre_push'))
            config = [
                (section, option, value)
                for section, option, value in config
                if not (section == 'hooks' and option in hooks_to_clean)]

        return make_ui_from_config(config)

    def _create_repo(self, wire, create):
        baseui = self._create_config(wire["config"])
        return localrepository(baseui, wire["path"], create)
125 125
126 126
class HgRemote(object):
    # Remote interface exposing mercurial operations to the vcs clients.

    def __init__(self, factory):
        """Store the repo factory and register bulk-fetchable attributes."""
        self._factory = factory

        # Attribute name -> handler, consumed by bulk_request().
        self._bulk_methods = {
            "affected_files": self.ctx_files,
            "author": self.ctx_user,
            "branch": self.ctx_branch,
            "children": self.ctx_children,
            "date": self.ctx_date,
            "message": self.ctx_description,
            "parents": self.ctx_parents,
            "status": self.ctx_status,
            "_file_paths": self.ctx_list,
        }
143 143
144 144 @reraise_safe_exceptions
145 145 def archive_repo(self, archive_path, mtime, file_info, kind):
146 146 if kind == "tgz":
147 147 archiver = archival.tarit(archive_path, mtime, "gz")
148 148 elif kind == "tbz2":
149 149 archiver = archival.tarit(archive_path, mtime, "bz2")
150 150 elif kind == 'zip':
151 151 archiver = archival.zipit(archive_path, mtime)
152 152 else:
153 153 raise exceptions.ArchiveException(
154 154 'Remote does not support: "%s".' % kind)
155 155
156 156 for f_path, f_mode, f_is_link, f_content in file_info:
157 157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 158 archiver.done()
159 159
    @reraise_safe_exceptions
    def bookmarks(self, wire):
        """Return a plain dict of the repository bookmarks."""
        repo = self._factory.repo(wire)
        return dict(repo._bookmarks)

    @reraise_safe_exceptions
    def branches(self, wire, normal, closed):
        """
        Return branch-name -> tip mapping; ``normal``/``closed`` select
        which branch states are included.
        """
        repo = self._factory.repo(wire)
        iter_branches = repo.branchmap().iterbranches()
        bt = {}
        for branch_name, _heads, tip, is_closed in iter_branches:
            if normal and not is_closed:
                bt[branch_name] = tip
            if closed and is_closed:
                bt[branch_name] = tip

        return bt
177 177
178 178 @reraise_safe_exceptions
179 179 def bulk_request(self, wire, rev, pre_load):
180 180 result = {}
181 181 for attr in pre_load:
182 182 try:
183 183 method = self._bulk_methods[attr]
184 184 result[attr] = method(wire, rev)
185 185 except KeyError:
186 186 raise exceptions.VcsException(
187 187 'Unknown bulk attribute: "%s"' % attr)
188 188 return result
189 189
    @reraise_safe_exceptions
    def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
        """Clone ``source`` into ``dest``; optionally update the working dir."""
        baseui = self._factory._create_config(wire["config"], hooks=hooks)
        clone(baseui, source, dest, noupdate=not update_after_clone)

    @reraise_safe_exceptions
    def commitctx(
            self, wire, message, parents, commit_time, commit_timezone,
            user, files, extra, removed, updated):
        """
        Create an in-memory commit from the given file changes and return
        its hex node id.
        """

        def _filectxfn(_repo, memctx, path):
            """
            Marks given path as added/changed/removed in a given _repo. This is
            for internal mercurial commit function.
            """

            # check if this path is removed
            if path in removed:
                # returning None is a way to mark node for removal
                return None

            # check if this path is added
            for node in updated:
                if node['path'] == path:
                    return memfilectx(
                        _repo,
                        path=node['path'],
                        data=node['content'],
                        islink=False,
                        isexec=bool(node['mode'] & stat.S_IXUSR),
                        copied=False,
                        memctx=memctx)

            raise exceptions.AbortException(
                "Given path haven't been marked as added, "
                "changed or removed (%s)" % path)

        repo = self._factory.repo(wire)

        commit_ctx = memctx(
            repo=repo,
            parents=parents,
            text=message,
            files=files,
            filectxfn=_filectxfn,
            user=user,
            date=(commit_time, commit_timezone),
            extra=extra)

        n = repo.commitctx(commit_ctx)
        new_id = hex(n)

        return new_id
243 243
    @reraise_safe_exceptions
    def ctx_branch(self, wire, revision):
        """Return the branch name of ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return ctx.branch()

    @reraise_safe_exceptions
    def ctx_children(self, wire, revision):
        """Return the revision numbers of the children of ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return [child.rev() for child in ctx.children()]

    @reraise_safe_exceptions
    def ctx_date(self, wire, revision):
        """Return the (timestamp, tz-offset) date of ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return ctx.date()

    @reraise_safe_exceptions
    def ctx_description(self, wire, revision):
        """Return the commit message of ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return ctx.description()

    @reraise_safe_exceptions
    def ctx_diff(
            self, wire, revision, git=True, ignore_whitespace=True, context=3):
        """Return the diff of ``revision`` against its parent as chunks."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        result = ctx.diff(
            git=git, ignore_whitespace=ignore_whitespace, context=context)
        return list(result)

    @reraise_safe_exceptions
    def ctx_files(self, wire, revision):
        """Return the files touched by ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return ctx.files()

    @reraise_safe_exceptions
    def ctx_list(self, path, revision):
        """Return all file paths present in ``revision``."""
        # NOTE(review): first parameter is named ``path`` but is used like
        # the ``wire`` dict elsewhere - confirm callers before renaming.
        repo = self._factory.repo(path)
        ctx = repo[revision]
        return list(ctx)

    @reraise_safe_exceptions
    def ctx_parents(self, wire, revision):
        """Return the revision numbers of the parents of ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return [parent.rev() for parent in ctx.parents()]

    @reraise_safe_exceptions
    def ctx_substate(self, wire, revision):
        """Return the subrepository state of ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return ctx.substate

    @reraise_safe_exceptions
    def ctx_status(self, wire, revision):
        """Return the status of ``revision`` against its first parent."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        status = repo[ctx.p1().node()].status(other=ctx.node())
        # object of status (odd, custom named tuple in mercurial) is not
        # correctly serializable via Pyro, we make it a list, as the underling
        # API expects this to be a list
        return list(status)

    @reraise_safe_exceptions
    def ctx_user(self, wire, revision):
        """Return the author of ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        return ctx.user()
316 316
    @reraise_safe_exceptions
    def check_url(self, url, config):
        """
        Verify that ``url`` is reachable and is a mercurial repository.

        Supports ``proto+url`` prefixes (the prefix is stripped); for
        ``svn+...`` URLs only reachability is checked, not repo type.
        Raises exceptions.URLError on failure, returns True on success.
        """
        log.info("Checking URL for remote cloning/import: %s", url)
        _proto = None
        if '+' in url[:url.find('://')]:
            _proto = url[0:url.find('+')]
            url = url[url.find('+') + 1:]
        handlers = []
        url_obj = hg_url(url)
        test_uri, authinfo = url_obj.authinfo()
        # Mask credentials for any URL echoed back in error messages.
        url_obj.passwd = '*****'
        cleaned_uri = str(url_obj)

        if authinfo:
            # create a password manager
            passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
            passmgr.add_password(*authinfo)

            handlers.extend((httpbasicauthhandler(passmgr),
                             httpdigestauthhandler(passmgr)))

        o = urllib2.build_opener(*handlers)
        o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
                        ('Accept', 'application/mercurial-0.1')]

        # Issue a cheap hg wire-protocol 'between' command as a probe.
        q = {"cmd": 'between'}
        q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
        qs = '?%s' % urllib.urlencode(q)
        cu = "%s%s" % (test_uri, qs)
        req = urllib2.Request(cu, None, {})

        try:
            log.debug("Trying to open URL %s", url)
            resp = o.open(req)
            if resp.code != 200:
                raise exceptions.URLError('Return Code is not 200')
        except Exception as e:
            log.warning("URL cannot be opened: %s", url, exc_info=True)
            # means it cannot be cloned
            raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))

        # now check if it's a proper hg repo, but don't do it for svn
        try:
            if _proto == 'svn':
                pass
            else:
                # check for pure hg repos
                log.debug(
                    "Verifying if URL is a Mercurial repository: %s", url)
                httppeer(make_ui_from_config(config), url).lookup('tip')
        except Exception as e:
            log.warning("URL is not a valid Mercurial repository: %s", url)
            raise exceptions.URLError(
                "url [%s] does not look like an hg repo org_exc: %s"
                % (cleaned_uri, e))

        log.info("URL is a valid Mercurial repository: %s", url)
        return True
368 375
369 376 @reraise_safe_exceptions
370 377 def diff(
371 378 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
372 379 context):
373 380 repo = self._factory.repo(wire)
374 381
375 382 if file_filter:
376 383 filter = match(file_filter[0], '', [file_filter[1]])
377 384 else:
378 385 filter = file_filter
379 386 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
380 387
381 388 try:
382 389 return "".join(patch.diff(
383 390 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
384 391 except RepoLookupError:
385 392 raise exceptions.LookupException()
386 393
    @reraise_safe_exceptions
    def file_history(self, wire, revision, path, limit):
        """
        Return hex node ids of filelog entries for ``path`` up to
        ``revision``, at most ``limit`` entries.
        """
        repo = self._factory.repo(wire)

        ctx = repo[revision]
        fctx = ctx.filectx(path)

        def history_iter():
            limit_rev = fctx.rev()
            for obj in reversed(list(fctx.filelog())):
                obj = fctx.filectx(obj)
                # Skip filelog entries newer than the requested revision.
                if limit_rev >= obj.rev():
                    yield obj

        history = []
        for cnt, obj in enumerate(history_iter()):
            if limit and cnt >= limit:
                break
            history.append(hex(obj.node()))

        return [x for x in history]

    @reraise_safe_exceptions
    def file_history_untill(self, wire, revision, path, limit):
        # NOTE(review): the method name keeps its original typo ("untill")
        # because it is part of the wire interface used by callers.
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        fctx = ctx.filectx(path)

        file_log = list(fctx.filelog())
        if limit:
            # Limit to the last n items
            file_log = file_log[-limit:]

        return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
421 428
    @reraise_safe_exceptions
    def fctx_annotate(self, wire, revision, path):
        """Return (line_no, sha, line) annotation tuples for ``path``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        fctx = ctx.filectx(path)

        result = []
        for i, annotate_data in enumerate(fctx.annotate()):
            ln_no = i + 1
            sha = hex(annotate_data[0].node())
            result.append((ln_no, sha, annotate_data[1]))
        return result

    @reraise_safe_exceptions
    def fctx_data(self, wire, revision, path):
        """Return the raw content of ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        fctx = ctx.filectx(path)
        return fctx.data()

    @reraise_safe_exceptions
    def fctx_flags(self, wire, revision, path):
        """Return the flags (e.g. exec/link) of ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        fctx = ctx.filectx(path)
        return fctx.flags()

    @reraise_safe_exceptions
    def fctx_size(self, wire, revision, path):
        """Return the size in bytes of ``path`` at ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        fctx = ctx.filectx(path)
        return fctx.size()
455 462
456 463 @reraise_safe_exceptions
457 464 def get_all_commit_ids(self, wire, name):
458 465 repo = self._factory.repo(wire)
459 466 revs = repo.filtered(name).changelog.index
460 467 return map(lambda x: hex(x[7]), revs)[:-1]
461 468
462 469 @reraise_safe_exceptions
463 470 def get_config_value(self, wire, section, name, untrusted=False):
464 471 repo = self._factory.repo(wire)
465 472 return repo.ui.config(section, name, untrusted=untrusted)
466 473
467 474 @reraise_safe_exceptions
468 475 def get_config_bool(self, wire, section, name, untrusted=False):
469 476 repo = self._factory.repo(wire)
470 477 return repo.ui.configbool(section, name, untrusted=untrusted)
471 478
472 479 @reraise_safe_exceptions
473 480 def get_config_list(self, wire, section, name, untrusted=False):
474 481 repo = self._factory.repo(wire)
475 482 return repo.ui.configlist(section, name, untrusted=untrusted)
476 483
    @reraise_safe_exceptions
    def is_large_file(self, wire, path):
        """Return True if ``path`` is a largefiles standin."""
        return largefiles.lfutil.isstandin(path)

    @reraise_safe_exceptions
    def in_store(self, wire, sha):
        """Return True if the largefile ``sha`` is in the repo store."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.instore(repo, sha)

    @reraise_safe_exceptions
    def in_user_cache(self, wire, sha):
        """Return True if the largefile ``sha`` is in the user cache."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.inusercache(repo.ui, sha)

    @reraise_safe_exceptions
    def store_path(self, wire, sha):
        """Return the store path of largefile ``sha``."""
        repo = self._factory.repo(wire)
        return largefiles.lfutil.storepath(repo, sha)

    @reraise_safe_exceptions
    def link(self, wire, sha, path):
        """Link the user-cached largefile ``sha`` into ``path``."""
        repo = self._factory.repo(wire)
        largefiles.lfutil.link(
            largefiles.lfutil.usercachepath(repo.ui, sha), path)
501 508
    @reraise_safe_exceptions
    def localrepository(self, wire, create=False):
        """Instantiate (and optionally create) the repository for ``wire``."""
        self._factory.repo(wire, create=create)

    @reraise_safe_exceptions
    def lookup(self, wire, revision, both):
        """
        Resolve ``revision`` to its hex id; with ``both`` also return the
        local revision number.
        """
        # TODO Paris: Ugly hack to "deserialize" long for msgpack
        if isinstance(revision, float):
            revision = long(revision)
        repo = self._factory.repo(wire)
        try:
            ctx = repo[revision]
        except RepoLookupError:
            raise exceptions.LookupException(revision)
        except LookupError as e:
            raise exceptions.LookupException(e.name)

        if not both:
            return ctx.hex()

        ctx = repo[ctx.hex()]
        return ctx.hex(), ctx.rev()
524 531
    @reraise_safe_exceptions
    def pull(self, wire, url, commit_ids=None):
        """
        Pull from ``url``; optionally limited to the given commit ids.
        Returns the changegroup result code.
        """
        repo = self._factory.repo(wire)
        remote = peer(repo, {}, url)
        if commit_ids:
            # exchange.pull expects binary node ids.
            commit_ids = [bin(commit_id) for commit_id in commit_ids]

        return exchange.pull(
            repo, remote, heads=commit_ids, force=None).cgresult

    @reraise_safe_exceptions
    def revision(self, wire, rev):
        """Return the local revision number of ``rev``."""
        repo = self._factory.repo(wire)
        ctx = repo[rev]
        return ctx.rev()
540 547
541 548 @reraise_safe_exceptions
542 549 def rev_range(self, wire, filter):
543 550 repo = self._factory.repo(wire)
544 551 revisions = [rev for rev in revrange(repo, filter)]
545 552 return revisions
546 553
547 554 @reraise_safe_exceptions
548 555 def rev_range_hash(self, wire, node):
549 556 repo = self._factory.repo(wire)
550 557
551 558 def get_revs(repo, rev_opt):
552 559 if rev_opt:
553 560 revs = revrange(repo, rev_opt)
554 561 if len(revs) == 0:
555 562 return (nullrev, nullrev)
556 563 return max(revs), min(revs)
557 564 else:
558 565 return len(repo) - 1, 0
559 566
560 567 stop, start = get_revs(repo, [node + ':'])
561 568 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
562 569 return revs
563 570
    @reraise_safe_exceptions
    def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
        """
        Evaluate ``rev_spec``; when ``other_path`` points at a different
        repository, evaluate against a union of both repos.
        """
        other_path = kwargs.pop('other_path', None)

        # case when we want to compare two independent repositories
        if other_path and other_path != wire["path"]:
            baseui = self._factory._create_config(wire["config"])
            repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
        else:
            repo = self._factory.repo(wire)
        return list(repo.revs(rev_spec, *args))

    @reraise_safe_exceptions
    def strip(self, wire, revision, update, backup):
        """Strip ``revision`` (and descendants) via the strip extension."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        hgext_strip(
            repo.baseui, repo, ctx.node(), update=update, backup=backup)
582 589
    @reraise_safe_exceptions
    def tag(self, wire, name, revision, message, local, user,
            tag_time, tag_timezone):
        """Create a (possibly local) tag ``name`` on ``revision``."""
        repo = self._factory.repo(wire)
        ctx = repo[revision]
        node = ctx.node()

        date = (tag_time, tag_timezone)
        try:
            repo.tag(name, node, message, local, user, date)
        except Abort:
            log.exception("Tag operation aborted")
            raise exceptions.AbortException()

    @reraise_safe_exceptions
    def tags(self, wire):
        """Return the repository's tag mapping."""
        repo = self._factory.repo(wire)
        return repo.tags()

    @reraise_safe_exceptions
    def update(self, wire, node=None, clean=False):
        """Update the working directory to ``node``."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.update(baseui, repo, node=node, clean=clean)

    @reraise_safe_exceptions
    def identify(self, wire):
        """Return the full id of the working directory parent."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()
        # Capture the command's ui output instead of printing it.
        baseui.write = output.write
        # This is required to get a full node id
        baseui.debugflag = True
        commands.identify(baseui, repo, id=True)

        return output.getvalue()
619 626
    @reraise_safe_exceptions
    def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
                 hooks=True):
        """Run ``hg pull`` from ``source`` with optional selectors."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)

        # Mercurial internally has a lot of logic that checks ONLY if
        # option is defined, we just pass those if they are defined then
        opts = {}
        if bookmark:
            opts['bookmark'] = bookmark
        if branch:
            opts['branch'] = branch
        if revision:
            opts['rev'] = revision

        commands.pull(baseui, repo, source, **opts)

    @reraise_safe_exceptions
    def heads(self, wire, branch=None):
        """Return space-separated head node ids, optionally per branch."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        # Capture the command's ui output instead of printing it.
        baseui.write = write
        if branch:
            args = [branch]
        else:
            args = []
        commands.heads(baseui, repo, template='{node} ', *args)

        return output.getvalue()

    @reraise_safe_exceptions
    def ancestor(self, wire, revision1, revision2):
        """Return the common ancestor of two revisions."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        output = io.BytesIO()
        baseui.write = output.write
        commands.debugancestor(baseui, repo, revision1, revision2)

        return output.getvalue()
665 672
    @reraise_safe_exceptions
    def push(self, wire, revisions, dest_path, hooks=True,
             push_branches=False):
        """Push ``revisions`` to ``dest_path``."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'], hooks=hooks)
        commands.push(baseui, repo, dest=dest_path, rev=revisions,
                      new_branch=push_branches)

    @reraise_safe_exceptions
    def merge(self, wire, revision):
        """Merge ``revision`` into the working directory."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # Never open an interactive merge tool on the server side.
        repo.ui.setconfig('ui', 'merge', 'internal:dump')
        commands.merge(baseui, repo, rev=revision)

    @reraise_safe_exceptions
    def commit(self, wire, message, username):
        """Commit the working directory state as ``username``."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        repo.ui.setconfig('ui', 'username', username)
        commands.commit(baseui, repo, message=message)

    @reraise_safe_exceptions
    def rebase(self, wire, source=None, dest=None, abort=False):
        """Run (or abort) a rebase of ``source`` onto ``dest``."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        # Never open an interactive merge tool on the server side.
        repo.ui.setconfig('ui', 'merge', 'internal:dump')
        rebase.rebase(
            baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)

    @reraise_safe_exceptions
    def bookmark(self, wire, bookmark, revision=None):
        """Create or move ``bookmark`` to ``revision``."""
        repo = self._factory.repo(wire)
        baseui = self._factory._create_config(wire['config'])
        commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
General Comments 0
You need to be logged in to leave comments. Login now