release: Merge default into stable for release preparation
milka
r897:2904bd35 merge stable
@@ -1,5 +1,5 @@
1 1 [bumpversion]
2 current_version = 4.22.0
2 current_version = 4.23.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
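
The message template above is expanded by bumpversion with the old and new version strings; a minimal sketch of that substitution in plain Python (the real tool also rewrites the files listed in the [bumpversion:file:...] sections):

    # How the bumpversion commit-message template expands; illustrative only.
    template = "release: Bump version {current_version} to {new_version}"
    print(template.format(current_version="4.22.0", new_version="4.23.0"))
    # -> release: Bump version 4.22.0 to 4.23.0
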
@@ -1,16 +1,14 @@
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.22.0
12 state = in_progress
13 version = 4.23.0
16 14
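
Since the release-tracking file is plain INI with a [DEFAULT] done = false fallback, the still-open tasks can be listed with the standard library; a small sketch, assuming the file is saved as release.ini (the actual file name is not shown in this diff):

    # List unfinished [task:*] sections from the release-tracking INI.
    import ConfigParser  # Python 2 stdlib; named configparser on Python 3

    parser = ConfigParser.ConfigParser()
    parser.read('release.ini')  # assumed file name
    for section in parser.sections():
        if section.startswith('task:') and not parser.getboolean(section, 'done'):
            print('pending: %s' % section)

With the hunk above applied, task:fixes_on_stable and task:pip2nix_generated lose their done = true overrides and fall back to the [DEFAULT] value, so they report as pending while the release is in_progress.
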
@@ -1,1090 +1,1090 @@
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.3.0";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-19.3.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "cffi" = super.buildPythonPackage {
52 52 name = "cffi-1.12.3";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."pycparser"
56 56 ];
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "configobj" = super.buildPythonPackage {
66 66 name = "configobj-5.0.6";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."six"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
73 73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.bsdOriginal ];
77 77 };
78 78 };
79 79 "configparser" = super.buildPythonPackage {
80 80 name = "configparser-4.0.2";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.mit ];
88 88 };
89 89 };
90 90 "contextlib2" = super.buildPythonPackage {
91 91 name = "contextlib2-0.6.0.post1";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.psfl ];
99 99 };
100 100 };
101 101 "cov-core" = super.buildPythonPackage {
102 102 name = "cov-core-1.15.0";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."coverage"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
109 109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.mit ];
113 113 };
114 114 };
115 115 "coverage" = super.buildPythonPackage {
116 116 name = "coverage-4.5.4";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.asl20 ];
124 124 };
125 125 };
126 126 "decorator" = super.buildPythonPackage {
127 127 name = "decorator-4.1.2";
128 128 doCheck = false;
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
131 131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
135 135 };
136 136 };
137 137 "dogpile.cache" = super.buildPythonPackage {
138 138 name = "dogpile.cache-0.9.0";
139 139 doCheck = false;
140 140 propagatedBuildInputs = [
141 141 self."decorator"
142 142 ];
143 143 src = fetchurl {
144 144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
146 146 };
147 147 meta = {
148 148 license = [ pkgs.lib.licenses.bsdOriginal ];
149 149 };
150 150 };
151 151 "dogpile.core" = super.buildPythonPackage {
152 152 name = "dogpile.core-0.4.1";
153 153 doCheck = false;
154 154 src = fetchurl {
155 155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
156 156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
157 157 };
158 158 meta = {
159 159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 160 };
161 161 };
162 162 "dulwich" = super.buildPythonPackage {
163 163 name = "dulwich-0.13.0";
164 164 doCheck = false;
165 165 src = fetchurl {
166 166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
168 168 };
169 169 meta = {
170 170 license = [ pkgs.lib.licenses.gpl2Plus ];
171 171 };
172 172 };
173 173 "enum34" = super.buildPythonPackage {
174 174 name = "enum34-1.1.10";
175 175 doCheck = false;
176 176 src = fetchurl {
177 177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
178 178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 "funcsigs" = super.buildPythonPackage {
185 185 name = "funcsigs-1.0.2";
186 186 doCheck = false;
187 187 src = fetchurl {
188 188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
189 189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
190 190 };
191 191 meta = {
192 192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
193 193 };
194 194 };
195 195 "gevent" = super.buildPythonPackage {
196 196 name = "gevent-1.5.0";
197 197 doCheck = false;
198 198 propagatedBuildInputs = [
199 199 self."greenlet"
200 200 ];
201 201 src = fetchurl {
202 202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
203 203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
204 204 };
205 205 meta = {
206 206 license = [ pkgs.lib.licenses.mit ];
207 207 };
208 208 };
209 209 "gprof2dot" = super.buildPythonPackage {
210 210 name = "gprof2dot-2017.9.19";
211 211 doCheck = false;
212 212 src = fetchurl {
213 213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
214 214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
215 215 };
216 216 meta = {
217 217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
218 218 };
219 219 };
220 220 "greenlet" = super.buildPythonPackage {
221 221 name = "greenlet-0.4.15";
222 222 doCheck = false;
223 223 src = fetchurl {
224 224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
225 225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
226 226 };
227 227 meta = {
228 228 license = [ pkgs.lib.licenses.mit ];
229 229 };
230 230 };
231 231 "gunicorn" = super.buildPythonPackage {
232 232 name = "gunicorn-19.9.0";
233 233 doCheck = false;
234 234 src = fetchurl {
235 235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
236 236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
237 237 };
238 238 meta = {
239 239 license = [ pkgs.lib.licenses.mit ];
240 240 };
241 241 };
242 242 "hg-evolve" = super.buildPythonPackage {
243 243 name = "hg-evolve-9.1.0";
244 244 doCheck = false;
245 245 src = fetchurl {
246 246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
248 248 };
249 249 meta = {
250 250 license = [ { fullName = "GPLv2+"; } ];
251 251 };
252 252 };
253 253 "hgsubversion" = super.buildPythonPackage {
254 254 name = "hgsubversion-1.9.3";
255 255 doCheck = false;
256 256 propagatedBuildInputs = [
257 257 self."mercurial"
258 258 self."subvertpy"
259 259 ];
260 260 src = fetchurl {
261 261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
262 262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
263 263 };
264 264 meta = {
265 265 license = [ pkgs.lib.licenses.gpl1 ];
266 266 };
267 267 };
268 268 "hupper" = super.buildPythonPackage {
269 269 name = "hupper-1.10.2";
270 270 doCheck = false;
271 271 src = fetchurl {
272 272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
273 273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
274 274 };
275 275 meta = {
276 276 license = [ pkgs.lib.licenses.mit ];
277 277 };
278 278 };
279 279 "importlib-metadata" = super.buildPythonPackage {
280 280 name = "importlib-metadata-1.6.0";
281 281 doCheck = false;
282 282 propagatedBuildInputs = [
283 283 self."zipp"
284 284 self."pathlib2"
285 285 self."contextlib2"
286 286 self."configparser"
287 287 ];
288 288 src = fetchurl {
289 289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
290 290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
291 291 };
292 292 meta = {
293 293 license = [ pkgs.lib.licenses.asl20 ];
294 294 };
295 295 };
296 296 "ipdb" = super.buildPythonPackage {
297 297 name = "ipdb-0.13.2";
298 298 doCheck = false;
299 299 propagatedBuildInputs = [
300 300 self."setuptools"
301 301 self."ipython"
302 302 ];
303 303 src = fetchurl {
304 304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
305 305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
306 306 };
307 307 meta = {
308 308 license = [ pkgs.lib.licenses.bsdOriginal ];
309 309 };
310 310 };
311 311 "ipython" = super.buildPythonPackage {
312 312 name = "ipython-5.1.0";
313 313 doCheck = false;
314 314 propagatedBuildInputs = [
315 315 self."setuptools"
316 316 self."decorator"
317 317 self."pickleshare"
318 318 self."simplegeneric"
319 319 self."traitlets"
320 320 self."prompt-toolkit"
321 321 self."pygments"
322 322 self."pexpect"
323 323 self."backports.shutil-get-terminal-size"
324 324 self."pathlib2"
325 325 self."pexpect"
326 326 ];
327 327 src = fetchurl {
328 328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
329 329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
330 330 };
331 331 meta = {
332 332 license = [ pkgs.lib.licenses.bsdOriginal ];
333 333 };
334 334 };
335 335 "ipython-genutils" = super.buildPythonPackage {
336 336 name = "ipython-genutils-0.2.0";
337 337 doCheck = false;
338 338 src = fetchurl {
339 339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
340 340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
341 341 };
342 342 meta = {
343 343 license = [ pkgs.lib.licenses.bsdOriginal ];
344 344 };
345 345 };
346 346 "mako" = super.buildPythonPackage {
347 347 name = "mako-1.1.0";
348 348 doCheck = false;
349 349 propagatedBuildInputs = [
350 350 self."markupsafe"
351 351 ];
352 352 src = fetchurl {
353 353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
355 355 };
356 356 meta = {
357 357 license = [ pkgs.lib.licenses.mit ];
358 358 };
359 359 };
360 360 "markupsafe" = super.buildPythonPackage {
361 361 name = "markupsafe-1.1.1";
362 362 doCheck = false;
363 363 src = fetchurl {
364 364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
366 366 };
367 367 meta = {
368 368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
369 369 };
370 370 };
371 371 "mercurial" = super.buildPythonPackage {
372 372 name = "mercurial-5.1.1";
373 373 doCheck = false;
374 374 src = fetchurl {
375 375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
380 380 };
381 381 };
382 382 "mock" = super.buildPythonPackage {
383 383 name = "mock-3.0.5";
384 384 doCheck = false;
385 385 propagatedBuildInputs = [
386 386 self."six"
387 387 self."funcsigs"
388 388 ];
389 389 src = fetchurl {
390 390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
392 392 };
393 393 meta = {
394 394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
395 395 };
396 396 };
397 397 "more-itertools" = super.buildPythonPackage {
398 398 name = "more-itertools-5.0.0";
399 399 doCheck = false;
400 400 propagatedBuildInputs = [
401 401 self."six"
402 402 ];
403 403 src = fetchurl {
404 404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
405 405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
406 406 };
407 407 meta = {
408 408 license = [ pkgs.lib.licenses.mit ];
409 409 };
410 410 };
411 411 "msgpack-python" = super.buildPythonPackage {
412 412 name = "msgpack-python-0.5.6";
413 413 doCheck = false;
414 414 src = fetchurl {
415 415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
416 416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
417 417 };
418 418 meta = {
419 419 license = [ pkgs.lib.licenses.asl20 ];
420 420 };
421 421 };
422 422 "packaging" = super.buildPythonPackage {
423 423 name = "packaging-20.3";
424 424 doCheck = false;
425 425 propagatedBuildInputs = [
426 426 self."pyparsing"
427 427 self."six"
428 428 ];
429 429 src = fetchurl {
430 430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
431 431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
432 432 };
433 433 meta = {
434 434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 435 };
436 436 };
437 437 "pastedeploy" = super.buildPythonPackage {
438 438 name = "pastedeploy-2.1.0";
439 439 doCheck = false;
440 440 src = fetchurl {
441 441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
442 442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
443 443 };
444 444 meta = {
445 445 license = [ pkgs.lib.licenses.mit ];
446 446 };
447 447 };
448 448 "pathlib2" = super.buildPythonPackage {
449 449 name = "pathlib2-2.3.5";
450 450 doCheck = false;
451 451 propagatedBuildInputs = [
452 452 self."six"
453 453 self."scandir"
454 454 ];
455 455 src = fetchurl {
456 456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
458 458 };
459 459 meta = {
460 460 license = [ pkgs.lib.licenses.mit ];
461 461 };
462 462 };
463 463 "pexpect" = super.buildPythonPackage {
464 464 name = "pexpect-4.8.0";
465 465 doCheck = false;
466 466 propagatedBuildInputs = [
467 467 self."ptyprocess"
468 468 ];
469 469 src = fetchurl {
470 470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
471 471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
472 472 };
473 473 meta = {
474 474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
475 475 };
476 476 };
477 477 "pickleshare" = super.buildPythonPackage {
478 478 name = "pickleshare-0.7.5";
479 479 doCheck = false;
480 480 propagatedBuildInputs = [
481 481 self."pathlib2"
482 482 ];
483 483 src = fetchurl {
484 484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
485 485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
486 486 };
487 487 meta = {
488 488 license = [ pkgs.lib.licenses.mit ];
489 489 };
490 490 };
491 491 "plaster" = super.buildPythonPackage {
492 492 name = "plaster-1.0";
493 493 doCheck = false;
494 494 propagatedBuildInputs = [
495 495 self."setuptools"
496 496 ];
497 497 src = fetchurl {
498 498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
499 499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
500 500 };
501 501 meta = {
502 502 license = [ pkgs.lib.licenses.mit ];
503 503 };
504 504 };
505 505 "plaster-pastedeploy" = super.buildPythonPackage {
506 506 name = "plaster-pastedeploy-0.7";
507 507 doCheck = false;
508 508 propagatedBuildInputs = [
509 509 self."pastedeploy"
510 510 self."plaster"
511 511 ];
512 512 src = fetchurl {
513 513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
514 514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
515 515 };
516 516 meta = {
517 517 license = [ pkgs.lib.licenses.mit ];
518 518 };
519 519 };
520 520 "pluggy" = super.buildPythonPackage {
521 521 name = "pluggy-0.13.1";
522 522 doCheck = false;
523 523 propagatedBuildInputs = [
524 524 self."importlib-metadata"
525 525 ];
526 526 src = fetchurl {
527 527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
529 529 };
530 530 meta = {
531 531 license = [ pkgs.lib.licenses.mit ];
532 532 };
533 533 };
534 534 "prompt-toolkit" = super.buildPythonPackage {
535 535 name = "prompt-toolkit-1.0.18";
536 536 doCheck = false;
537 537 propagatedBuildInputs = [
538 538 self."six"
539 539 self."wcwidth"
540 540 ];
541 541 src = fetchurl {
542 542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
544 544 };
545 545 meta = {
546 546 license = [ pkgs.lib.licenses.bsdOriginal ];
547 547 };
548 548 };
549 549 "psutil" = super.buildPythonPackage {
550 550 name = "psutil-5.7.0";
551 551 doCheck = false;
552 552 src = fetchurl {
553 553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
554 554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
555 555 };
556 556 meta = {
557 557 license = [ pkgs.lib.licenses.bsdOriginal ];
558 558 };
559 559 };
560 560 "ptyprocess" = super.buildPythonPackage {
561 561 name = "ptyprocess-0.6.0";
562 562 doCheck = false;
563 563 src = fetchurl {
564 564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
565 565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
566 566 };
567 567 meta = {
568 568 license = [ ];
569 569 };
570 570 };
571 571 "py" = super.buildPythonPackage {
572 572 name = "py-1.8.0";
573 573 doCheck = false;
574 574 src = fetchurl {
575 575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
577 577 };
578 578 meta = {
579 579 license = [ pkgs.lib.licenses.mit ];
580 580 };
581 581 };
582 582 "pycparser" = super.buildPythonPackage {
583 583 name = "pycparser-2.20";
584 584 doCheck = false;
585 585 src = fetchurl {
586 586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
587 587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
588 588 };
589 589 meta = {
590 590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 591 };
592 592 };
593 593 "pygit2" = super.buildPythonPackage {
594 594 name = "pygit2-0.28.2";
595 595 doCheck = false;
596 596 propagatedBuildInputs = [
597 597 self."cffi"
598 598 self."six"
599 599 ];
600 600 src = fetchurl {
601 601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 603 };
604 604 meta = {
605 605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 606 };
607 607 };
608 608 "pygments" = super.buildPythonPackage {
609 609 name = "pygments-2.4.2";
610 610 doCheck = false;
611 611 src = fetchurl {
612 612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
613 613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
614 614 };
615 615 meta = {
616 616 license = [ pkgs.lib.licenses.bsdOriginal ];
617 617 };
618 618 };
619 619 "pyparsing" = super.buildPythonPackage {
620 620 name = "pyparsing-2.4.7";
621 621 doCheck = false;
622 622 src = fetchurl {
623 623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
624 624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
625 625 };
626 626 meta = {
627 627 license = [ pkgs.lib.licenses.mit ];
628 628 };
629 629 };
630 630 "pyramid" = super.buildPythonPackage {
631 631 name = "pyramid-1.10.4";
632 632 doCheck = false;
633 633 propagatedBuildInputs = [
634 634 self."hupper"
635 635 self."plaster"
636 636 self."plaster-pastedeploy"
637 637 self."setuptools"
638 638 self."translationstring"
639 639 self."venusian"
640 640 self."webob"
641 641 self."zope.deprecation"
642 642 self."zope.interface"
643 643 self."repoze.lru"
644 644 ];
645 645 src = fetchurl {
646 646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
647 647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
648 648 };
649 649 meta = {
650 650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
651 651 };
652 652 };
653 653 "pyramid-mako" = super.buildPythonPackage {
654 654 name = "pyramid-mako-1.1.0";
655 655 doCheck = false;
656 656 propagatedBuildInputs = [
657 657 self."pyramid"
658 658 self."mako"
659 659 ];
660 660 src = fetchurl {
661 661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
663 663 };
664 664 meta = {
665 665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
666 666 };
667 667 };
668 668 "pytest" = super.buildPythonPackage {
669 669 name = "pytest-4.6.5";
670 670 doCheck = false;
671 671 propagatedBuildInputs = [
672 672 self."py"
673 673 self."six"
674 674 self."packaging"
675 675 self."attrs"
676 676 self."atomicwrites"
677 677 self."pluggy"
678 678 self."importlib-metadata"
679 679 self."wcwidth"
680 680 self."funcsigs"
681 681 self."pathlib2"
682 682 self."more-itertools"
683 683 ];
684 684 src = fetchurl {
685 685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
687 687 };
688 688 meta = {
689 689 license = [ pkgs.lib.licenses.mit ];
690 690 };
691 691 };
692 692 "pytest-cov" = super.buildPythonPackage {
693 693 name = "pytest-cov-2.7.1";
694 694 doCheck = false;
695 695 propagatedBuildInputs = [
696 696 self."pytest"
697 697 self."coverage"
698 698 ];
699 699 src = fetchurl {
700 700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
702 702 };
703 703 meta = {
704 704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
705 705 };
706 706 };
707 707 "pytest-profiling" = super.buildPythonPackage {
708 708 name = "pytest-profiling-1.7.0";
709 709 doCheck = false;
710 710 propagatedBuildInputs = [
711 711 self."six"
712 712 self."pytest"
713 713 self."gprof2dot"
714 714 ];
715 715 src = fetchurl {
716 716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
718 718 };
719 719 meta = {
720 720 license = [ pkgs.lib.licenses.mit ];
721 721 };
722 722 };
723 723 "pytest-runner" = super.buildPythonPackage {
724 724 name = "pytest-runner-5.1";
725 725 doCheck = false;
726 726 src = fetchurl {
727 727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
729 729 };
730 730 meta = {
731 731 license = [ pkgs.lib.licenses.mit ];
732 732 };
733 733 };
734 734 "pytest-sugar" = super.buildPythonPackage {
735 735 name = "pytest-sugar-0.9.2";
736 736 doCheck = false;
737 737 propagatedBuildInputs = [
738 738 self."pytest"
739 739 self."termcolor"
740 740 self."packaging"
741 741 ];
742 742 src = fetchurl {
743 743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
745 745 };
746 746 meta = {
747 747 license = [ pkgs.lib.licenses.bsdOriginal ];
748 748 };
749 749 };
750 750 "pytest-timeout" = super.buildPythonPackage {
751 751 name = "pytest-timeout-1.3.3";
752 752 doCheck = false;
753 753 propagatedBuildInputs = [
754 754 self."pytest"
755 755 ];
756 756 src = fetchurl {
757 757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
759 759 };
760 760 meta = {
761 761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
762 762 };
763 763 };
764 764 "redis" = super.buildPythonPackage {
765 765 name = "redis-3.4.1";
766 766 doCheck = false;
767 767 src = fetchurl {
768 768 url = "https://files.pythonhosted.org/packages/ef/2e/2c0f59891db7db087a7eeaa79bc7c7f2c039e71a2b5b0a41391e9d462926/redis-3.4.1.tar.gz";
769 769 sha256 = "07yaj0j9fs7xdkg5bg926fa990khyigjbp31si8ai20vj8sv7kqd";
770 770 };
771 771 meta = {
772 772 license = [ pkgs.lib.licenses.mit ];
773 773 };
774 774 };
775 775 "repoze.lru" = super.buildPythonPackage {
776 776 name = "repoze.lru-0.7";
777 777 doCheck = false;
778 778 src = fetchurl {
779 779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
780 780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
781 781 };
782 782 meta = {
783 783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
784 784 };
785 785 };
786 786 "rhodecode-vcsserver" = super.buildPythonPackage {
787 name = "rhodecode-vcsserver-4.22.0";
787 name = "rhodecode-vcsserver-4.23.0";
788 788 buildInputs = [
789 789 self."pytest"
790 790 self."py"
791 791 self."pytest-cov"
792 792 self."pytest-sugar"
793 793 self."pytest-runner"
794 794 self."pytest-profiling"
795 795 self."pytest-timeout"
796 796 self."gprof2dot"
797 797 self."mock"
798 798 self."cov-core"
799 799 self."coverage"
800 800 self."webtest"
801 801 self."beautifulsoup4"
802 802 self."configobj"
803 803 ];
804 804 doCheck = true;
805 805 propagatedBuildInputs = [
806 806 self."configobj"
807 807 self."dogpile.cache"
808 808 self."dogpile.core"
809 809 self."decorator"
810 810 self."dulwich"
811 811 self."hgsubversion"
812 812 self."hg-evolve"
813 813 self."mako"
814 814 self."markupsafe"
815 815 self."mercurial"
816 816 self."msgpack-python"
817 817 self."pastedeploy"
818 818 self."pyramid"
819 819 self."pyramid-mako"
820 820 self."pygit2"
821 821 self."repoze.lru"
822 822 self."redis"
823 823 self."simplejson"
824 824 self."subprocess32"
825 825 self."subvertpy"
826 826 self."six"
827 827 self."translationstring"
828 828 self."webob"
829 829 self."zope.deprecation"
830 830 self."zope.interface"
831 831 self."gevent"
832 832 self."greenlet"
833 833 self."gunicorn"
834 834 self."waitress"
835 835 self."ipdb"
836 836 self."ipython"
837 837 self."pytest"
838 838 self."py"
839 839 self."pytest-cov"
840 840 self."pytest-sugar"
841 841 self."pytest-runner"
842 842 self."pytest-profiling"
843 843 self."pytest-timeout"
844 844 self."gprof2dot"
845 845 self."mock"
846 846 self."cov-core"
847 847 self."coverage"
848 848 self."webtest"
849 849 self."beautifulsoup4"
850 850 ];
851 851 src = ./.;
852 852 meta = {
853 853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
854 854 };
855 855 };
856 856 "scandir" = super.buildPythonPackage {
857 857 name = "scandir-1.10.0";
858 858 doCheck = false;
859 859 src = fetchurl {
860 860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
861 861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
862 862 };
863 863 meta = {
864 864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
865 865 };
866 866 };
867 867 "setproctitle" = super.buildPythonPackage {
868 868 name = "setproctitle-1.1.10";
869 869 doCheck = false;
870 870 src = fetchurl {
871 871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
872 872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
873 873 };
874 874 meta = {
875 875 license = [ pkgs.lib.licenses.bsdOriginal ];
876 876 };
877 877 };
878 878 "setuptools" = super.buildPythonPackage {
879 879 name = "setuptools-44.1.0";
880 880 doCheck = false;
881 881 src = fetchurl {
882 882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
883 883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
884 884 };
885 885 meta = {
886 886 license = [ pkgs.lib.licenses.mit ];
887 887 };
888 888 };
889 889 "simplegeneric" = super.buildPythonPackage {
890 890 name = "simplegeneric-0.8.1";
891 891 doCheck = false;
892 892 src = fetchurl {
893 893 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
894 894 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
895 895 };
896 896 meta = {
897 897 license = [ pkgs.lib.licenses.zpl21 ];
898 898 };
899 899 };
900 900 "simplejson" = super.buildPythonPackage {
901 901 name = "simplejson-3.16.0";
902 902 doCheck = false;
903 903 src = fetchurl {
904 904 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
905 905 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
906 906 };
907 907 meta = {
908 908 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
909 909 };
910 910 };
911 911 "six" = super.buildPythonPackage {
912 912 name = "six-1.11.0";
913 913 doCheck = false;
914 914 src = fetchurl {
915 915 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
916 916 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
917 917 };
918 918 meta = {
919 919 license = [ pkgs.lib.licenses.mit ];
920 920 };
921 921 };
922 922 "subprocess32" = super.buildPythonPackage {
923 923 name = "subprocess32-3.5.4";
924 924 doCheck = false;
925 925 src = fetchurl {
926 926 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
927 927 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
928 928 };
929 929 meta = {
930 930 license = [ pkgs.lib.licenses.psfl ];
931 931 };
932 932 };
933 933 "subvertpy" = super.buildPythonPackage {
934 934 name = "subvertpy-0.10.1";
935 935 doCheck = false;
936 936 src = fetchurl {
937 937 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
938 938 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
939 939 };
940 940 meta = {
941 941 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
942 942 };
943 943 };
944 944 "termcolor" = super.buildPythonPackage {
945 945 name = "termcolor-1.1.0";
946 946 doCheck = false;
947 947 src = fetchurl {
948 948 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
949 949 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
950 950 };
951 951 meta = {
952 952 license = [ pkgs.lib.licenses.mit ];
953 953 };
954 954 };
955 955 "traitlets" = super.buildPythonPackage {
956 956 name = "traitlets-4.3.3";
957 957 doCheck = false;
958 958 propagatedBuildInputs = [
959 959 self."ipython-genutils"
960 960 self."six"
961 961 self."decorator"
962 962 self."enum34"
963 963 ];
964 964 src = fetchurl {
965 965 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
966 966 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
967 967 };
968 968 meta = {
969 969 license = [ pkgs.lib.licenses.bsdOriginal ];
970 970 };
971 971 };
972 972 "translationstring" = super.buildPythonPackage {
973 973 name = "translationstring-1.3";
974 974 doCheck = false;
975 975 src = fetchurl {
976 976 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
977 977 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
978 978 };
979 979 meta = {
980 980 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
981 981 };
982 982 };
983 983 "venusian" = super.buildPythonPackage {
984 984 name = "venusian-1.2.0";
985 985 doCheck = false;
986 986 src = fetchurl {
987 987 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
988 988 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
989 989 };
990 990 meta = {
991 991 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
992 992 };
993 993 };
994 994 "waitress" = super.buildPythonPackage {
995 995 name = "waitress-1.3.1";
996 996 doCheck = false;
997 997 src = fetchurl {
998 998 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
999 999 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1000 1000 };
1001 1001 meta = {
1002 1002 license = [ pkgs.lib.licenses.zpl21 ];
1003 1003 };
1004 1004 };
1005 1005 "wcwidth" = super.buildPythonPackage {
1006 1006 name = "wcwidth-0.1.9";
1007 1007 doCheck = false;
1008 1008 src = fetchurl {
1009 1009 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1010 1010 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1011 1011 };
1012 1012 meta = {
1013 1013 license = [ pkgs.lib.licenses.mit ];
1014 1014 };
1015 1015 };
1016 1016 "webob" = super.buildPythonPackage {
1017 1017 name = "webob-1.8.5";
1018 1018 doCheck = false;
1019 1019 src = fetchurl {
1020 1020 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1021 1021 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1022 1022 };
1023 1023 meta = {
1024 1024 license = [ pkgs.lib.licenses.mit ];
1025 1025 };
1026 1026 };
1027 1027 "webtest" = super.buildPythonPackage {
1028 1028 name = "webtest-2.0.34";
1029 1029 doCheck = false;
1030 1030 propagatedBuildInputs = [
1031 1031 self."six"
1032 1032 self."webob"
1033 1033 self."waitress"
1034 1034 self."beautifulsoup4"
1035 1035 ];
1036 1036 src = fetchurl {
1037 1037 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1038 1038 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1039 1039 };
1040 1040 meta = {
1041 1041 license = [ pkgs.lib.licenses.mit ];
1042 1042 };
1043 1043 };
1044 1044 "zipp" = super.buildPythonPackage {
1045 1045 name = "zipp-1.2.0";
1046 1046 doCheck = false;
1047 1047 propagatedBuildInputs = [
1048 1048 self."contextlib2"
1049 1049 ];
1050 1050 src = fetchurl {
1051 1051 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1052 1052 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1053 1053 };
1054 1054 meta = {
1055 1055 license = [ pkgs.lib.licenses.mit ];
1056 1056 };
1057 1057 };
1058 1058 "zope.deprecation" = super.buildPythonPackage {
1059 1059 name = "zope.deprecation-4.4.0";
1060 1060 doCheck = false;
1061 1061 propagatedBuildInputs = [
1062 1062 self."setuptools"
1063 1063 ];
1064 1064 src = fetchurl {
1065 1065 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1066 1066 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1067 1067 };
1068 1068 meta = {
1069 1069 license = [ pkgs.lib.licenses.zpl21 ];
1070 1070 };
1071 1071 };
1072 1072 "zope.interface" = super.buildPythonPackage {
1073 1073 name = "zope.interface-4.6.0";
1074 1074 doCheck = false;
1075 1075 propagatedBuildInputs = [
1076 1076 self."setuptools"
1077 1077 ];
1078 1078 src = fetchurl {
1079 1079 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1080 1080 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1081 1081 };
1082 1082 meta = {
1083 1083 license = [ pkgs.lib.licenses.zpl21 ];
1084 1084 };
1085 1085 };
1086 1086
1087 1087 ### Test requirements
1088 1088
1089 1089
1090 1090 }
@@ -1,1 +1,1 @@
1 4.22.0
\ No newline at end of file
1 4.23.0
\ No newline at end of file
@@ -1,76 +1,130 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 import os
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 from vcsserver import exceptions
24 from vcsserver.exceptions import NoContentException
25 from vcsserver.hgcompat import (archival)
23 26 from vcsserver.lib.rc_cache import region_meta
24 27 log = logging.getLogger(__name__)
25 28
26 29
27 30 class RepoFactory(object):
28 31 """
29 32 Utility to create repository instances
30 33
31 34 It provides internal caching of the `repo` object based on
32 35 the :term:`call context`.
33 36 """
34 37 repo_type = None
35 38
36 39 def __init__(self):
37 40 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38 41
39 42 def _create_config(self, path, config):
40 43 config = {}
41 44 return config
42 45
43 46 def _create_repo(self, wire, create):
44 47 raise NotImplementedError()
45 48
46 49 def repo(self, wire, create=False):
47 50 raise NotImplementedError()
48 51
49 52
50 53 def obfuscate_qs(query_string):
51 54 if query_string is None:
52 55 return None
53 56
54 57 parsed = []
55 58 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
56 59 if k in ['auth_token', 'api_key']:
57 60 v = "*****"
58 61 parsed.append((k, v))
59 62
60 63 return '&'.join('{}{}'.format(
61 64 k, '={}'.format(v) if v else '') for k, v in parsed)
62 65
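
obfuscate_qs masks only the auth_token and api_key values and reassembles the rest of the query string unchanged (blank values keep their key but drop the =); for example:

    # Quick illustration of obfuscate_qs from above.
    qs = 'action=pull&auth_token=secret123&debug='
    print(obfuscate_qs(qs))
    # -> action=pull&auth_token=*****&debug
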
63 66
64 67 def raise_from_original(new_type):
65 68 """
66 69 Raise a new exception type with original args and traceback.
67 70 """
68 71 exc_type, exc_value, exc_traceback = sys.exc_info()
69 72 new_exc = new_type(*exc_value.args)
70 73 # store the original traceback into the new exc
71 74 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
72 75
73 76 try:
74 77 raise new_exc, None, exc_traceback
75 78 finally:
76 79 del exc_traceback
80
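
raise_from_original must run inside an except block, where sys.exc_info() is still populated; it rewraps the active exception under a new type while preserving args and traceback. A minimal usage sketch (Python 2 raise syntax, matching the module above):

    # Rewrap an active KeyError as a LookupError with the original traceback.
    try:
        {}['missing']
    except KeyError:
        raise_from_original(LookupError)
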
81
82 class ArchiveNode(object):
83 def __init__(self, path, mode, is_link, raw_bytes):
84 self.path = path
85 self.mode = mode
86 self.is_link = is_link
87 self.raw_bytes = raw_bytes
88
89
90 def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
91 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
92 """
93 walker should be a file walker, for example:
94 def walker(commit_id, archive_at_path):
95 for file_info in files:
96 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
97 """
98 extra_metadata = extra_metadata or {}
99
100 if kind == "tgz":
101 archiver = archival.tarit(archive_dest_path, mtime, "gz")
102 elif kind == "tbz2":
103 archiver = archival.tarit(archive_dest_path, mtime, "bz2")
104 elif kind == 'zip':
105 archiver = archival.zipit(archive_dest_path, mtime)
106 else:
107 raise exceptions.ArchiveException()(
108 'Remote does not support: "%s" archive type.' % kind)
109
110 for f in walker(commit_id, archive_at_path):
111 f_path = os.path.join(archive_dir_name, f.path.lstrip('/'))
112 try:
113 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
114 except NoContentException:
115 # NOTE(marcink): this is a special case for SVN so we can create "empty"
116 # directories which aren't supported by the archiver
117 archiver.addfile(os.path.join(f_path, '.dir'), f.mode, f.is_link, '')
118
119 if write_metadata:
120 metadata = dict([
121 ('commit_id', commit_id),
122 ('mtime', mtime),
123 ])
124 metadata.update(extra_metadata)
125
126 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
127 f_path = os.path.join(archive_dir_name, '.archival.txt')
128 archiver.addfile(f_path, 0o644, False, '\n'.join(meta))
129
130 return archiver.done()
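
A sketch of driving archive_repo with an in-memory walker. Note the loop above calls walker(commit_id, archive_at_path), so the walker must accept both arguments, and raw_bytes must be callable since the archiver invokes f.raw_bytes(). The paths, contents, and destination below are made up for illustration, and archival from vcsserver.hgcompat must be importable:

    import time

    files = [
        ('README.rst', 0o100644, False, lambda: 'hello\n'),
        ('docs/index.rst', 0o100644, False, lambda: 'index\n'),
    ]

    def walker(commit_id, archive_at_path):
        # A real walker would consult the repository at commit_id.
        for path, mode, is_link, raw_bytes in files:
            yield ArchiveNode(path, mode, is_link, raw_bytes)

    archive_repo(walker, '/tmp/repo.tgz', kind='tgz', mtime=time.time(),
                 archive_at_path='/', archive_dir_name='repo-v4.23.0',
                 commit_id='deadbeef' * 5, extra_metadata={'tag': 'v4.23.0'})
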
@@ -1,121 +1,125 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28 28
29 29
30 30 def _make_exception(kind, org_exc, *args):
31 31 """
32 32 Prepares a base `Exception` instance to be sent over the wire.
33 33
34 34 To give our caller a hint what this is about, it will attach an attribute
35 35 `_vcs_kind` to the exception.
36 36 """
37 37 exc = Exception(*args)
38 38 exc._vcs_kind = kind
39 39 exc._org_exc = org_exc
40 40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 41 return exc
42 42
43 43
44 44 def AbortException(org_exc=None):
45 45 def _make_exception_wrapper(*args):
46 46 return _make_exception('abort', org_exc, *args)
47 47 return _make_exception_wrapper
48 48
49 49
50 50 def ArchiveException(org_exc=None):
51 51 def _make_exception_wrapper(*args):
52 52 return _make_exception('archive', org_exc, *args)
53 53 return _make_exception_wrapper
54 54
55 55
56 56 def LookupException(org_exc=None):
57 57 def _make_exception_wrapper(*args):
58 58 return _make_exception('lookup', org_exc, *args)
59 59 return _make_exception_wrapper
60 60
61 61
62 62 def VcsException(org_exc=None):
63 63 def _make_exception_wrapper(*args):
64 64 return _make_exception('error', org_exc, *args)
65 65 return _make_exception_wrapper
66 66
67 67
68 68 def RepositoryLockedException(org_exc=None):
69 69 def _make_exception_wrapper(*args):
70 70 return _make_exception('repo_locked', org_exc, *args)
71 71 return _make_exception_wrapper
72 72
73 73
74 74 def RepositoryBranchProtectedException(org_exc=None):
75 75 def _make_exception_wrapper(*args):
76 76 return _make_exception('repo_branch_protected', org_exc, *args)
77 77 return _make_exception_wrapper
78 78
79 79
80 80 def RequirementException(org_exc=None):
81 81 def _make_exception_wrapper(*args):
82 82 return _make_exception('requirement', org_exc, *args)
83 83 return _make_exception_wrapper
84 84
85 85
86 86 def UnhandledException(org_exc=None):
87 87 def _make_exception_wrapper(*args):
88 88 return _make_exception('unhandled', org_exc, *args)
89 89 return _make_exception_wrapper
90 90
91 91
92 92 def URLError(org_exc=None):
93 93 def _make_exception_wrapper(*args):
94 94 return _make_exception('url_error', org_exc, *args)
95 95 return _make_exception_wrapper
96 96
97 97
98 98 def SubrepoMergeException(org_exc=None):
99 99 def _make_exception_wrapper(*args):
100 100 return _make_exception('subrepo_merge_error', org_exc, *args)
101 101 return _make_exception_wrapper
102 102
103 103
104 104 class HTTPRepoLocked(HTTPLocked):
105 105 """
106 106 Subclass of HTTPLocked response that allows setting the title and status
107 107 code via constructor arguments.
108 108 """
109 109 def __init__(self, title, status_code=None, **kwargs):
110 110 self.code = status_code or HTTPLocked.code
111 111 self.title = title
112 112 super(HTTPRepoLocked, self).__init__(**kwargs)
113 113
114 114
115 115 class HTTPRepoBranchProtected(HTTPForbidden):
116 116 def __init__(self, *args, **kwargs):
117 117 super(HTTPForbidden, self).__init__(*args, **kwargs)
118 118
119 119
120 120 class RefNotFoundException(KeyError):
121 121 pass
122
123
124 class NoContentException(ValueError):
125 pass
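
The factories above are used in two calls: the first (optionally carrying the original exception) returns a wrapper, and the second builds the plain Exception that travels over the wire; the receiving side dispatches on the _vcs_kind marker instead of importing server-side classes. A short round trip:

    # Wrap, raise, and dispatch without sharing exception classes.
    try:
        org = KeyError('unknown-ref')
        raise LookupException(org_exc=org)('Ref v9.9.9 not found')
    except Exception as e:
        if getattr(e, '_vcs_kind', None) == 'lookup':
            print('lookup failure: %s' % e)
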
@@ -1,1192 +1,1226 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib
26 26 import urllib2
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 33 from dulwich import index, objects
33 34 from dulwich.client import HttpGitClient, LocalGitClient
34 35 from dulwich.errors import (
35 36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 37 MissingCommitError, ObjectMissing, HangupException,
37 38 UnexpectedCommandError)
38 39 from dulwich.repo import Repo as DulwichRepo
39 40 from dulwich.server import update_server_info
40 41
41 42 from vcsserver import exceptions, settings, subprocessio
42 43 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.base import RepoFactory, obfuscate_qs
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 45 from vcsserver.hgcompat import (
45 46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 47 from vcsserver.git_lfs.lib import LFSOidStore
47 48 from vcsserver.vcs_base import RemoteBase
48 49
49 50 DIR_STAT = stat.S_IFDIR
50 51 FILE_MODE = stat.S_IFMT
51 52 GIT_LINK = objects.S_IFGITLINK
52 53 PEELED_REF_MARKER = '^{}'
53 54
54 55
55 56 log = logging.getLogger(__name__)
56 57
57 58
58 59 def str_to_dulwich(value):
59 60 """
60 61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 62 """
62 63 return value.decode(settings.WIRE_ENCODING)
63 64
64 65
65 66 def reraise_safe_exceptions(func):
66 67 """Converts Dulwich exceptions to something neutral."""
67 68
68 69 @wraps(func)
69 70 def wrapper(*args, **kwargs):
70 71 try:
71 72 return func(*args, **kwargs)
72 73 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 74 exc = exceptions.LookupException(org_exc=e)
74 75 raise exc(safe_str(e))
75 76 except (HangupException, UnexpectedCommandError) as e:
76 77 exc = exceptions.VcsException(org_exc=e)
77 78 raise exc(safe_str(e))
78 79 except Exception as e:
79 80 # NOTE(marcink): because of how dulwich handles some exceptions
80 81 # (KeyError on empty repos), we cannot track this and catch all
81 82 # exceptions; these are exceptions from other handlers
82 83 #if not hasattr(e, '_vcs_kind'):
83 84 #log.exception("Unhandled exception in git remote call")
84 85 #raise_from_original(exceptions.UnhandledException)
85 86 raise
86 87 return wrapper
87 88
88 89
89 90 class Repo(DulwichRepo):
90 91 """
91 92 A wrapper for dulwich Repo class.
92 93
93 94 Since dulwich sometimes keeps .idx file descriptors open, this can lead to
94 95 a "Too many open files" error. We need to close all opened file descriptors
95 96 once the repo object is destroyed.
96 97 """
97 98 def __del__(self):
98 99 if hasattr(self, 'object_store'):
99 100 self.close()
100 101
101 102
102 103 class Repository(LibGit2Repo):
103 104
104 105 def __enter__(self):
105 106 return self
106 107
107 108 def __exit__(self, exc_type, exc_val, exc_tb):
108 109 self.free()
109 110
110 111
111 112 class GitFactory(RepoFactory):
112 113 repo_type = 'git'
113 114
114 115 def _create_repo(self, wire, create, use_libgit2=False):
115 116 if use_libgit2:
116 117 return Repository(wire['path'])
117 118 else:
118 119 repo_path = str_to_dulwich(wire['path'])
119 120 return Repo(repo_path)
120 121
121 122 def repo(self, wire, create=False, use_libgit2=False):
122 123 """
123 124 Get a repository instance for the given path.
124 125 """
125 126 return self._create_repo(wire, create, use_libgit2)
126 127
127 128 def repo_libgit2(self, wire):
128 129 return self.repo(wire, use_libgit2=True)
129 130
130 131
131 132 class GitRemote(RemoteBase):
132 133
133 134 def __init__(self, factory):
134 135 self._factory = factory
135 136 self._bulk_methods = {
136 137 "date": self.date,
137 138 "author": self.author,
138 139 "branch": self.branch,
139 140 "message": self.message,
140 141 "parents": self.parents,
141 142 "_commit": self.revision,
142 143 }
143 144
144 145 def _wire_to_config(self, wire):
145 146 if 'config' in wire:
146 147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 148 return {}
148 149
149 150 def _remote_conf(self, config):
150 151 params = [
151 152 '-c', 'core.askpass=""',
152 153 ]
153 154 ssl_cert_dir = config.get('vcs_ssl_dir')
154 155 if ssl_cert_dir:
155 156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 157 return params
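Continuing the sample above, `_remote_conf` turns that flattened config into the extra `-c` options prepended to every remote git invocation:

    params = remote._remote_conf({'vcs_ssl_dir': '/etc/certs'})
    # -> ['-c', 'core.askpass=""', '-c', 'http.sslCAinfo=/etc/certs']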
157 158
158 159 @reraise_safe_exceptions
159 160 def discover_git_version(self):
160 161 stdout, _ = self.run_git_command(
161 162 {}, ['--version'], _bare=True, _safe=True)
162 163 prefix = 'git version'
163 164 if stdout.startswith(prefix):
164 165 stdout = stdout[len(prefix):]
165 166 return stdout.strip()
166 167
167 168 @reraise_safe_exceptions
168 169 def is_empty(self, wire):
169 170 repo_init = self._factory.repo_libgit2(wire)
170 171 with repo_init as repo:
171 172
172 173 try:
173 174 has_head = repo.head.name
174 175 if has_head:
175 176 return False
176 177
177 178 # NOTE(marcink): check again using more expensive method
178 179 return repo.is_empty
179 180 except Exception:
180 181 pass
181 182
182 183 return True
183 184
184 185 @reraise_safe_exceptions
185 186 def assert_correct_path(self, wire):
186 187 cache_on, context_uid, repo_id = self._cache_on(wire)
187 188 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 189 def _assert_correct_path(_context_uid, _repo_id):
189 190 try:
190 191 repo_init = self._factory.repo_libgit2(wire)
191 192 with repo_init as repo:
192 193 pass
193 194 except pygit2.GitError:
194 195 path = wire.get('path')
195 196 tb = traceback.format_exc()
196 197 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 198 return False
198 199
199 200 return True
200 201 return _assert_correct_path(context_uid, repo_id)
201 202
202 203 @reraise_safe_exceptions
203 204 def bare(self, wire):
204 205 repo_init = self._factory.repo_libgit2(wire)
205 206 with repo_init as repo:
206 207 return repo.is_bare
207 208
208 209 @reraise_safe_exceptions
209 210 def blob_as_pretty_string(self, wire, sha):
210 211 repo_init = self._factory.repo_libgit2(wire)
211 212 with repo_init as repo:
212 213 blob_obj = repo[sha]
213 214 blob = blob_obj.data
214 215 return blob
215 216
216 217 @reraise_safe_exceptions
217 218 def blob_raw_length(self, wire, sha):
218 219 cache_on, context_uid, repo_id = self._cache_on(wire)
219 220 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 221 def _blob_raw_length(_repo_id, _sha):
221 222
222 223 repo_init = self._factory.repo_libgit2(wire)
223 224 with repo_init as repo:
224 225 blob = repo[sha]
225 226 return blob.size
226 227
227 228 return _blob_raw_length(repo_id, sha)
228 229
229 230 def _parse_lfs_pointer(self, raw_content):
230 231
231 232 spec_string = 'version https://git-lfs.github.com/spec'
232 233 if raw_content and raw_content.startswith(spec_string):
233 234 pattern = re.compile(r"""
234 235 (?:\n)?
235 236 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 237 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 238 ^size[ ](?P<oid_size>[0-9]+)\n
238 239 (?:\n)?
239 240 """, re.VERBOSE | re.MULTILINE)
240 241 match = pattern.match(raw_content)
241 242 if match:
242 243 return match.groupdict()
243 244
244 245 return {}
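A Git LFS pointer is a small, fixed-format text blob; the regex above pulls out the spec version, the sha256 oid, and the object size. A hedged example (the all-zero oid is a placeholder; real oids are 64 hex chars):

    pointer = ('version https://git-lfs.github.com/spec/v1\n'
               'oid sha256:%s\n'
               'size 12345\n' % ('0' * 64))
    # self._parse_lfs_pointer(pointer) would return:
    # {'spec_ver': 'v1', 'oid_hash': '0' * 64, 'oid_size': '12345'}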
245 246
246 247 @reraise_safe_exceptions
247 248 def is_large_file(self, wire, commit_id):
248 249 cache_on, context_uid, repo_id = self._cache_on(wire)
249 250
250 251 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 252 def _is_large_file(_repo_id, _sha):
252 253 repo_init = self._factory.repo_libgit2(wire)
253 254 with repo_init as repo:
254 255 blob = repo[commit_id]
255 256 if blob.is_binary:
256 257 return {}
257 258
258 259 return self._parse_lfs_pointer(blob.data)
259 260
260 261 return _is_large_file(repo_id, commit_id)
261 262
262 263 @reraise_safe_exceptions
263 264 def is_binary(self, wire, tree_id):
264 265 cache_on, context_uid, repo_id = self._cache_on(wire)
265 266
266 267 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 268 def _is_binary(_repo_id, _tree_id):
268 269 repo_init = self._factory.repo_libgit2(wire)
269 270 with repo_init as repo:
270 271 blob_obj = repo[tree_id]
271 272 return blob_obj.is_binary
272 273
273 274 return _is_binary(repo_id, tree_id)
274 275
275 276 @reraise_safe_exceptions
276 277 def in_largefiles_store(self, wire, oid):
277 278 conf = self._wire_to_config(wire)
278 279 repo_init = self._factory.repo_libgit2(wire)
279 280 with repo_init as repo:
280 281 repo_name = repo.path
281 282
282 283 store_location = conf.get('vcs_git_lfs_store_location')
283 284 if store_location:
284 285
285 286 store = LFSOidStore(
286 287 oid=oid, repo=repo_name, store_location=store_location)
287 288 return store.has_oid()
288 289
289 290 return False
290 291
291 292 @reraise_safe_exceptions
292 293 def store_path(self, wire, oid):
293 294 conf = self._wire_to_config(wire)
294 295 repo_init = self._factory.repo_libgit2(wire)
295 296 with repo_init as repo:
296 297 repo_name = repo.path
297 298
298 299 store_location = conf.get('vcs_git_lfs_store_location')
299 300 if store_location:
300 301 store = LFSOidStore(
301 302 oid=oid, repo=repo_name, store_location=store_location)
302 303 return store.oid_path
303 304 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304 305
305 306 @reraise_safe_exceptions
306 307 def bulk_request(self, wire, rev, pre_load):
307 308 cache_on, context_uid, repo_id = self._cache_on(wire)
308 309 @self.region.conditional_cache_on_arguments(condition=cache_on)
309 310 def _bulk_request(_repo_id, _rev, _pre_load):
310 311 result = {}
311 312 for attr in pre_load:
312 313 try:
313 314 method = self._bulk_methods[attr]
314 315 args = [wire, rev]
315 316 result[attr] = method(*args)
316 317 except KeyError as e:
317 318 raise exceptions.VcsException(e)(
318 319 "Unknown bulk attribute: %s" % attr)
319 320 return result
320 321
321 322 return _bulk_request(repo_id, rev, sorted(pre_load))
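`pre_load` names the attributes to batch; each resolves through `_bulk_methods` to one of the remote methods above. A hedged usage sketch (`remote`, `wire` and `rev` stand in for real objects, and the returned values are illustrative):

    data = remote.bulk_request(wire, rev, pre_load=['author', 'date', 'message'])
    # -> {'author': u'Jane Doe <jane@example.com>',
    #     'date': [1600000000, 0],
    #     'message': 'commit message'}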
322 323
323 324 def _build_opener(self, url):
324 325 handlers = []
325 326 url_obj = url_parser(url)
326 327 _, authinfo = url_obj.authinfo()
327 328
328 329 if authinfo:
329 330 # create a password manager
330 331 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
331 332 passmgr.add_password(*authinfo)
332 333
333 334 handlers.extend((httpbasicauthhandler(passmgr),
334 335 httpdigestauthhandler(passmgr)))
335 336
336 337 return urllib2.build_opener(*handlers)
337 338
338 339 def _type_id_to_name(self, type_id):
339 340 return {
340 341 1: b'commit',
341 342 2: b'tree',
342 343 3: b'blob',
343 344 4: b'tag'
344 345 }[type_id]
345 346
346 347 @reraise_safe_exceptions
347 348 def check_url(self, url, config):
348 349 url_obj = url_parser(url)
349 350 test_uri, _ = url_obj.authinfo()
350 351 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
351 352 url_obj.query = obfuscate_qs(url_obj.query)
352 353 cleaned_uri = str(url_obj)
353 354 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
354 355
355 356 if not test_uri.endswith('info/refs'):
356 357 test_uri = test_uri.rstrip('/') + '/info/refs'
357 358
358 359 o = self._build_opener(url)
359 360 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake a git client User-Agent
360 361
361 362 q = {"service": 'git-upload-pack'}
362 363 qs = '?%s' % urllib.urlencode(q)
363 364 cu = "%s%s" % (test_uri, qs)
364 365 req = urllib2.Request(cu, None, {})
365 366
366 367 try:
367 368 log.debug("Trying to open URL %s", cleaned_uri)
368 369 resp = o.open(req)
369 370 if resp.code != 200:
370 371 raise exceptions.URLError()('Return Code is not 200')
371 372 except Exception as e:
372 373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 374 # means it cannot be cloned
374 375 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
375 376
376 377 # now detect if it's a proper git repo
377 378 gitdata = resp.read()
378 379 if 'service=git-upload-pack' in gitdata:
379 380 pass
380 381 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
381 382 # old-style git can return a different format!
382 383 pass
383 384 else:
384 385 raise exceptions.URLError()(
385 386 "url [%s] does not look like an git" % (cleaned_uri,))
386 387
387 388 return True
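In effect `check_url` probes the smart-HTTP endpoint the way a git client would; for an assumed URL like https://host/repo, the request goes to the info/refs advertisement:

    remote.check_url('https://host/repo', config)  # True, or raises URLError
    # probe sent: GET https://host/repo/info/refs?service=git-upload-pack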
388 389
389 390 @reraise_safe_exceptions
390 391 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
391 392 # TODO(marcink): deprecate this method. Last I checked we don't use it anymore
392 393 remote_refs = self.pull(wire, url, apply_refs=False)
393 394 repo = self._factory.repo(wire)
394 395 if isinstance(valid_refs, list):
395 396 valid_refs = tuple(valid_refs)
396 397
397 398 for k in remote_refs:
398 399 # only parse heads/tags and skip so-called deferred tags
399 400 if k.startswith(valid_refs) and not k.endswith(deferred):
400 401 repo[k] = remote_refs[k]
401 402
402 403 if update_after_clone:
403 404 # we want to checkout HEAD
404 405 repo["HEAD"] = remote_refs["HEAD"]
405 406 index.build_index_from_tree(repo.path, repo.index_path(),
406 407 repo.object_store, repo["HEAD"].tree)
407 408
408 409 @reraise_safe_exceptions
409 410 def branch(self, wire, commit_id):
410 411 cache_on, context_uid, repo_id = self._cache_on(wire)
411 412 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 413 def _branch(_context_uid, _repo_id, _commit_id):
413 414 regex = re.compile('^refs/heads')
414 415
415 416 def filter_with(ref):
416 417 return regex.match(ref[0]) and ref[1] == _commit_id
417 418
418 419 branches = filter(filter_with, self.get_refs(wire).items())
419 420 return [x[0].split('refs/heads/')[-1] for x in branches]
420 421
421 422 return _branch(context_uid, repo_id, commit_id)
422 423
423 424 @reraise_safe_exceptions
424 425 def commit_branches(self, wire, commit_id):
425 426 cache_on, context_uid, repo_id = self._cache_on(wire)
426 427 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 428 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 429 repo_init = self._factory.repo_libgit2(wire)
429 430 with repo_init as repo:
430 431 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 432 return branches
432 433
433 434 return _commit_branches(context_uid, repo_id, commit_id)
434 435
435 436 @reraise_safe_exceptions
436 437 def add_object(self, wire, content):
437 438 repo_init = self._factory.repo_libgit2(wire)
438 439 with repo_init as repo:
439 440 blob = objects.Blob()
440 441 blob.set_raw_string(content)
441 442 repo.object_store.add_object(blob)
442 443 return blob.id
443 444
444 445 # TODO: this is quite complex, check if that can be simplified
445 446 @reraise_safe_exceptions
446 447 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
447 448 repo = self._factory.repo(wire)
448 449 object_store = repo.object_store
449 450
450 451 # Create tree and populates it with blobs
451 452 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
452 453
453 454 for node in updated:
454 455 # Compute subdirs if needed
455 456 dirpath, nodename = vcspath.split(node['path'])
456 457 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
457 458 parent = commit_tree
458 459 ancestors = [('', parent)]
459 460
460 461 # Tries to dig for the deepest existing tree
461 462 while dirnames:
462 463 curdir = dirnames.pop(0)
463 464 try:
464 465 dir_id = parent[curdir][1]
465 466 except KeyError:
466 467 # put curdir back into dirnames and stop
467 468 dirnames.insert(0, curdir)
468 469 break
469 470 else:
470 471 # If found, updates parent
471 472 parent = repo[dir_id]
472 473 ancestors.append((curdir, parent))
473 474 # Now parent is deepest existing tree and we need to create
474 475 # subtrees for dirnames (in reverse order)
475 476 # [this only applies for nodes from added]
476 477 new_trees = []
477 478
478 479 blob = objects.Blob.from_string(node['content'])
479 480
480 481 if dirnames:
481 482 # If there are trees which should be created we need to build
482 483 # them now (in reverse order)
483 484 reversed_dirnames = list(reversed(dirnames))
484 485 curtree = objects.Tree()
485 486 curtree[node['node_path']] = node['mode'], blob.id
486 487 new_trees.append(curtree)
487 488 for dirname in reversed_dirnames[:-1]:
488 489 newtree = objects.Tree()
489 490 newtree[dirname] = (DIR_STAT, curtree.id)
490 491 new_trees.append(newtree)
491 492 curtree = newtree
492 493 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
493 494 else:
494 495 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
495 496
496 497 new_trees.append(parent)
497 498 # Update ancestors
498 499 reversed_ancestors = reversed(
499 500 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
500 501 for parent, tree, path in reversed_ancestors:
501 502 parent[path] = (DIR_STAT, tree.id)
502 503 object_store.add_object(tree)
503 504
504 505 object_store.add_object(blob)
505 506 for tree in new_trees:
506 507 object_store.add_object(tree)
507 508
508 509 for node_path in removed:
509 510 paths = node_path.split('/')
510 511 tree = commit_tree
511 512 trees = [tree]
512 513 # Traverse deep into the forest...
513 514 for path in paths:
514 515 try:
515 516 obj = repo[tree[path][1]]
516 517 if isinstance(obj, objects.Tree):
517 518 trees.append(obj)
518 519 tree = obj
519 520 except KeyError:
520 521 break
521 522 # Cut down the blob and all rotten trees on the way back...
522 523 for path, tree in reversed(zip(paths, trees)):
523 524 del tree[path]
524 525 if tree:
525 526 # This tree still has elements - don't remove it or any
526 527 # of its parents
527 528 break
528 529
529 530 object_store.add_object(commit_tree)
530 531
531 532 # Create commit
532 533 commit = objects.Commit()
533 534 commit.tree = commit_tree.id
534 535 for k, v in commit_data.iteritems():
535 536 setattr(commit, k, v)
536 537 object_store.add_object(commit)
537 538
538 539 self.create_branch(wire, branch, commit.id)
539 540
540 541 # dulwich set-ref
541 542 ref = 'refs/heads/%s' % branch
542 543 repo.refs[ref] = commit.id
543 544
544 545 return commit.id
545 546
546 547 @reraise_safe_exceptions
547 548 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
548 549 if url != 'default' and '://' not in url:
549 550 client = LocalGitClient(url)
550 551 else:
551 552 url_obj = url_parser(url)
552 553 o = self._build_opener(url)
553 554 url, _ = url_obj.authinfo()
554 555 client = HttpGitClient(base_url=url, opener=o)
555 556 repo = self._factory.repo(wire)
556 557
557 558 determine_wants = repo.object_store.determine_wants_all
558 559 if refs:
559 560 def determine_wants_requested(references):
560 561 return [references[r] for r in references if r in refs]
561 562 determine_wants = determine_wants_requested
562 563
563 564 try:
564 565 remote_refs = client.fetch(
565 566 path=url, target=repo, determine_wants=determine_wants)
566 567 except NotGitRepository as e:
567 568 log.warning(
568 569 'Trying to fetch from "%s" failed, not a Git repository.', url)
569 570 # the exception can contain unicode, which we convert
570 571 raise exceptions.AbortException(e)(repr(e))
571 572
572 573 # mikhail: client.fetch() returns all the remote refs, but fetches only
573 574 # refs filtered by `determine_wants` function. We need to filter result
574 575 # as well
575 576 if refs:
576 577 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
577 578
578 579 if apply_refs:
579 580 # TODO: johbo: Needs proper test coverage with a git repository
580 581 # that contains a tag object, so that we would end up with
581 582 # a peeled ref at this point.
582 583 for k in remote_refs:
583 584 if k.endswith(PEELED_REF_MARKER):
584 585 log.debug("Skipping peeled reference %s", k)
585 586 continue
586 587 repo[k] = remote_refs[k]
587 588
588 589 if refs and not update_after:
589 590 # mikhail: explicitly set the head to the last ref.
590 591 repo['HEAD'] = remote_refs[refs[-1]]
591 592
592 593 if update_after:
593 594 # we want to checkout HEAD
594 595 repo["HEAD"] = remote_refs["HEAD"]
595 596 index.build_index_from_tree(repo.path, repo.index_path(),
596 597 repo.object_store, repo["HEAD"].tree)
597 598 return remote_refs
598 599
599 600 @reraise_safe_exceptions
600 601 def sync_fetch(self, wire, url, refs=None, all_refs=False):
601 602 repo = self._factory.repo(wire)
602 603 if refs and not isinstance(refs, (list, tuple)):
603 604 refs = [refs]
604 605
605 606 config = self._wire_to_config(wire)
606 607 # get all remote refs we'll use to fetch later
607 608 cmd = ['ls-remote']
608 609 if not all_refs:
609 610 cmd += ['--heads', '--tags']
610 611 cmd += [url]
611 612 output, __ = self.run_git_command(
612 613 wire, cmd, fail_on_stderr=False,
613 614 _copts=self._remote_conf(config),
614 615 extra_env={'GIT_TERMINAL_PROMPT': '0'})
615 616
616 617 remote_refs = collections.OrderedDict()
617 618 fetch_refs = []
618 619
619 620 for ref_line in output.splitlines():
620 621 sha, ref = ref_line.split('\t')
621 622 sha = sha.strip()
622 623 if ref in remote_refs:
623 624 # duplicate, skip
624 625 continue
625 626 if ref.endswith(PEELED_REF_MARKER):
626 627 log.debug("Skipping peeled reference %s", ref)
627 628 continue
628 629 # don't sync HEAD
629 630 if ref in ['HEAD']:
630 631 continue
631 632
632 633 remote_refs[ref] = sha
633 634
634 635 if refs and sha in refs:
635 636 # we filter fetch using our specified refs
636 637 fetch_refs.append('{}:{}'.format(ref, ref))
637 638 elif not refs:
638 639 fetch_refs.append('{}:{}'.format(ref, ref))
639 640 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640 641
641 642 if fetch_refs:
642 643 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
643 644 fetch_refs_chunks = list(chunk)
644 645 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
645 646 _out, _err = self.run_git_command(
646 647 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
647 648 fail_on_stderr=False,
648 649 _copts=self._remote_conf(config),
649 650 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650 651
651 652 return remote_refs
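Refspecs are fetched in chunks so the git command line stays bounded; `more_itertools.chunked` simply splits the list into fixed-size batches (4096 here). For reference:

    import more_itertools
    assert list(more_itertools.chunked(range(5), 2)) == [[0, 1], [2, 3], [4]]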
652 653
653 654 @reraise_safe_exceptions
654 655 def sync_push(self, wire, url, refs=None):
655 656 if not self.check_url(url, wire):
656 657 return
657 658 config = self._wire_to_config(wire)
658 659 self._factory.repo(wire)
659 660 self.run_git_command(
660 661 wire, ['push', url, '--mirror'], fail_on_stderr=False,
661 662 _copts=self._remote_conf(config),
662 663 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663 664
664 665 @reraise_safe_exceptions
665 666 def get_remote_refs(self, wire, url):
666 667 repo = Repo(url)
667 668 return repo.get_refs()
668 669
669 670 @reraise_safe_exceptions
670 671 def get_description(self, wire):
671 672 repo = self._factory.repo(wire)
672 673 return repo.get_description()
673 674
674 675 @reraise_safe_exceptions
675 676 def get_missing_revs(self, wire, rev1, rev2, path2):
676 677 repo = self._factory.repo(wire)
677 678 LocalGitClient(thin_packs=False).fetch(path2, repo)
678 679
679 680 wire_remote = wire.copy()
680 681 wire_remote['path'] = path2
681 682 repo_remote = self._factory.repo(wire_remote)
682 683 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
683 684
684 685 revs = [
685 686 x.commit.id
686 687 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
687 688 return revs
688 689
689 690 @reraise_safe_exceptions
690 691 def get_object(self, wire, sha, maybe_unreachable=False):
691 692 cache_on, context_uid, repo_id = self._cache_on(wire)
692 693 @self.region.conditional_cache_on_arguments(condition=cache_on)
693 694 def _get_object(_context_uid, _repo_id, _sha):
694 695 repo_init = self._factory.repo_libgit2(wire)
695 696 with repo_init as repo:
696 697
697 698 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
698 699 try:
699 700 commit = repo.revparse_single(sha)
700 701 except KeyError:
701 702 # NOTE(marcink): KeyError doesn't give us any meaningful information
702 703 # here, we instead give something more explicit
703 704 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
704 705 raise exceptions.LookupException(e)(missing_commit_err)
705 706 except ValueError as e:
706 707 raise exceptions.LookupException(e)(missing_commit_err)
707 708
708 709 is_tag = False
709 710 if isinstance(commit, pygit2.Tag):
710 711 commit = repo.get(commit.target)
711 712 is_tag = True
712 713
713 714 check_dangling = True
714 715 if is_tag:
715 716 check_dangling = False
716 717
717 718 if check_dangling and maybe_unreachable:
718 719 check_dangling = False
719 720
720 721 # if we used a reference and it parsed, we don't have a dangling commit
721 722 if sha != commit.hex:
722 723 check_dangling = False
723 724
724 725 if check_dangling:
725 726 # check for dangling commit
726 727 for branch in repo.branches.with_commit(commit.hex):
727 728 if branch:
728 729 break
729 730 else:
730 731 # NOTE(marcink): Empty error doesn't give us any meaningful information
731 732 # here, we instead give something more explicit
732 733 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
733 734 raise exceptions.LookupException(e)(missing_commit_err)
734 735
735 736 commit_id = commit.hex
736 737 type_id = commit.type
737 738
738 739 return {
739 740 'id': commit_id,
740 741 'type': self._type_id_to_name(type_id),
741 742 'commit_id': commit_id,
742 743 'idx': 0
743 744 }
744 745
745 746 return _get_object(context_uid, repo_id, sha)
746 747
747 748 @reraise_safe_exceptions
748 749 def get_refs(self, wire):
749 750 cache_on, context_uid, repo_id = self._cache_on(wire)
750 751 @self.region.conditional_cache_on_arguments(condition=cache_on)
751 752 def _get_refs(_context_uid, _repo_id):
752 753
753 754 repo_init = self._factory.repo_libgit2(wire)
754 755 with repo_init as repo:
755 756 regex = re.compile('^refs/(heads|tags)/')
756 757 return {x.name: x.target.hex for x in
757 758 filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())}
758 759
759 760 return _get_refs(context_uid, repo_id)
760 761
761 762 @reraise_safe_exceptions
762 763 def get_branch_pointers(self, wire):
763 764 cache_on, context_uid, repo_id = self._cache_on(wire)
764 765 @self.region.conditional_cache_on_arguments(condition=cache_on)
765 766 def _get_branch_pointers(_context_uid, _repo_id):
766 767
767 768 repo_init = self._factory.repo_libgit2(wire)
768 769 regex = re.compile('^refs/heads')
769 770 with repo_init as repo:
770 771 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
771 772 return {x.target.hex: x.shorthand for x in branches}
772 773
773 774 return _get_branch_pointers(context_uid, repo_id)
774 775
775 776 @reraise_safe_exceptions
776 777 def head(self, wire, show_exc=True):
777 778 cache_on, context_uid, repo_id = self._cache_on(wire)
778 779 @self.region.conditional_cache_on_arguments(condition=cache_on)
779 780 def _head(_context_uid, _repo_id, _show_exc):
780 781 repo_init = self._factory.repo_libgit2(wire)
781 782 with repo_init as repo:
782 783 try:
783 784 return repo.head.peel().hex
784 785 except Exception:
785 786 if show_exc:
786 787 raise
787 788 return _head(context_uid, repo_id, show_exc)
788 789
789 790 @reraise_safe_exceptions
790 791 def init(self, wire):
791 792 repo_path = str_to_dulwich(wire['path'])
792 793 self.repo = Repo.init(repo_path)
793 794
794 795 @reraise_safe_exceptions
795 796 def init_bare(self, wire):
796 797 repo_path = str_to_dulwich(wire['path'])
797 798 self.repo = Repo.init_bare(repo_path)
798 799
799 800 @reraise_safe_exceptions
800 801 def revision(self, wire, rev):
801 802
802 803 cache_on, context_uid, repo_id = self._cache_on(wire)
803 804 @self.region.conditional_cache_on_arguments(condition=cache_on)
804 805 def _revision(_context_uid, _repo_id, _rev):
805 806 repo_init = self._factory.repo_libgit2(wire)
806 807 with repo_init as repo:
807 808 commit = repo[rev]
808 809 obj_data = {
809 810 'id': commit.id.hex,
810 811 }
811 812 # tree objects themselves don't have a tree_id attribute
812 813 if hasattr(commit, 'tree_id'):
813 814 obj_data['tree'] = commit.tree_id.hex
814 815
815 816 return obj_data
816 817 return _revision(context_uid, repo_id, rev)
817 818
818 819 @reraise_safe_exceptions
819 820 def date(self, wire, commit_id):
820 821 cache_on, context_uid, repo_id = self._cache_on(wire)
821 822 @self.region.conditional_cache_on_arguments(condition=cache_on)
822 823 def _date(_repo_id, _commit_id):
823 824 repo_init = self._factory.repo_libgit2(wire)
824 825 with repo_init as repo:
825 826 commit = repo[commit_id]
826 827
827 828 if hasattr(commit, 'commit_time'):
828 829 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
829 830 else:
830 831 commit = commit.get_object()
831 832 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
832 833
833 834 # TODO(marcink): check dulwich difference of offset vs timezone
834 835 return [commit_time, commit_time_offset]
835 836 return _date(repo_id, commit_id)
836 837
837 838 @reraise_safe_exceptions
838 839 def author(self, wire, commit_id):
839 840 cache_on, context_uid, repo_id = self._cache_on(wire)
840 841 @self.region.conditional_cache_on_arguments(condition=cache_on)
841 842 def _author(_repo_id, _commit_id):
842 843 repo_init = self._factory.repo_libgit2(wire)
843 844 with repo_init as repo:
844 845 commit = repo[commit_id]
845 846
846 847 if hasattr(commit, 'author'):
847 848 author = commit.author
848 849 else:
849 850 author = commit.get_object().author
850 851
851 852 if author.email:
852 853 return u"{} <{}>".format(author.name, author.email)
853 854
854 855 try:
855 856 return u"{}".format(author.name)
856 857 except Exception:
857 858 return u"{}".format(safe_unicode(author.raw_name))
858 859
859 860 return _author(repo_id, commit_id)
860 861
861 862 @reraise_safe_exceptions
862 863 def message(self, wire, commit_id):
863 864 cache_on, context_uid, repo_id = self._cache_on(wire)
864 865 @self.region.conditional_cache_on_arguments(condition=cache_on)
865 866 def _message(_repo_id, _commit_id):
866 867 repo_init = self._factory.repo_libgit2(wire)
867 868 with repo_init as repo:
868 869 commit = repo[commit_id]
869 870 return commit.message
870 871 return _message(repo_id, commit_id)
871 872
872 873 @reraise_safe_exceptions
873 874 def parents(self, wire, commit_id):
874 875 cache_on, context_uid, repo_id = self._cache_on(wire)
875 876 @self.region.conditional_cache_on_arguments(condition=cache_on)
876 877 def _parents(_repo_id, _commit_id):
877 878 repo_init = self._factory.repo_libgit2(wire)
878 879 with repo_init as repo:
879 880 commit = repo[commit_id]
880 881 if hasattr(commit, 'parent_ids'):
881 882 parent_ids = commit.parent_ids
882 883 else:
883 884 parent_ids = commit.get_object().parent_ids
884 885
885 886 return [x.hex for x in parent_ids]
886 887 return _parents(repo_id, commit_id)
887 888
888 889 @reraise_safe_exceptions
889 890 def children(self, wire, commit_id):
890 891 cache_on, context_uid, repo_id = self._cache_on(wire)
891 892 @self.region.conditional_cache_on_arguments(condition=cache_on)
892 893 def _children(_repo_id, _commit_id):
893 894 output, __ = self.run_git_command(
894 895 wire, ['rev-list', '--all', '--children'])
895 896
896 897 child_ids = []
897 898 pat = re.compile(r'^%s' % commit_id)
898 899 for l in output.splitlines():
899 900 if pat.match(l):
900 901 found_ids = l.split(' ')[1:]
901 902 child_ids.extend(found_ids)
902 903
903 904 return child_ids
904 905 return _children(repo_id, commit_id)
905 906
906 907 @reraise_safe_exceptions
907 908 def set_refs(self, wire, key, value):
908 909 repo_init = self._factory.repo_libgit2(wire)
909 910 with repo_init as repo:
910 911 repo.references.create(key, value, force=True)
911 912
912 913 @reraise_safe_exceptions
913 914 def create_branch(self, wire, branch_name, commit_id, force=False):
914 915 repo_init = self._factory.repo_libgit2(wire)
915 916 with repo_init as repo:
916 917 commit = repo[commit_id]
917 918
918 919 if force:
919 920 repo.branches.local.create(branch_name, commit, force=force)
920 921 elif not repo.branches.get(branch_name):
921 922 # create only if the branch doesn't already exist
922 923 repo.branches.local.create(branch_name, commit, force=force)
923 924
924 925 @reraise_safe_exceptions
925 926 def remove_ref(self, wire, key):
926 927 repo_init = self._factory.repo_libgit2(wire)
927 928 with repo_init as repo:
928 929 repo.references.delete(key)
929 930
930 931 @reraise_safe_exceptions
931 932 def tag_remove(self, wire, tag_name):
932 933 repo_init = self._factory.repo_libgit2(wire)
933 934 with repo_init as repo:
934 935 key = 'refs/tags/{}'.format(tag_name)
935 936 repo.references.delete(key)
936 937
937 938 @reraise_safe_exceptions
938 939 def tree_changes(self, wire, source_id, target_id):
939 940 # TODO(marcink): remove this, it seems it's only used by tests
940 941 repo = self._factory.repo(wire)
941 942 source = repo[source_id].tree if source_id else None
942 943 target = repo[target_id].tree
943 944 result = repo.object_store.tree_changes(source, target)
944 945 return list(result)
945 946
946 947 @reraise_safe_exceptions
947 948 def tree_and_type_for_path(self, wire, commit_id, path):
948 949
949 950 cache_on, context_uid, repo_id = self._cache_on(wire)
950 951 @self.region.conditional_cache_on_arguments(condition=cache_on)
951 952 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
952 953 repo_init = self._factory.repo_libgit2(wire)
953 954
954 955 with repo_init as repo:
955 956 commit = repo[commit_id]
956 957 try:
957 958 tree = commit.tree[path]
958 959 except KeyError:
959 960 return None, None, None
960 961
961 962 return tree.id.hex, tree.type, tree.filemode
962 963 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
963 964
964 965 @reraise_safe_exceptions
965 966 def tree_items(self, wire, tree_id):
966 967 cache_on, context_uid, repo_id = self._cache_on(wire)
967 968 @self.region.conditional_cache_on_arguments(condition=cache_on)
968 969 def _tree_items(_repo_id, _tree_id):
969 970
970 971 repo_init = self._factory.repo_libgit2(wire)
971 972 with repo_init as repo:
972 973 try:
973 974 tree = repo[tree_id]
974 975 except KeyError:
975 976 raise ObjectMissing('No tree with id: {}'.format(tree_id))
976 977
977 978 result = []
978 979 for item in tree:
979 980 item_sha = item.hex
980 981 item_mode = item.filemode
981 982 item_type = item.type
982 983
983 984 if item_type == 'commit':
984 985 # NOTE(marcink): we translate submodules to 'link' for backward compat
985 986 item_type = 'link'
986 987
987 988 result.append((item.name, item_mode, item_sha, item_type))
988 989 return result
989 990 return _tree_items(repo_id, tree_id)
990 991
991 992 @reraise_safe_exceptions
992 993 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
993 994 """
994 995 Old version that uses subprocess to call diff
995 996 """
996 997
997 998 flags = [
998 999 '-U%s' % context, '--patch',
999 1000 '--binary',
1000 1001 '--find-renames',
1001 1002 '--no-indent-heuristic',
1002 1003 # '--indent-heuristic',
1003 1004 #'--full-index',
1004 1005 #'--abbrev=40'
1005 1006 ]
1006 1007
1007 1008 if opt_ignorews:
1008 1009 flags.append('--ignore-all-space')
1009 1010
1010 1011 if commit_id_1 == self.EMPTY_COMMIT:
1011 1012 cmd = ['show'] + flags + [commit_id_2]
1012 1013 else:
1013 1014 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1014 1015
1015 1016 if file_filter:
1016 1017 cmd.extend(['--', file_filter])
1017 1018
1018 1019 diff, __ = self.run_git_command(wire, cmd)
1019 1020 # If we used the 'show' command, strip the first few lines (until the actual diff
1020 1021 # starts)
1021 1022 if commit_id_1 == self.EMPTY_COMMIT:
1022 1023 lines = diff.splitlines()
1023 1024 x = 0
1024 1025 for line in lines:
1025 1026 if line.startswith('diff'):
1026 1027 break
1027 1028 x += 1
1028 1029 # Append a newline just like the 'diff' command does
1029 1030 diff = '\n'.join(lines[x:]) + '\n'
1030 1031 return diff
1031 1032
1032 1033 @reraise_safe_exceptions
1033 1034 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1034 1035 repo_init = self._factory.repo_libgit2(wire)
1035 1036 with repo_init as repo:
1036 1037 swap = True
1037 1038 flags = 0
1038 1039 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1039 1040
1040 1041 if opt_ignorews:
1041 1042 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1042 1043
1043 1044 if commit_id_1 == self.EMPTY_COMMIT:
1044 1045 comm1 = repo[commit_id_2]
1045 1046 diff_obj = comm1.tree.diff_to_tree(
1046 1047 flags=flags, context_lines=context, swap=swap)
1047 1048
1048 1049 else:
1049 1050 comm1 = repo[commit_id_2]
1050 1051 comm2 = repo[commit_id_1]
1051 1052 diff_obj = comm1.tree.diff_to_tree(
1052 1053 comm2.tree, flags=flags, context_lines=context, swap=swap)
1053 1054 similar_flags = 0
1054 1055 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1055 1056 diff_obj.find_similar(flags=similar_flags)
1056 1057
1057 1058 if file_filter:
1058 1059 for p in diff_obj:
1059 1060 if p.delta.old_file.path == file_filter:
1060 1061 return p.patch or ''
1061 1062 # no matching path == no diff
1062 1063 return ''
1063 1064 return diff_obj.patch or ''
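A hedged usage sketch of the libgit2-backed diff (`parent_sha` and `child_sha` are hypothetical 40-char commit ids): whole-repo patch text, whitespace ignored, three lines of context:

    patch_text = remote.diff(wire, parent_sha, child_sha,
                             file_filter=None, opt_ignorews=True, context=3)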
1064 1065
1065 1066 @reraise_safe_exceptions
1066 1067 def node_history(self, wire, commit_id, path, limit):
1067 1068 cache_on, context_uid, repo_id = self._cache_on(wire)
1068 1069 @self.region.conditional_cache_on_arguments(condition=cache_on)
1069 1070 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1070 1071 # optimize for n==1, rev-list is much faster for that use-case
1071 1072 if limit == 1:
1072 1073 cmd = ['rev-list', '-1', commit_id, '--', path]
1073 1074 else:
1074 1075 cmd = ['log']
1075 1076 if limit:
1076 1077 cmd.extend(['-n', str(safe_int(limit, 0))])
1077 1078 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1078 1079
1079 1080 output, __ = self.run_git_command(wire, cmd)
1080 1081 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1081 1082
1082 1083 return [x for x in commit_ids]
1083 1084 return _node_history(context_uid, repo_id, commit_id, path, limit)
1084 1085
1085 1086 @reraise_safe_exceptions
1086 1087 def node_annotate(self, wire, commit_id, path):
1087 1088
1088 1089 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1089 1090 # -l ==> outputs long shas (and we need all 40 characters)
1090 1091 # --root ==> doesn't put '^' character for boundaries
1091 1092 # -r commit_id ==> blames for the given commit
1092 1093 output, __ = self.run_git_command(wire, cmd)
1093 1094
1094 1095 result = []
1095 1096 for i, blame_line in enumerate(output.split('\n')[:-1]):
1096 1097 line_no = i + 1
1097 1098 commit_id, line = re.split(r' ', blame_line, 1)
1098 1099 result.append((line_no, commit_id, line))
1099 1100 return result
1100 1101
1101 1102 @reraise_safe_exceptions
1102 1103 def update_server_info(self, wire):
1103 1104 repo = self._factory.repo(wire)
1104 1105 update_server_info(repo)
1105 1106
1106 1107 @reraise_safe_exceptions
1107 1108 def get_all_commit_ids(self, wire):
1108 1109
1109 1110 cache_on, context_uid, repo_id = self._cache_on(wire)
1110 1111 @self.region.conditional_cache_on_arguments(condition=cache_on)
1111 1112 def _get_all_commit_ids(_context_uid, _repo_id):
1112 1113
1113 1114 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1114 1115 try:
1115 1116 output, __ = self.run_git_command(wire, cmd)
1116 1117 return output.splitlines()
1117 1118 except Exception:
1118 1119 # Can be raised for empty repositories
1119 1120 return []
1120 1121 return _get_all_commit_ids(context_uid, repo_id)
1121 1122
1122 1123 @reraise_safe_exceptions
1123 1124 def run_git_command(self, wire, cmd, **opts):
1124 1125 path = wire.get('path', None)
1125 1126
1126 1127 if path and os.path.isdir(path):
1127 1128 opts['cwd'] = path
1128 1129
1129 1130 if '_bare' in opts:
1130 1131 _copts = []
1131 1132 del opts['_bare']
1132 1133 else:
1133 1134 _copts = ['-c', 'core.quotepath=false', ]
1134 1135 safe_call = False
1135 1136 if '_safe' in opts:
1136 1137 # no exc on failure
1137 1138 del opts['_safe']
1138 1139 safe_call = True
1139 1140
1140 1141 if '_copts' in opts:
1141 1142 _copts.extend(opts['_copts'] or [])
1142 1143 del opts['_copts']
1143 1144
1144 1145 gitenv = os.environ.copy()
1145 1146 gitenv.update(opts.pop('extra_env', {}))
1146 1147 # need to clean up GIT_DIR!
1147 1148 if 'GIT_DIR' in gitenv:
1148 1149 del gitenv['GIT_DIR']
1149 1150 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1150 1151 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1151 1152
1152 1153 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1153 1154 _opts = {'env': gitenv, 'shell': False}
1154 1155
1155 1156 proc = None
1156 1157 try:
1157 1158 _opts.update(opts)
1158 1159 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1159 1160
1160 1161 return ''.join(proc), ''.join(proc.error)
1161 1162 except (EnvironmentError, OSError) as err:
1162 1163 cmd = ' '.join(cmd) # human-friendly CMD
1163 1164 tb_err = ("Couldn't run git command (%s).\n"
1164 1165 "Original error was:%s\n"
1165 1166 "Call options:%s\n"
1166 1167 % (cmd, err, _opts))
1167 1168 log.exception(tb_err)
1168 1169 if safe_call:
1169 1170 return '', err
1170 1171 else:
1171 1172 raise exceptions.VcsException()(tb_err)
1172 1173 finally:
1173 1174 if proc:
1174 1175 proc.close()
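The underscore options are consumed here before the remaining kwargs reach the subprocess wrapper: `_bare` drops the default `-c core.quotepath=false`, `_safe` returns the error instead of raising, and `_copts` appends extra `-c` options. `discover_git_version` above is the canonical caller:

    stdout, stderr = remote.run_git_command(
        {}, ['--version'], _bare=True, _safe=True)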
1175 1176
1176 1177 @reraise_safe_exceptions
1177 1178 def install_hooks(self, wire, force=False):
1178 1179 from vcsserver.hook_utils import install_git_hooks
1179 1180 bare = self.bare(wire)
1180 1181 path = wire['path']
1181 1182 return install_git_hooks(path, bare, force_create=force)
1182 1183
1183 1184 @reraise_safe_exceptions
1184 1185 def get_hooks_info(self, wire):
1185 1186 from vcsserver.hook_utils import (
1186 1187 get_git_pre_hook_version, get_git_post_hook_version)
1187 1188 bare = self.bare(wire)
1188 1189 path = wire['path']
1189 1190 return {
1190 1191 'pre_version': get_git_pre_hook_version(path, bare),
1191 1192 'post_version': get_git_post_hook_version(path, bare),
1192 1193 }
1194
1195 @reraise_safe_exceptions
1196 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1197 archive_dir_name, commit_id):
1198
1199 def file_walker(_commit_id, path):
1200 repo_init = self._factory.repo_libgit2(wire)
1201
1202 with repo_init as repo:
1203 commit = repo[commit_id]
1204
1205 if path in ['', '/']:
1206 tree = commit.tree
1207 else:
1208 tree = commit.tree[path.rstrip('/')]
1209 tree_id = tree.id.hex
1210 try:
1211 tree = repo[tree_id]
1212 except KeyError:
1213 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1214
1215 index = LibGit2Index.Index()
1216 index.read_tree(tree)
1217 file_iter = index
1218
1219 for fn in file_iter:
1220 file_path = fn.path
1221 mode = fn.mode
1222 is_link = stat.S_ISLNK(mode)
1223 yield ArchiveNode(file_path, mode, is_link, repo[fn.id].read_raw)
1224
1225 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1226 archive_dir_name, commit_id)
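`archive_repo` (imported from vcsserver.base) drains the `file_walker` generator; each ArchiveNode appears to carry a relative path, a file mode, a symlink flag, and a zero-argument callable returning the raw bytes (here the bound `read_raw` of a pygit2 blob). A hedged sketch of an equivalent walker over a plain directory, assuming that same ArchiveNode signature:

    import os, stat

    def dir_walker(root):
        for base, _dirs, files in os.walk(root):
            for name in files:
                full = os.path.join(base, name)
                mode = os.lstat(full).st_mode
                # bind `full` per file so the callable reads the right path
                read_raw = lambda path=full: open(path, 'rb').read()
                # ArchiveNode signature assumed from the call above
                yield ArchiveNode(os.path.relpath(full, root), mode,
                                  stat.S_ISLNK(mode), read_raw)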
@@ -1,1009 +1,1021 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 import functools
18 18 import io
19 19 import logging
20 import os
20 21 import stat
21 22 import urllib
22 23 import urllib2
23 24 import traceback
24 25
25 26 from hgext import largefiles, rebase, purge
26 27 from hgext.strip import strip as hgext_strip
27 28 from mercurial import commands
28 29 from mercurial import unionrepo
29 30 from mercurial import verify
30 31 from mercurial import repair
31 32
32 33 import vcsserver
33 34 from vcsserver import exceptions
34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 36 from vcsserver.hgcompat import (
36 37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 RepoLookupError, InterventionRequired, RequirementError)
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil)
41 43 from vcsserver.vcs_base import RemoteBase
42 44
43 45 log = logging.getLogger(__name__)
44 46
45 47
46 48 def make_ui_from_config(repo_config):
47 49
48 50 class LoggingUI(ui.ui):
49 51 def status(self, *msg, **opts):
50 52 log.info(' '.join(msg).rstrip('\n'))
51 53 super(LoggingUI, self).status(*msg, **opts)
52 54
53 55 def warn(self, *msg, **opts):
54 56 log.warn(' '.join(msg).rstrip('\n'))
55 57 super(LoggingUI, self).warn(*msg, **opts)
56 58
57 59 def error(self, *msg, **opts):
58 60 log.error(' '.join(msg).rstrip('\n'))
59 61 super(LoggingUI, self).error(*msg, **opts)
60 62
61 63 def note(self, *msg, **opts):
62 64 log.info(' '.join(msg).rstrip('\n'))
63 65 super(LoggingUI, self).note(*msg, **opts)
64 66
65 67 def debug(self, *msg, **opts):
66 68 log.debug(' '.join(msg).rstrip('\n'))
67 69 super(LoggingUI, self).debug(*msg, **opts)
68 70
69 71 baseui = LoggingUI()
70 72
71 73 # clean the baseui object
72 74 baseui._ocfg = hgconfig.config()
73 75 baseui._ucfg = hgconfig.config()
74 76 baseui._tcfg = hgconfig.config()
75 77
76 78 for section, option, value in repo_config:
77 79 baseui.setconfig(section, option, value)
78 80
79 81 # make our hgweb quiet so it doesn't print output
80 82 baseui.setconfig('ui', 'quiet', 'true')
81 83
82 84 baseui.setconfig('ui', 'paginate', 'never')
83 85 # for better Error reporting of Mercurial
84 86 baseui.setconfig('ui', 'message-output', 'stderr')
85 87
86 88 # force mercurial to only use 1 thread, otherwise it may try to set a
87 89 # signal in a non-main thread, thus generating a ValueError.
88 90 baseui.setconfig('worker', 'numcpus', 1)
89 91
90 92 # If there is no config for the largefiles extension, we explicitly disable
91 93 # it here. This overrides settings from repositories hgrc file. Recent
92 94 # mercurial versions enable largefiles in hgrc on clone from largefile
93 95 # repo.
94 96 if not baseui.hasconfig('extensions', 'largefiles'):
95 97 log.debug('Explicitly disable largefiles extension for repo.')
96 98 baseui.setconfig('extensions', 'largefiles', '!')
97 99
98 100 return baseui
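`repo_config` is the same (section, option, value) triple list used on the git side; every triple lands in `baseui.setconfig`. A hedged sketch (the sample options are illustrative):

    baseui = make_ui_from_config([
        ('ui', 'username', 'vcsserver'),
        ('phases', 'publish', 'false'),
    ])
    # largefiles stays disabled unless the triples above enable it:
    # baseui.config('extensions', 'largefiles') == '!'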
99 101
100 102
101 103 def reraise_safe_exceptions(func):
102 104 """Decorator for converting mercurial exceptions to something neutral."""
103 105
104 106 def wrapper(*args, **kwargs):
105 107 try:
106 108 return func(*args, **kwargs)
107 109 except (Abort, InterventionRequired) as e:
108 110 raise_from_original(exceptions.AbortException(e))
109 111 except RepoLookupError as e:
110 112 raise_from_original(exceptions.LookupException(e))
111 113 except RequirementError as e:
112 114 raise_from_original(exceptions.RequirementException(e))
113 115 except RepoError as e:
114 116 raise_from_original(exceptions.VcsException(e))
115 117 except LookupError as e:
116 118 raise_from_original(exceptions.LookupException(e))
117 119 except Exception as e:
118 120 if not hasattr(e, '_vcs_kind'):
119 121 log.exception("Unhandled exception in hg remote call")
120 122 raise_from_original(exceptions.UnhandledException(e))
121 123
122 124 raise
123 125 return wrapper
124 126
125 127
126 128 class MercurialFactory(RepoFactory):
127 129 repo_type = 'hg'
128 130
129 131 def _create_config(self, config, hooks=True):
130 132 if not hooks:
131 133 hooks_to_clean = frozenset((
132 134 'changegroup.repo_size', 'preoutgoing.pre_pull',
133 135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
134 136 new_config = []
135 137 for section, option, value in config:
136 138 if section == 'hooks' and option in hooks_to_clean:
137 139 continue
138 140 new_config.append((section, option, value))
139 141 config = new_config
140 142
141 143 baseui = make_ui_from_config(config)
142 144 return baseui
143 145
144 146 def _create_repo(self, wire, create):
145 147 baseui = self._create_config(wire["config"])
146 148 return instance(baseui, wire["path"], create)
147 149
148 150 def repo(self, wire, create=False):
149 151 """
150 152 Get a repository instance for the given path.
151 153 """
152 154 return self._create_repo(wire, create)
153 155
154 156
155 157 def patch_ui_message_output(baseui):
156 158 baseui.setconfig('ui', 'quiet', 'false')
157 159 output = io.BytesIO()
158 160
159 161 def write(data, **unused_kwargs):
160 162 output.write(data)
161 163
162 164 baseui.status = write
163 165 baseui.write = write
164 166 baseui.warn = write
165 167 baseui.debug = write
166 168
167 169 return baseui, output
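A usage sketch: after patching, everything Mercurial would normally print is captured in the BytesIO buffer instead of reaching stdout, so command output can be shipped back over the wire:

    baseui, output = patch_ui_message_output(baseui)
    # ... run a mercurial command with this ui ...
    captured = output.getvalue()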
168 170
169 171
170 172 class HgRemote(RemoteBase):
171 173
172 174 def __init__(self, factory):
173 175 self._factory = factory
174 176 self._bulk_methods = {
175 177 "affected_files": self.ctx_files,
176 178 "author": self.ctx_user,
177 179 "branch": self.ctx_branch,
178 180 "children": self.ctx_children,
179 181 "date": self.ctx_date,
180 182 "message": self.ctx_description,
181 183 "parents": self.ctx_parents,
182 184 "status": self.ctx_status,
183 185 "obsolete": self.ctx_obsolete,
184 186 "phase": self.ctx_phase,
185 187 "hidden": self.ctx_hidden,
186 188 "_file_paths": self.ctx_list,
187 189 }
188 190
189 191 def _get_ctx(self, repo, ref):
190 192 return get_ctx(repo, ref)
191 193
192 194 @reraise_safe_exceptions
193 195 def discover_hg_version(self):
194 196 from mercurial import util
195 197 return util.version()
196 198
197 199 @reraise_safe_exceptions
198 200 def is_empty(self, wire):
199 201 repo = self._factory.repo(wire)
200 202
201 203 try:
202 204 return len(repo) == 0
203 205 except Exception:
204 206 log.exception("failed to read object_store")
205 207 return False
206 208
207 209 @reraise_safe_exceptions
208 def archive_repo(self, archive_path, mtime, file_info, kind):
209 if kind == "tgz":
210 archiver = archival.tarit(archive_path, mtime, "gz")
211 elif kind == "tbz2":
212 archiver = archival.tarit(archive_path, mtime, "bz2")
213 elif kind == 'zip':
214 archiver = archival.zipit(archive_path, mtime)
215 else:
216 raise exceptions.ArchiveException()(
217 'Remote does not support: "%s".' % kind)
218
219 for f_path, f_mode, f_is_link, f_content in file_info:
220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
221 archiver.done()
222
223 @reraise_safe_exceptions
224 210 def bookmarks(self, wire):
225 211 cache_on, context_uid, repo_id = self._cache_on(wire)
226 212 @self.region.conditional_cache_on_arguments(condition=cache_on)
227 213 def _bookmarks(_context_uid, _repo_id):
228 214 repo = self._factory.repo(wire)
229 215 return dict(repo._bookmarks)
230 216
231 217 return _bookmarks(context_uid, repo_id)
232 218
233 219 @reraise_safe_exceptions
234 220 def branches(self, wire, normal, closed):
235 221 cache_on, context_uid, repo_id = self._cache_on(wire)
236 222 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 223 def _branches(_context_uid, _repo_id, _normal, _closed):
238 224 repo = self._factory.repo(wire)
239 225 iter_branches = repo.branchmap().iterbranches()
240 226 bt = {}
241 227 for branch_name, _heads, tip, is_closed in iter_branches:
242 228 if normal and not is_closed:
243 229 bt[branch_name] = tip
244 230 if closed and is_closed:
245 231 bt[branch_name] = tip
246 232
247 233 return bt
248 234
249 235 return _branches(context_uid, repo_id, normal, closed)
250 236
251 237 @reraise_safe_exceptions
252 238 def bulk_request(self, wire, commit_id, pre_load):
253 239 cache_on, context_uid, repo_id = self._cache_on(wire)
254 240 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 241 def _bulk_request(_repo_id, _commit_id, _pre_load):
256 242 result = {}
257 243 for attr in pre_load:
258 244 try:
259 245 method = self._bulk_methods[attr]
260 246 result[attr] = method(wire, commit_id)
261 247 except KeyError as e:
262 248 raise exceptions.VcsException(e)(
263 249 'Unknown bulk attribute: "%s"' % attr)
264 250 return result
265 251
266 252 return _bulk_request(repo_id, commit_id, sorted(pre_load))
267 253
268 254 @reraise_safe_exceptions
269 255 def ctx_branch(self, wire, commit_id):
270 256 cache_on, context_uid, repo_id = self._cache_on(wire)
271 257 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 258 def _ctx_branch(_repo_id, _commit_id):
273 259 repo = self._factory.repo(wire)
274 260 ctx = self._get_ctx(repo, commit_id)
275 261 return ctx.branch()
276 262 return _ctx_branch(repo_id, commit_id)
277 263
278 264 @reraise_safe_exceptions
279 265 def ctx_date(self, wire, commit_id):
280 266 cache_on, context_uid, repo_id = self._cache_on(wire)
281 267 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 268 def _ctx_date(_repo_id, _commit_id):
283 269 repo = self._factory.repo(wire)
284 270 ctx = self._get_ctx(repo, commit_id)
285 271 return ctx.date()
286 272 return _ctx_date(repo_id, commit_id)
287 273
288 274 @reraise_safe_exceptions
289 275 def ctx_description(self, wire, revision):
290 276 repo = self._factory.repo(wire)
291 277 ctx = self._get_ctx(repo, revision)
292 278 return ctx.description()
293 279
294 280 @reraise_safe_exceptions
295 281 def ctx_files(self, wire, commit_id):
296 282 cache_on, context_uid, repo_id = self._cache_on(wire)
297 283 @self.region.conditional_cache_on_arguments(condition=cache_on)
298 284 def _ctx_files(_repo_id, _commit_id):
299 285 repo = self._factory.repo(wire)
300 286 ctx = self._get_ctx(repo, commit_id)
301 287 return ctx.files()
302 288
303 289 return _ctx_files(repo_id, commit_id)
304 290
305 291 @reraise_safe_exceptions
306 292 def ctx_list(self, path, revision):
307 293 repo = self._factory.repo(path)
308 294 ctx = self._get_ctx(repo, revision)
309 295 return list(ctx)
310 296
311 297 @reraise_safe_exceptions
312 298 def ctx_parents(self, wire, commit_id):
313 299 cache_on, context_uid, repo_id = self._cache_on(wire)
314 300 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 301 def _ctx_parents(_repo_id, _commit_id):
316 302 repo = self._factory.repo(wire)
317 303 ctx = self._get_ctx(repo, commit_id)
318 304 return [parent.hex() for parent in ctx.parents()
319 305 if not (parent.hidden() or parent.obsolete())]
320 306
321 307 return _ctx_parents(repo_id, commit_id)
322 308
323 309 @reraise_safe_exceptions
324 310 def ctx_children(self, wire, commit_id):
325 311 cache_on, context_uid, repo_id = self._cache_on(wire)
326 312 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 313 def _ctx_children(_repo_id, _commit_id):
328 314 repo = self._factory.repo(wire)
329 315 ctx = self._get_ctx(repo, commit_id)
330 316 return [child.hex() for child in ctx.children()
331 317 if not (child.hidden() or child.obsolete())]
332 318
333 319 return _ctx_children(repo_id, commit_id)
334 320
335 321 @reraise_safe_exceptions
336 322 def ctx_phase(self, wire, commit_id):
337 323 cache_on, context_uid, repo_id = self._cache_on(wire)
338 324 @self.region.conditional_cache_on_arguments(condition=cache_on)
339 325 def _ctx_phase(_context_uid, _repo_id, _commit_id):
340 326 repo = self._factory.repo(wire)
341 327 ctx = self._get_ctx(repo, commit_id)
342 328 # public=0, draft=1, secret=2
343 329 return ctx.phase()
344 330 return _ctx_phase(context_uid, repo_id, commit_id)
345 331
346 332 @reraise_safe_exceptions
347 333 def ctx_obsolete(self, wire, commit_id):
348 334 cache_on, context_uid, repo_id = self._cache_on(wire)
349 335 @self.region.conditional_cache_on_arguments(condition=cache_on)
350 336 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
351 337 repo = self._factory.repo(wire)
352 338 ctx = self._get_ctx(repo, commit_id)
353 339 return ctx.obsolete()
354 340 return _ctx_obsolete(context_uid, repo_id, commit_id)
355 341
356 342 @reraise_safe_exceptions
357 343 def ctx_hidden(self, wire, commit_id):
358 344 cache_on, context_uid, repo_id = self._cache_on(wire)
359 345 @self.region.conditional_cache_on_arguments(condition=cache_on)
360 346 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
361 347 repo = self._factory.repo(wire)
362 348 ctx = self._get_ctx(repo, commit_id)
363 349 return ctx.hidden()
364 350 return _ctx_hidden(context_uid, repo_id, commit_id)
365 351
366 352 @reraise_safe_exceptions
367 353 def ctx_substate(self, wire, revision):
368 354 repo = self._factory.repo(wire)
369 355 ctx = self._get_ctx(repo, revision)
370 356 return ctx.substate
371 357
372 358 @reraise_safe_exceptions
373 359 def ctx_status(self, wire, revision):
374 360 repo = self._factory.repo(wire)
375 361 ctx = self._get_ctx(repo, revision)
376 362 status = repo[ctx.p1().node()].status(other=ctx.node())
377 363 # object of status (odd, custom named tuple in mercurial) is not
378 364 # correctly serializable, we make it a list, as the underling
379 365 # API expects this to be a list
380 366 return list(status)
381 367
382 368 @reraise_safe_exceptions
383 369 def ctx_user(self, wire, revision):
384 370 repo = self._factory.repo(wire)
385 371 ctx = self._get_ctx(repo, revision)
386 372 return ctx.user()
387 373
388 374 @reraise_safe_exceptions
389 375 def check_url(self, url, config):
390 376 _proto = None
391 377 if '+' in url[:url.find('://')]:
392 378 _proto = url[0:url.find('+')]
393 379 url = url[url.find('+') + 1:]
394 380 handlers = []
395 381 url_obj = url_parser(url)
396 382 test_uri, authinfo = url_obj.authinfo()
397 383 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
398 384 url_obj.query = obfuscate_qs(url_obj.query)
399 385
400 386 cleaned_uri = str(url_obj)
401 387 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
402 388
403 389 if authinfo:
404 390 # create a password manager
405 391 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
406 392 passmgr.add_password(*authinfo)
407 393
408 394 handlers.extend((httpbasicauthhandler(passmgr),
409 395 httpdigestauthhandler(passmgr)))
410 396
411 397 o = urllib2.build_opener(*handlers)
412 398 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
413 399 ('Accept', 'application/mercurial-0.1')]
414 400
415 401 q = {"cmd": 'between'}
416 402 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
417 403 qs = '?%s' % urllib.urlencode(q)
418 404 cu = "%s%s" % (test_uri, qs)
419 405 req = urllib2.Request(cu, None, {})
420 406
421 407 try:
422 408 log.debug("Trying to open URL %s", cleaned_uri)
423 409 resp = o.open(req)
424 410 if resp.code != 200:
425 411 raise exceptions.URLError()('Return Code is not 200')
426 412 except Exception as e:
427 413 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
428 414 # means it cannot be cloned
429 415 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
430 416
431 417 # now check if it's a proper hg repo, but don't do it for svn
432 418 try:
433 419 if _proto == 'svn':
434 420 pass
435 421 else:
436 422 # check for pure hg repos
437 423 log.debug(
438 424 "Verifying if URL is a Mercurial repository: %s",
439 425 cleaned_uri)
440 426 ui = make_ui_from_config(config)
441 427 peer_checker = makepeer(ui, url)
442 428 peer_checker.lookup('tip')
443 429 except Exception as e:
444 430 log.warning("URL is not a valid Mercurial repository: %s",
445 431 cleaned_uri)
446 432 raise exceptions.URLError(e)(
447 433 "url [%s] does not look like an hg repo org_exc: %s"
448 434 % (cleaned_uri, e))
449 435
450 436 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
451 437 return True
452 438
453 439 @reraise_safe_exceptions
454 440 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
455 441 repo = self._factory.repo(wire)
456 442
457 443 if file_filter:
458 444 match_filter = match(file_filter[0], '', [file_filter[1]])
459 445 else:
460 446 match_filter = file_filter
461 447 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
462 448
463 449 try:
464 450 return "".join(patch.diff(
465 451 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
466 452 except RepoLookupError as e:
467 453 raise exceptions.LookupException(e)()
468 454
469 455 @reraise_safe_exceptions
470 456 def node_history(self, wire, revision, path, limit):
471 457 cache_on, context_uid, repo_id = self._cache_on(wire)
472 458 @self.region.conditional_cache_on_arguments(condition=cache_on)
473 459 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
474 460 repo = self._factory.repo(wire)
475 461
476 462 ctx = self._get_ctx(repo, revision)
477 463 fctx = ctx.filectx(path)
478 464
479 465 def history_iter():
480 466 limit_rev = fctx.rev()
481 467 for obj in reversed(list(fctx.filelog())):
482 468 obj = fctx.filectx(obj)
483 469 ctx = obj.changectx()
484 470 if ctx.hidden() or ctx.obsolete():
485 471 continue
486 472
487 473 if limit_rev >= obj.rev():
488 474 yield obj
489 475
490 476 history = []
491 477 for cnt, obj in enumerate(history_iter()):
492 478 if limit and cnt >= limit:
493 479 break
494 480 history.append(hex(obj.node()))
495 481
496 482 return history
497 483 return _node_history(context_uid, repo_id, revision, path, limit)
498 484
499 485 @reraise_safe_exceptions
500 486 def node_history_untill(self, wire, revision, path, limit):
501 487 cache_on, context_uid, repo_id = self._cache_on(wire)
502 488 @self.region.conditional_cache_on_arguments(condition=cache_on)
503 489 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
504 490 repo = self._factory.repo(wire)
505 491 ctx = self._get_ctx(repo, revision)
506 492 fctx = ctx.filectx(path)
507 493
508 494 file_log = list(fctx.filelog())
509 495 if limit:
510 496 # Limit to the last n items
511 497 file_log = file_log[-limit:]
512 498
513 499 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
514 500 return _node_history_until(context_uid, repo_id, revision, path, limit)
515 501
516 502 @reraise_safe_exceptions
517 503 def fctx_annotate(self, wire, revision, path):
518 504 repo = self._factory.repo(wire)
519 505 ctx = self._get_ctx(repo, revision)
520 506 fctx = ctx.filectx(path)
521 507
522 508 result = []
523 509 for i, annotate_obj in enumerate(fctx.annotate(), 1):
524 510 ln_no = i
525 511 sha = hex(annotate_obj.fctx.node())
526 512 content = annotate_obj.text
527 513 result.append((ln_no, sha, content))
528 514 return result
529 515
530 516 @reraise_safe_exceptions
531 517 def fctx_node_data(self, wire, revision, path):
532 518 repo = self._factory.repo(wire)
533 519 ctx = self._get_ctx(repo, revision)
534 520 fctx = ctx.filectx(path)
535 521 return fctx.data()
536 522
537 523 @reraise_safe_exceptions
538 524 def fctx_flags(self, wire, commit_id, path):
539 525 cache_on, context_uid, repo_id = self._cache_on(wire)
540 526 @self.region.conditional_cache_on_arguments(condition=cache_on)
541 527 def _fctx_flags(_repo_id, _commit_id, _path):
542 528 repo = self._factory.repo(wire)
543 529 ctx = self._get_ctx(repo, commit_id)
544 530 fctx = ctx.filectx(path)
545 531 return fctx.flags()
546 532
547 533 return _fctx_flags(repo_id, commit_id, path)
548 534
549 535 @reraise_safe_exceptions
550 536 def fctx_size(self, wire, commit_id, path):
551 537 cache_on, context_uid, repo_id = self._cache_on(wire)
552 538 @self.region.conditional_cache_on_arguments(condition=cache_on)
553 539 def _fctx_size(_repo_id, _revision, _path):
554 540 repo = self._factory.repo(wire)
555 541 ctx = self._get_ctx(repo, commit_id)
556 542 fctx = ctx.filectx(path)
557 543 return fctx.size()
558 544 return _fctx_size(repo_id, commit_id, path)
559 545
560 546 @reraise_safe_exceptions
561 547 def get_all_commit_ids(self, wire, name):
562 548 cache_on, context_uid, repo_id = self._cache_on(wire)
563 549 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 550 def _get_all_commit_ids(_context_uid, _repo_id, _name):
565 551 repo = self._factory.repo(wire)
566 552 repo = repo.filtered(name)
567 553 revs = map(lambda x: hex(x[7]), repo.changelog.index)
568 554 return revs
569 555 return _get_all_commit_ids(context_uid, repo_id, name)
570 556
571 557 @reraise_safe_exceptions
572 558 def get_config_value(self, wire, section, name, untrusted=False):
573 559 repo = self._factory.repo(wire)
574 560 return repo.ui.config(section, name, untrusted=untrusted)
575 561
576 562 @reraise_safe_exceptions
577 563 def is_large_file(self, wire, commit_id, path):
578 564 cache_on, context_uid, repo_id = self._cache_on(wire)
579 565 @self.region.conditional_cache_on_arguments(condition=cache_on)
580 566 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
581 567 return largefiles.lfutil.isstandin(path)
582 568
583 569 return _is_large_file(context_uid, repo_id, commit_id, path)
584 570
585 571 @reraise_safe_exceptions
586 572 def is_binary(self, wire, revision, path):
587 573 cache_on, context_uid, repo_id = self._cache_on(wire)
588 574
589 575 @self.region.conditional_cache_on_arguments(condition=cache_on)
590 576 def _is_binary(_repo_id, _sha, _path):
591 577 repo = self._factory.repo(wire)
592 578 ctx = self._get_ctx(repo, revision)
593 579 fctx = ctx.filectx(path)
594 580 return fctx.isbinary()
595 581
596 582 return _is_binary(repo_id, revision, path)
597 583
598 584 @reraise_safe_exceptions
599 585 def in_largefiles_store(self, wire, sha):
600 586 repo = self._factory.repo(wire)
601 587 return largefiles.lfutil.instore(repo, sha)
602 588
603 589 @reraise_safe_exceptions
604 590 def in_user_cache(self, wire, sha):
605 591 repo = self._factory.repo(wire)
606 592 return largefiles.lfutil.inusercache(repo.ui, sha)
607 593
608 594 @reraise_safe_exceptions
609 595 def store_path(self, wire, sha):
610 596 repo = self._factory.repo(wire)
611 597 return largefiles.lfutil.storepath(repo, sha)
612 598
613 599 @reraise_safe_exceptions
614 600 def link(self, wire, sha, path):
615 601 repo = self._factory.repo(wire)
616 602 largefiles.lfutil.link(
617 603 largefiles.lfutil.usercachepath(repo.ui, sha), path)
618 604
619 605 @reraise_safe_exceptions
620 606 def localrepository(self, wire, create=False):
621 607 self._factory.repo(wire, create=create)
622 608
623 609 @reraise_safe_exceptions
624 610 def lookup(self, wire, revision, both):
625 611 cache_on, context_uid, repo_id = self._cache_on(wire)
626 612 @self.region.conditional_cache_on_arguments(condition=cache_on)
627 613 def _lookup(_context_uid, _repo_id, _revision, _both):
628 614
629 615 repo = self._factory.repo(wire)
630 616 rev = _revision
631 617 if isinstance(rev, int):
632 618 # NOTE(marcink):
633 619 # since Mercurial doesn't support negative indexes properly
634 620 # we need to shift by one to get the proper index, e.g.
635 621 # repo[-1] => repo[-2]
636 622 # repo[0] => repo[-1]
637 623 if rev <= 0:
638 624 rev = rev - 1
639 625 try:
640 626 ctx = self._get_ctx(repo, rev)
641 627 except (TypeError, RepoLookupError) as e:
642 628 e._org_exc_tb = traceback.format_exc()
643 629 raise exceptions.LookupException(e)(rev)
644 630 except LookupError as e:
645 631 e._org_exc_tb = traceback.format_exc()
646 632 raise exceptions.LookupException(e)(e.name)
647 633
648 634 if not both:
649 635 return ctx.hex()
650 636
651 637 ctx = repo[ctx.hex()]
652 638 return ctx.hex(), ctx.rev()
653 639
654 640 return _lookup(context_uid, repo_id, revision, both)
655 641
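The negative-index shift inside `_lookup` is subtle; a standalone sketch of the same adjustment, with semantics taken from the NOTE above:

    def shift_rev(rev):
        # mirror of the adjustment in _lookup(): zero and negative
        # indexes are shifted down by one before Mercurial lookup,
        # e.g. repo[0] -> repo[-1], repo[-1] -> repo[-2]
        if isinstance(rev, int) and rev <= 0:
            return rev - 1
        return rev

    assert shift_rev(0) == -1
    assert shift_rev(-1) == -2
    assert shift_rev(5) == 5   # positive indexes pass through unchanged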
656 642 @reraise_safe_exceptions
657 643 def sync_push(self, wire, url):
658 644 if not self.check_url(url, wire['config']):
659 645 return
660 646
661 647 repo = self._factory.repo(wire)
662 648
663 649 # Disable any prompts for this repo
664 650 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
665 651
666 652 bookmarks = dict(repo._bookmarks).keys()
667 653 remote = peer(repo, {}, url)
668 654 # Disable any prompts for this remote
669 655 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
670 656
671 657 return exchange.push(
672 658 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
673 659
674 660 @reraise_safe_exceptions
675 661 def revision(self, wire, rev):
676 662 repo = self._factory.repo(wire)
677 663 ctx = self._get_ctx(repo, rev)
678 664 return ctx.rev()
679 665
680 666 @reraise_safe_exceptions
681 667 def rev_range(self, wire, commit_filter):
682 668 cache_on, context_uid, repo_id = self._cache_on(wire)
683 669
684 670 @self.region.conditional_cache_on_arguments(condition=cache_on)
685 671 def _rev_range(_context_uid, _repo_id, _filter):
686 672 repo = self._factory.repo(wire)
687 673 revisions = list(revrange(repo, commit_filter))
688 674 return revisions
689 675
690 676 return _rev_range(context_uid, repo_id, sorted(commit_filter))
691 677
692 678 @reraise_safe_exceptions
693 679 def rev_range_hash(self, wire, node):
694 680 repo = self._factory.repo(wire)
695 681
696 682 def get_revs(repo, rev_opt):
697 683 if rev_opt:
698 684 revs = revrange(repo, rev_opt)
699 685 if len(revs) == 0:
700 686 return (nullrev, nullrev)
701 687 return max(revs), min(revs)
702 688 else:
703 689 return len(repo) - 1, 0
704 690
705 691 stop, start = get_revs(repo, [node + ':'])
706 692 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
707 693 return revs
708 694
709 695 @reraise_safe_exceptions
710 696 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
711 697 other_path = kwargs.pop('other_path', None)
712 698
713 699 # case when we want to compare two independent repositories
714 700 if other_path and other_path != wire["path"]:
715 701 baseui = self._factory._create_config(wire["config"])
716 702 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
717 703 else:
718 704 repo = self._factory.repo(wire)
719 705 return list(repo.revs(rev_spec, *args))
720 706
721 707 @reraise_safe_exceptions
722 708 def verify(self, wire):
723 709 repo = self._factory.repo(wire)
724 710 baseui = self._factory._create_config(wire['config'])
725 711
726 712 baseui, output = patch_ui_message_output(baseui)
727 713
728 714 repo.ui = baseui
729 715 verify.verify(repo)
730 716 return output.getvalue()
731 717
732 718 @reraise_safe_exceptions
733 719 def hg_update_cache(self, wire):
734 720 repo = self._factory.repo(wire)
735 721 baseui = self._factory._create_config(wire['config'])
736 722 baseui, output = patch_ui_message_output(baseui)
737 723
738 724 repo.ui = baseui
739 725 with repo.wlock(), repo.lock():
740 726 repo.updatecaches(full=True)
741 727
742 728 return output.getvalue()
743 729
744 730 @reraise_safe_exceptions
745 731 def hg_rebuild_fn_cache(self, wire):
746 732 repo = self._factory.repo(wire)
747 733 baseui = self._factory._create_config(wire['config'])
748 734 baseui, output = patch_ui_message_output(baseui)
749 735
750 736 repo.ui = baseui
751 737
752 738 repair.rebuildfncache(baseui, repo)
753 739
754 740 return output.getvalue()
755 741
756 742 @reraise_safe_exceptions
757 743 def tags(self, wire):
758 744 cache_on, context_uid, repo_id = self._cache_on(wire)
759 745 @self.region.conditional_cache_on_arguments(condition=cache_on)
760 746 def _tags(_context_uid, _repo_id):
761 747 repo = self._factory.repo(wire)
762 748 return repo.tags()
763 749
764 750 return _tags(context_uid, repo_id)
765 751
766 752 @reraise_safe_exceptions
767 753 def update(self, wire, node=None, clean=False):
768 754 repo = self._factory.repo(wire)
769 755 baseui = self._factory._create_config(wire['config'])
770 756 commands.update(baseui, repo, node=node, clean=clean)
771 757
772 758 @reraise_safe_exceptions
773 759 def identify(self, wire):
774 760 repo = self._factory.repo(wire)
775 761 baseui = self._factory._create_config(wire['config'])
776 762 output = io.BytesIO()
777 763 baseui.write = output.write
778 764 # This is required to get a full node id
779 765 baseui.debugflag = True
780 766 commands.identify(baseui, repo, id=True)
781 767
782 768 return output.getvalue()
783 769
784 770 @reraise_safe_exceptions
785 771 def heads(self, wire, branch=None):
786 772 repo = self._factory.repo(wire)
787 773 baseui = self._factory._create_config(wire['config'])
788 774 output = io.BytesIO()
789 775
790 776 def write(data, **unused_kwargs):
791 777 output.write(data)
792 778
793 779 baseui.write = write
794 780 if branch:
795 781 args = [branch]
796 782 else:
797 783 args = []
798 784 commands.heads(baseui, repo, template='{node} ', *args)
799 785
800 786 return output.getvalue()
801 787
802 788 @reraise_safe_exceptions
803 789 def ancestor(self, wire, revision1, revision2):
804 790 repo = self._factory.repo(wire)
805 791 changelog = repo.changelog
806 792 lookup = repo.lookup
807 793 a = changelog.ancestor(lookup(revision1), lookup(revision2))
808 794 return hex(a)
809 795
810 796 @reraise_safe_exceptions
811 797 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
812 798 baseui = self._factory._create_config(wire["config"], hooks=hooks)
813 799 clone(baseui, source, dest, noupdate=not update_after_clone)
814 800
815 801 @reraise_safe_exceptions
816 802 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
817 803
818 804 repo = self._factory.repo(wire)
819 805 baseui = self._factory._create_config(wire['config'])
820 806 publishing = baseui.configbool('phases', 'publish')
821 807 if publishing:
822 808 new_commit = 'public'
823 809 else:
824 810 new_commit = 'draft'
825 811
826 812 def _filectxfn(_repo, ctx, path):
827 813 """
828 814 Marks the given path as added/changed/removed in the given _repo. This
829 815 is used by Mercurial's internal commit function.
830 816 """
831 817
832 818 # check if this path is removed
833 819 if path in removed:
834 820 # returning None is a way to mark node for removal
835 821 return None
836 822
837 823 # check if this path is added
838 824 for node in updated:
839 825 if node['path'] == path:
840 826 return memfilectx(
841 827 _repo,
842 828 changectx=ctx,
843 829 path=node['path'],
844 830 data=node['content'],
845 831 islink=False,
846 832 isexec=bool(node['mode'] & stat.S_IXUSR),
847 833 copysource=False)
848 834
849 835 raise exceptions.AbortException()(
850 836 "Given path haven't been marked as added, "
851 837 "changed or removed (%s)" % path)
852 838
853 839 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
854 840
855 841 commit_ctx = memctx(
856 842 repo=repo,
857 843 parents=parents,
858 844 text=message,
859 845 files=files,
860 846 filectxfn=_filectxfn,
861 847 user=user,
862 848 date=(commit_time, commit_timezone),
863 849 extra=extra)
864 850
865 851 n = repo.commitctx(commit_ctx)
866 852 new_id = hex(n)
867 853
868 854 return new_id
869 855
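For reference, `_filectxfn` above expects each `updated` entry to be a dict with `path`, `content`, and a POSIX `mode`; a hypothetical input shape (values invented):

    import stat

    updated = [{
        'path': 'docs/README.rst',
        'content': 'hello\n',
        'mode': 0o100644,   # owner-execute bit unset, so isexec is False
    }]
    removed = []
    files = [node['path'] for node in updated] + removed
    assert not bool(updated[0]['mode'] & stat.S_IXUSR)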
870 856 @reraise_safe_exceptions
871 857 def pull(self, wire, url, commit_ids=None):
872 858 repo = self._factory.repo(wire)
873 859 # Disable any prompts for this repo
874 860 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
875 861
876 862 remote = peer(repo, {}, url)
877 863 # Disable any prompts for this remote
878 864 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
879 865
880 866 if commit_ids:
881 867 commit_ids = [bin(commit_id) for commit_id in commit_ids]
882 868
883 869 return exchange.pull(
884 870 repo, remote, heads=commit_ids, force=None).cgresult
885 871
886 872 @reraise_safe_exceptions
887 873 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
888 874 repo = self._factory.repo(wire)
889 875 baseui = self._factory._create_config(wire['config'], hooks=hooks)
890 876
891 877 # Mercurial internally has a lot of logic that checks ONLY whether an
892 878 # option is defined; we therefore pass only the options that are set
893 879 opts = {}
894 880 if bookmark:
895 881 opts['bookmark'] = bookmark
896 882 if branch:
897 883 opts['branch'] = branch
898 884 if revision:
899 885 opts['rev'] = revision
900 886
901 887 commands.pull(baseui, repo, source, **opts)
902 888
903 889 @reraise_safe_exceptions
904 890 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
905 891 repo = self._factory.repo(wire)
906 892 baseui = self._factory._create_config(wire['config'], hooks=hooks)
907 893 commands.push(baseui, repo, dest=dest_path, rev=revisions,
908 894 new_branch=push_branches)
909 895
910 896 @reraise_safe_exceptions
911 897 def strip(self, wire, revision, update, backup):
912 898 repo = self._factory.repo(wire)
913 899 ctx = self._get_ctx(repo, revision)
914 900 hgext_strip(
915 901 repo.baseui, repo, ctx.node(), update=update, backup=backup)
916 902
917 903 @reraise_safe_exceptions
918 904 def get_unresolved_files(self, wire):
919 905 repo = self._factory.repo(wire)
920 906
921 907 log.debug('Calculating unresolved files for repo: %s', repo)
922 908 output = io.BytesIO()
923 909
924 910 def write(data, **unused_kwargs):
925 911 output.write(data)
926 912
927 913 baseui = self._factory._create_config(wire['config'])
928 914 baseui.write = write
929 915
930 916 commands.resolve(baseui, repo, list=True)
931 917 unresolved = output.getvalue().splitlines(False)
932 918 return unresolved
933 919
934 920 @reraise_safe_exceptions
935 921 def merge(self, wire, revision):
936 922 repo = self._factory.repo(wire)
937 923 baseui = self._factory._create_config(wire['config'])
938 924 repo.ui.setconfig('ui', 'merge', 'internal:dump')
939 925
940 926 # When sub repositories are used, mercurial prompts the user in
941 927 # case of merge conflicts or different sub repository sources. By
942 928 # setting the interactive flag to `False` mercurial doesn't prompt the
943 929 # user but instead uses a default value.
944 930 repo.ui.setconfig('ui', 'interactive', False)
945 931 commands.merge(baseui, repo, rev=revision)
946 932
947 933 @reraise_safe_exceptions
948 934 def merge_state(self, wire):
949 935 repo = self._factory.repo(wire)
950 936 repo.ui.setconfig('ui', 'merge', 'internal:dump')
951 937
952 938 # When sub repositories are used, mercurial prompts the user in
953 939 # case of merge conflicts or different sub repository sources. By
954 940 # setting the interactive flag to `False` mercurial doesn't prompt the
955 941 # user but instead uses a default value.
956 942 repo.ui.setconfig('ui', 'interactive', False)
957 943 ms = hg_merge.mergestate(repo)
958 944 return [x for x in ms.unresolved()]
959 945
960 946 @reraise_safe_exceptions
961 947 def commit(self, wire, message, username, close_branch=False):
962 948 repo = self._factory.repo(wire)
963 949 baseui = self._factory._create_config(wire['config'])
964 950 repo.ui.setconfig('ui', 'username', username)
965 951 commands.commit(baseui, repo, message=message, close_branch=close_branch)
966 952
967 953 @reraise_safe_exceptions
968 954 def rebase(self, wire, source=None, dest=None, abort=False):
969 955 repo = self._factory.repo(wire)
970 956 baseui = self._factory._create_config(wire['config'])
971 957 repo.ui.setconfig('ui', 'merge', 'internal:dump')
972 958 # When sub repositories are used, mercurial prompts the user in
973 959 # case of merge conflicts or different sub repository sources. By
974 960 # setting the interactive flag to `False` mercurial doesn't prompt the
975 961 # user but instead uses a default value.
976 962 repo.ui.setconfig('ui', 'interactive', False)
977 963 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
978 964
979 965 @reraise_safe_exceptions
980 966 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
981 967 repo = self._factory.repo(wire)
982 968 ctx = self._get_ctx(repo, revision)
983 969 node = ctx.node()
984 970
985 971 date = (tag_time, tag_timezone)
986 972 try:
987 973 hg_tag.tag(repo, name, node, message, local, user, date)
988 974 except Abort as e:
989 975 log.exception("Tag operation aborted")
990 976 # the exception can contain unicode, so we pass a safe repr instead
991 977 raise exceptions.AbortException(e)(repr(e))
992 978
993 979 @reraise_safe_exceptions
994 980 def bookmark(self, wire, bookmark, revision=None):
995 981 repo = self._factory.repo(wire)
996 982 baseui = self._factory._create_config(wire['config'])
997 983 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
998 984
999 985 @reraise_safe_exceptions
1000 986 def install_hooks(self, wire, force=False):
1001 987 # we don't need any special hooks for Mercurial
1002 988 pass
1003 989
1004 990 @reraise_safe_exceptions
1005 991 def get_hooks_info(self, wire):
1006 992 return {
1007 993 'pre_version': vcsserver.__version__,
1008 994 'post_version': vcsserver.__version__,
1009 995 }
996
997 @reraise_safe_exceptions
998 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
999 archive_dir_name, commit_id):
1000
1001 def file_walker(_commit_id, path):
1002 repo = self._factory.repo(wire)
1003 ctx = repo[_commit_id]
1004 is_root = path in ['', '/']
1005 if is_root:
1006 matcher = alwaysmatcher(badfn=None)
1007 else:
1008 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1009 file_iter = ctx.manifest().walk(matcher)
1010
1011 for fn in file_iter:
1012 file_path = fn
1013 flags = ctx.flags(fn)
1014 mode = 0o755 if b'x' in flags else 0o644
1015 is_link = b'l' in flags
1016
1017 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1018
1019 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1020 archive_dir_name, commit_id)
1021
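Two small details of `file_walker` above are worth spelling out: the manifest flags string drives both the archive mode and the symlink marker. An equivalent standalone helper (assuming `flags` is the manifest flags string, e.g. b'x' or b'l'):

    def node_attrs(flags):
        # same result as the mode/is_link derivation in file_walker()
        mode = 0o755 if b'x' in flags else 0o644
        is_link = b'l' in flags
        return mode, is_link

    assert node_attrs(b'x') == (0o755, False)
    assert node_attrs(b'l') == (0o644, True)
    assert node_attrs(b'') == (0o644, False)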
@@ -1,79 +1,79 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 from mercurial import demandimport
24 24 # patch demandimport, due to a bug in mercurial that always triggers
25 25 # demandimport.enable()
26 26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import subrepoutil
40 40 from mercurial import tags as hg_tag
41
41 from mercurial import util as hgutil
42 42 from mercurial.commands import clone, nullid, pull
43 43 from mercurial.context import memctx, memfilectx
44 44 from mercurial.error import (
45 45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 46 RequirementError, ProgrammingError)
47 47 from mercurial.hgweb import hgweb_mod
48 48 from mercurial.localrepo import instance
49 from mercurial.match import match
49 from mercurial.match import match, alwaysmatcher, patternmatcher
50 50 from mercurial.mdiff import diffopts
51 51 from mercurial.node import bin, hex
52 52 from mercurial.encoding import tolocal
53 53 from mercurial.discovery import findcommonoutgoing
54 54 from mercurial.hg import peer
55 55 from mercurial.httppeer import makepeer
56 56 from mercurial.util import url as hg_url
57 57 from mercurial.scmutil import revrange, revsymbol
58 58 from mercurial.node import nullrev
59 59 from mercurial import exchange
60 60 from hgext import largefiles
61 61
62 62 # these auth handlers are patched for a python 2.6.5 bug causing
63 63 # infinite looping when given invalid resources
64 64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65 65
66 66
67 67 def get_ctx(repo, ref):
68 68 try:
69 69 ctx = repo[ref]
70 70 except ProgrammingError:
71 71 # we're unable to find the rev using a regular lookup, so we fall back
72 72 # to the slower, but backward-compatible, revsymbol usage
73 73 ctx = revsymbol(repo, ref)
74 74 except (LookupError, RepoLookupError):
75 75 # Similar case as above but only for refs that are not numeric
76 76 if isinstance(ref, (int, long)):
77 77 raise
78 78 ctx = revsymbol(repo, ref)
79 79 return ctx
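Usage-wise, `get_ctx` gives symbolic refs a second chance via `revsymbol` while letting numeric refs fail fast; an illustrative call pattern (repo object assumed):

    ctx = get_ctx(repo, 'tip')        # direct lookup usually succeeds
    ctx = get_ctx(repo, 'my-branch')  # may fall back to revsymbol()
    # get_ctx(repo, 10 ** 9) re-raises: numeric refs never take the fallback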
@@ -1,688 +1,692 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 import resource
27 28 from itertools import chain
28 29 from cStringIO import StringIO
29 30
30 31 import simplejson as json
31 32 import msgpack
32 33 from pyramid.config import Configurator
33 34 from pyramid.settings import asbool, aslist
34 35 from pyramid.wsgi import wsgiapp
35 36 from pyramid.compat import configparser
36 37 from pyramid.response import Response
37 38
38 39 from vcsserver.utils import safe_int
39 40
40 41 log = logging.getLogger(__name__)
41 42
42 43 # due to Mercurial/glibc2.27 problems we need to detect whether locale settings
43 44 # are causing problems, and fall back to LC_ALL=C in case they do
44 45
45 46 try:
46 47 locale.setlocale(locale.LC_ALL, '')
47 48 except locale.Error as e:
48 49 log.error(
49 50 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
50 51 os.environ['LC_ALL'] = 'C'
51 52
52 53 import vcsserver
53 54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
54 55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
55 56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
56 57 from vcsserver.echo_stub.echo_app import EchoApp
57 58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
58 59 from vcsserver.lib.exc_tracking import store_exception
59 60 from vcsserver.server import VcsServer
60 61
61 62 try:
62 63 from vcsserver.git import GitFactory, GitRemote
63 64 except ImportError:
64 65 GitFactory = None
65 66 GitRemote = None
66 67
67 68 try:
68 69 from vcsserver.hg import MercurialFactory, HgRemote
69 70 except ImportError:
70 71 MercurialFactory = None
71 72 HgRemote = None
72 73
73 74 try:
74 75 from vcsserver.svn import SubversionFactory, SvnRemote
75 76 except ImportError:
76 77 SubversionFactory = None
77 78 SvnRemote = None
78 79
79 80
80 81 def _is_request_chunked(environ):
81 82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
82 83 return stream
83 84
84 85
85 86 def _int_setting(settings, name, default):
86 87 settings[name] = int(settings.get(name, default))
87 88 return settings[name]
88 89
89 90
90 91 def _bool_setting(settings, name, default):
91 92 input_val = settings.get(name, default)
92 93 if isinstance(input_val, unicode):
93 94 input_val = input_val.encode('utf8')
94 95 settings[name] = asbool(input_val)
95 96 return settings[name]
96 97
97 98
98 99 def _list_setting(settings, name, default):
99 100 raw_value = settings.get(name, default)
100 101
101 102 # We assume the value uses Pyramid's space/newline separation.
102 103 settings[name] = aslist(raw_value)
103 104 return settings[name]
104 105
105 106
106 107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
107 108 value = settings.get(name, default)
108 109
109 110 if default_when_empty and not value:
110 111 # use default value when value is empty
111 112 value = default
112 113
113 114 if lower:
114 115 value = value.lower()
115 116 settings[name] = value
116 117 return settings[name]
117 118
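A quick illustration of the three coercion helpers above, using an invented settings dict:

    settings = {'dev.use_echo_app': 'True', 'workers': '4', 'locale': ''}
    assert _bool_setting(settings, 'dev.use_echo_app', 'false') is True
    assert _int_setting(settings, 'workers', 2) == 4
    # an empty value plus default_when_empty falls back to the default,
    # lower-cased because lower=True by default
    assert _string_setting(settings, 'locale', 'C', default_when_empty=True) == 'c'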
118 119
119 120 class VCS(object):
120 121 def __init__(self, locale_conf=None, cache_config=None):
121 122 self.locale = locale_conf
122 123 self.cache_config = cache_config
123 124 self._configure_locale()
124 125
126 maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
127 log.info('Max file descriptors value: %s', maxfd)
128
125 129 if GitFactory and GitRemote:
126 130 git_factory = GitFactory()
127 131 self._git_remote = GitRemote(git_factory)
128 132 else:
129 133 log.info("Git client import failed")
130 134
131 135 if MercurialFactory and HgRemote:
132 136 hg_factory = MercurialFactory()
133 137 self._hg_remote = HgRemote(hg_factory)
134 138 else:
135 139 log.info("Mercurial client import failed")
136 140
137 141 if SubversionFactory and SvnRemote:
138 142 svn_factory = SubversionFactory()
139 143
140 144 # hg factory is used for svn url validation
141 145 hg_factory = MercurialFactory()
142 146 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
143 147 else:
144 148 log.info("Subversion client import failed")
145 149
146 150 self._vcsserver = VcsServer()
147 151
148 152 def _configure_locale(self):
149 153 if self.locale:
150 154 log.info('Setting locale `LC_ALL` to %s', self.locale)
151 155 else:
152 156 log.info(
153 157 'Configuring locale subsystem based on environment variables')
154 158 try:
155 159 # If self.locale is the empty string, then the locale
156 160 # module will use the environment variables. See the
157 161 # documentation of the package `locale`.
158 162 locale.setlocale(locale.LC_ALL, self.locale)
159 163
160 164 language_code, encoding = locale.getlocale()
161 165 log.info(
162 166 'Locale set to language code "%s" with encoding "%s".',
163 167 language_code, encoding)
164 168 except locale.Error:
165 169 log.exception(
166 170 'Cannot set locale, not configuring the locale system')
167 171
168 172
169 173 class WsgiProxy(object):
170 174 def __init__(self, wsgi):
171 175 self.wsgi = wsgi
172 176
173 177 def __call__(self, environ, start_response):
174 178 input_data = environ['wsgi.input'].read()
175 179 input_data = msgpack.unpackb(input_data)
176 180
177 181 error = None
178 182 try:
179 183 data, status, headers = self.wsgi.handle(
180 184 input_data['environment'], input_data['input_data'],
181 185 *input_data['args'], **input_data['kwargs'])
182 186 except Exception as e:
183 187 data, status, headers = [], None, None
184 188 error = {
185 189 'message': str(e),
186 190 '_vcs_kind': getattr(e, '_vcs_kind', None)
187 191 }
188 192
189 193 start_response(200, {})
190 194 return self._iterator(error, status, headers, data)
191 195
192 196 def _iterator(self, error, status, headers, data):
193 197 initial_data = [
194 198 error,
195 199 status,
196 200 headers,
197 201 ]
198 202
199 203 for d in chain(initial_data, data):
200 204 yield msgpack.packb(d)
201 205
202 206
203 207 def not_found(request):
204 208 return {'status': '404 NOT FOUND'}
205 209
206 210
207 211 class VCSViewPredicate(object):
208 212 def __init__(self, val, config):
209 213 self.remotes = val
210 214
211 215 def text(self):
212 216 return 'vcs view method = %s' % (self.remotes.keys(),)
213 217
214 218 phash = text
215 219
216 220 def __call__(self, context, request):
217 221 """
218 222 View predicate that returns true if given backend is supported by
219 223 defined remotes.
220 224 """
221 225 backend = request.matchdict.get('backend')
222 226 return backend in self.remotes
223 227
224 228
225 229 class HTTPApplication(object):
226 230 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
227 231
228 232 remote_wsgi = remote_wsgi
229 233 _use_echo_app = False
230 234
231 235 def __init__(self, settings=None, global_config=None):
232 236 self._sanitize_settings_and_apply_defaults(settings)
233 237
234 238 self.config = Configurator(settings=settings)
235 239 self.global_config = global_config
236 240 self.config.include('vcsserver.lib.rc_cache')
237 241
238 242 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239 243 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
240 244 self._remotes = {
241 245 'hg': vcs._hg_remote,
242 246 'git': vcs._git_remote,
243 247 'svn': vcs._svn_remote,
244 248 'server': vcs._vcsserver,
245 249 }
246 250 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
247 251 self._use_echo_app = True
248 252 log.warning("Using EchoApp for VCS operations.")
249 253 self.remote_wsgi = remote_wsgi_stub
250 254
251 255 self._configure_settings(global_config, settings)
252 256 self._configure()
253 257
254 258 def _configure_settings(self, global_config, app_settings):
255 259 """
256 260 Configure the settings module.
257 261 """
258 262 settings_merged = global_config.copy()
259 263 settings_merged.update(app_settings)
260 264
261 265 git_path = app_settings.get('git_path', None)
262 266 if git_path:
263 267 settings.GIT_EXECUTABLE = git_path
264 268 binary_dir = app_settings.get('core.binary_dir', None)
265 269 if binary_dir:
266 270 settings.BINARY_DIR = binary_dir
267 271
268 272 # Store the settings to make them available to other modules.
269 273 vcsserver.PYRAMID_SETTINGS = settings_merged
270 274 vcsserver.CONFIG = settings_merged
271 275
272 276 def _sanitize_settings_and_apply_defaults(self, settings):
273 277 temp_store = tempfile.gettempdir()
274 278 default_cache_dir = os.path.join(temp_store, 'rc_cache')
275 279
276 280 # save default, cache dir, and use it for all backends later.
277 281 default_cache_dir = _string_setting(
278 282 settings,
279 283 'cache_dir',
280 284 default_cache_dir, lower=False, default_when_empty=True)
281 285
282 286 # ensure we have our dir created
283 287 if not os.path.isdir(default_cache_dir):
284 288 os.makedirs(default_cache_dir, mode=0o755)
285 289
286 290 # exception store cache
287 291 _string_setting(
288 292 settings,
289 293 'exception_tracker.store_path',
290 294 temp_store, lower=False, default_when_empty=True)
291 295
292 296 # repo_object cache
293 297 _string_setting(
294 298 settings,
295 299 'rc_cache.repo_object.backend',
296 300 'dogpile.cache.rc.file_namespace', lower=False)
297 301 _int_setting(
298 302 settings,
299 303 'rc_cache.repo_object.expiration_time',
300 304 30 * 24 * 60 * 60)
301 305 _string_setting(
302 306 settings,
303 307 'rc_cache.repo_object.arguments.filename',
304 308 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
305 309
306 310 def _configure(self):
307 311 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
308 312
309 313 self.config.add_route('service', '/_service')
310 314 self.config.add_route('status', '/status')
311 315 self.config.add_route('hg_proxy', '/proxy/hg')
312 316 self.config.add_route('git_proxy', '/proxy/git')
313 317
314 318 # rpc methods
315 319 self.config.add_route('vcs', '/{backend}')
316 320
317 321 # streaming rpc remote methods
318 322 self.config.add_route('vcs_stream', '/{backend}/stream')
319 323
320 324 # vcs operations clone/push as streaming
321 325 self.config.add_route('stream_git', '/stream/git/*repo_name')
322 326 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
323 327
324 328 self.config.add_view(self.status_view, route_name='status', renderer='json')
325 329 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
326 330
327 331 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
328 332 self.config.add_view(self.git_proxy(), route_name='git_proxy')
329 333 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
330 334 vcs_view=self._remotes)
331 335 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
332 336 vcs_view=self._remotes)
333 337
334 338 self.config.add_view(self.hg_stream(), route_name='stream_hg')
335 339 self.config.add_view(self.git_stream(), route_name='stream_git')
336 340
337 341 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
338 342
339 343 self.config.add_notfound_view(not_found, renderer='json')
340 344
341 345 self.config.add_view(self.handle_vcs_exception, context=Exception)
342 346
343 347 self.config.add_tween(
344 348 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
345 349 )
346 350 self.config.add_request_method(
347 351 'vcsserver.lib.request_counter.get_request_counter',
348 352 'request_count')
349 353
350 354 def wsgi_app(self):
351 355 return self.config.make_wsgi_app()
352 356
353 357 def _vcs_view_params(self, request):
354 358 remote = self._remotes[request.matchdict['backend']]
355 359 payload = msgpack.unpackb(request.body, use_list=True)
356 360 method = payload.get('method')
357 361 params = payload['params']
358 362 wire = params.get('wire')
359 363 args = params.get('args')
360 364 kwargs = params.get('kwargs')
361 365 context_uid = None
362 366
363 367 if wire:
364 368 try:
365 369 wire['context'] = context_uid = uuid.UUID(wire['context'])
366 370 except KeyError:
367 371 pass
368 372 args.insert(0, wire)
369 373 repo_state_uid = wire.get('repo_state_uid') if wire else None
370 374
371 375 # NOTE(marcink): trading complexity for slight performance
372 376 if log.isEnabledFor(logging.DEBUG):
373 377 no_args_methods = [
374 'archive_repo'
378
375 379 ]
376 380 if method in no_args_methods:
377 381 call_args = ''
378 382 else:
379 383 call_args = args[1:]
380 384
381 385 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
382 386 method, call_args, kwargs, context_uid, repo_state_uid)
383 387
384 388 return payload, remote, method, args, kwargs
385 389
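The envelope unpacked by `_vcs_view_params` is plain msgpack; a hypothetical `ctx_user` call (repo path and context id invented):

    import uuid
    import msgpack

    payload = {
        'id': 1,
        'method': 'ctx_user',
        'params': {
            'wire': {'path': '/repos/demo', 'context': str(uuid.uuid4())},
            'args': ['tip'],   # wire is prepended to args server-side
            'kwargs': {},
        },
    }
    body = msgpack.packb(payload)  # POST body for the /hg route
    assert msgpack.unpackb(body, use_list=True)['method'] == 'ctx_user'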
386 390 def vcs_view(self, request):
387 391
388 392 payload, remote, method, args, kwargs = self._vcs_view_params(request)
389 393 payload_id = payload.get('id')
390 394
391 395 try:
392 396 resp = getattr(remote, method)(*args, **kwargs)
393 397 except Exception as e:
394 398 exc_info = list(sys.exc_info())
395 399 exc_type, exc_value, exc_traceback = exc_info
396 400
397 401 org_exc = getattr(e, '_org_exc', None)
398 402 org_exc_name = None
399 403 org_exc_tb = ''
400 404 if org_exc:
401 405 org_exc_name = org_exc.__class__.__name__
402 406 org_exc_tb = getattr(e, '_org_exc_tb', '')
403 407 # replace our "faked" exception with the original one
404 408 exc_info[0] = org_exc.__class__
405 409 exc_info[1] = org_exc
406 410
407 411 should_store_exc = True
408 412 if org_exc:
409 413 def get_exc_fqn(_exc_obj):
410 414 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
411 415 return module_name + '.' + org_exc_name
412 416
413 417 exc_fqn = get_exc_fqn(org_exc)
414 418
415 419 if exc_fqn in ['mercurial.error.RepoLookupError',
416 420 'vcsserver.exceptions.RefNotFoundException']:
417 421 should_store_exc = False
418 422
419 423 if should_store_exc:
420 424 store_exception(id(exc_info), exc_info)
421 425
422 426 tb_info = ''.join(
423 427 traceback.format_exception(exc_type, exc_value, exc_traceback))
424 428
425 429 type_ = e.__class__.__name__
426 430 if type_ not in self.ALLOWED_EXCEPTIONS:
427 431 type_ = None
428 432
429 433 resp = {
430 434 'id': payload_id,
431 435 'error': {
432 436 'message': e.message,
433 437 'traceback': tb_info,
434 438 'org_exc': org_exc_name,
435 439 'org_exc_tb': org_exc_tb,
436 440 'type': type_
437 441 }
438 442 }
439 443 try:
440 444 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
441 445 except AttributeError:
442 446 pass
443 447 else:
444 448 resp = {
445 449 'id': payload_id,
446 450 'result': resp
447 451 }
448 452
449 453 return resp
450 454
451 455 def vcs_stream_view(self, request):
452 456 payload, remote, method, args, kwargs = self._vcs_view_params(request)
453 457 # this method has a `stream:` marker; we remove it here
454 458 method = method.split('stream:')[-1]
455 459 chunk_size = safe_int(payload.get('chunk_size')) or 4096
456 460
457 461 try:
458 462 resp = getattr(remote, method)(*args, **kwargs)
459 463 except Exception:
460 464 raise
461 465
462 466 def get_chunked_data(method_resp):
463 467 stream = StringIO(method_resp)
464 468 while 1:
465 469 chunk = stream.read(chunk_size)
466 470 if not chunk:
467 471 break
468 472 yield chunk
469 473
470 474 response = Response(app_iter=get_chunked_data(resp))
471 475 response.content_type = 'application/octet-stream'
472 476
473 477 return response
474 478
475 479 def status_view(self, request):
476 480 import vcsserver
477 481 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
478 482 'pid': os.getpid()}
479 483
480 484 def service_view(self, request):
481 485 import vcsserver
482 486
483 487 payload = msgpack.unpackb(request.body, use_list=True)
484 488 server_config, app_config = {}, {}
485 489
486 490 try:
487 491 path = self.global_config['__file__']
488 492 config = configparser.RawConfigParser()
489 493
490 494 config.read(path)
491 495
492 496 if config.has_section('server:main'):
493 497 server_config = dict(config.items('server:main'))
494 498 if config.has_section('app:main'):
495 499 app_config = dict(config.items('app:main'))
496 500
497 501 except Exception:
498 502 log.exception('Failed to read .ini file for display')
499 503
500 504 environ = os.environ.items()
501 505
502 506 resp = {
503 507 'id': payload.get('id'),
504 508 'result': dict(
505 509 version=vcsserver.__version__,
506 510 config=server_config,
507 511 app_config=app_config,
508 512 environ=environ,
509 513 payload=payload,
510 514 )
511 515 }
512 516 return resp
513 517
514 518 def _msgpack_renderer_factory(self, info):
515 519 def _render(value, system):
516 520 request = system.get('request')
517 521 if request is not None:
518 522 response = request.response
519 523 ct = response.content_type
520 524 if ct == response.default_content_type:
521 525 response.content_type = 'application/x-msgpack'
522 526 return msgpack.packb(value)
523 527 return _render
524 528
525 529 def set_env_from_config(self, environ, config):
526 530 dict_conf = {}
527 531 try:
528 532 for elem in config:
529 533 if elem[0] == 'rhodecode':
530 534 dict_conf = json.loads(elem[2])
531 535 break
532 536 except Exception:
533 537 log.exception('Failed to fetch SCM CONFIG')
534 538 return
535 539
536 540 username = dict_conf.get('username')
537 541 if username:
538 542 environ['REMOTE_USER'] = username
539 543 # mercurial specific, some extension api rely on this
540 544 environ['HGUSER'] = username
541 545
542 546 ip = dict_conf.get('ip')
543 547 if ip:
544 548 environ['REMOTE_HOST'] = ip
545 549
546 550 if _is_request_chunked(environ):
547 551 # set the compatibility flag for webob
548 552 environ['wsgi.input_terminated'] = True
549 553
550 554 def hg_proxy(self):
551 555 @wsgiapp
552 556 def _hg_proxy(environ, start_response):
553 557 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
554 558 return app(environ, start_response)
555 559 return _hg_proxy
556 560
557 561 def git_proxy(self):
558 562 @wsgiapp
559 563 def _git_proxy(environ, start_response):
560 564 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
561 565 return app(environ, start_response)
562 566 return _git_proxy
563 567
564 568 def hg_stream(self):
565 569 if self._use_echo_app:
566 570 @wsgiapp
567 571 def _hg_stream(environ, start_response):
568 572 app = EchoApp('fake_path', 'fake_name', None)
569 573 return app(environ, start_response)
570 574 return _hg_stream
571 575 else:
572 576 @wsgiapp
573 577 def _hg_stream(environ, start_response):
574 578 log.debug('http-app: handling hg stream')
575 579 repo_path = environ['HTTP_X_RC_REPO_PATH']
576 580 repo_name = environ['HTTP_X_RC_REPO_NAME']
577 581 packed_config = base64.b64decode(
578 582 environ['HTTP_X_RC_REPO_CONFIG'])
579 583 config = msgpack.unpackb(packed_config)
580 584 app = scm_app.create_hg_wsgi_app(
581 585 repo_path, repo_name, config)
582 586
583 587 # Consistent path information for hgweb
584 588 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
585 589 environ['REPO_NAME'] = repo_name
586 590 self.set_env_from_config(environ, config)
587 591
588 592 log.debug('http-app: starting app handler '
589 593 'with %s and process request', app)
590 594 return app(environ, ResponseFilter(start_response))
591 595 return _hg_stream
592 596
593 597 def git_stream(self):
594 598 if self._use_echo_app:
595 599 @wsgiapp
596 600 def _git_stream(environ, start_response):
597 601 app = EchoApp('fake_path', 'fake_name', None)
598 602 return app(environ, start_response)
599 603 return _git_stream
600 604 else:
601 605 @wsgiapp
602 606 def _git_stream(environ, start_response):
603 607 log.debug('http-app: handling git stream')
604 608 repo_path = environ['HTTP_X_RC_REPO_PATH']
605 609 repo_name = environ['HTTP_X_RC_REPO_NAME']
606 610 packed_config = base64.b64decode(
607 611 environ['HTTP_X_RC_REPO_CONFIG'])
608 612 config = msgpack.unpackb(packed_config)
609 613
610 614 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
611 615 self.set_env_from_config(environ, config)
612 616
613 617 content_type = environ.get('CONTENT_TYPE', '')
614 618
615 619 path = environ['PATH_INFO']
616 620 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
617 621 log.debug(
618 622 'LFS: Detecting if request `%s` is LFS server path based '
619 623 'on content type:`%s`, is_lfs:%s',
620 624 path, content_type, is_lfs_request)
621 625
622 626 if not is_lfs_request:
623 627 # fallback detection by path
624 628 if GIT_LFS_PROTO_PAT.match(path):
625 629 is_lfs_request = True
626 630 log.debug(
627 631 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
628 632 path, is_lfs_request)
629 633
630 634 if is_lfs_request:
631 635 app = scm_app.create_git_lfs_wsgi_app(
632 636 repo_path, repo_name, config)
633 637 else:
634 638 app = scm_app.create_git_wsgi_app(
635 639 repo_path, repo_name, config)
636 640
637 641 log.debug('http-app: starting app handler '
638 642 'with %s and process request', app)
639 643
640 644 return app(environ, start_response)
641 645
642 646 return _git_stream
643 647
644 648 def handle_vcs_exception(self, exception, request):
645 649 _vcs_kind = getattr(exception, '_vcs_kind', '')
646 650 if _vcs_kind == 'repo_locked':
647 651 # Get custom repo-locked status code if present.
648 652 status_code = request.headers.get('X-RC-Locked-Status-Code')
649 653 return HTTPRepoLocked(
650 654 title=exception.message, status_code=status_code)
651 655
652 656 elif _vcs_kind == 'repo_branch_protected':
653 657 # Get custom repo-branch-protected status code if present.
654 658 return HTTPRepoBranchProtected(title=exception.message)
655 659
656 660 exc_info = request.exc_info
657 661 store_exception(id(exc_info), exc_info)
658 662
659 663 traceback_info = 'unavailable'
660 664 if request.exc_info:
661 665 exc_type, exc_value, exc_tb = request.exc_info
662 666 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
663 667
664 668 log.error(
665 669 'error occurred handling this request for path: %s, \n tb: %s',
666 670 request.path, traceback_info)
667 671 raise exception
668 672
669 673
670 674 class ResponseFilter(object):
671 675
672 676 def __init__(self, start_response):
673 677 self._start_response = start_response
674 678
675 679 def __call__(self, status, response_headers, exc_info=None):
676 680 headers = tuple(
677 681 (h, v) for h, v in response_headers
678 682 if not wsgiref.util.is_hop_by_hop(h))
679 683 return self._start_response(status, headers, exc_info)
680 684
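`ResponseFilter` exists because a WSGI gateway must not forward hop-by-hop headers; the same filter in miniature:

    import wsgiref.util

    headers = [('Content-Type', 'text/plain'), ('Connection', 'keep-alive')]
    kept = [(h, v) for h, v in headers if not wsgiref.util.is_hop_by_hop(h)]
    assert kept == [('Content-Type', 'text/plain')]  # 'Connection' is dropped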
681 685
682 686 def main(global_config, **settings):
683 687 if MercurialFactory:
684 688 hgpatches.patch_largefiles_capabilities()
685 689 hgpatches.patch_subrepo_type_mapping()
686 690
687 691 app = HTTPApplication(settings=settings, global_config=global_config)
688 692 return app.wsgi_app()
@@ -1,72 +1,75 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 from dogpile.cache import register_backend
20 20
21 21 register_backend(
22 22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 23 "LRUMemoryBackend")
24 24
25 25 register_backend(
26 26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
27 27 "FileNamespaceBackend")
28 28
29 29 register_backend(
30 30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
31 31 "RedisPickleBackend")
32 32
33 33 register_backend(
34 34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
35 35 "RedisMsgPackBackend")
36 36
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40 from . import region_meta
41 41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
42 42
43 43
44 44 def configure_dogpile_cache(settings):
45 45 cache_dir = settings.get('cache_dir')
46 46 if cache_dir:
47 47 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
48 48
49 49 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
50 50
51 51 # inspect available namespaces
52 52 avail_regions = set()
53 53 for key in rc_cache_data.keys():
54 54 namespace_name = key.split('.', 1)[0]
55 55 avail_regions.add(namespace_name)
56 56 log.debug('dogpile: found following cache regions: %s', avail_regions)
57 57
58 58 # register them into namespace
59 59 for region_name in avail_regions:
60 60 new_region = make_region(
61 61 name=region_name,
62 62 function_key_generator=None
63 63 )
64 64
65 65 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
66 66 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
67 log.debug('dogpile: registering a new region %s[%s]', region_name, new_region.__dict__)
67 if log.isEnabledFor(logging.DEBUG):
68 region_args = dict(backend=new_region.actual_backend.__class__,
69 region_invalidator=new_region.region_invalidator.__class__)
70 log.debug('dogpile: registering a new region `%s` %s', region_name, region_args)
68 71 region_meta.dogpile_cache_regions[region_name] = new_region
69 72
70 73
71 74 def includeme(config):
72 75 configure_dogpile_cache(config.registry.settings)
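`configure_dogpile_cache` derives one region per `rc_cache.<name>.` settings prefix; a hypothetical minimal dict that registers a single `repo_object` region (paths invented):

    settings = {
        'cache_dir': '/tmp/rc_cache',
        'rc_cache.repo_object.backend': 'dogpile.cache.rc.file_namespace',
        'rc_cache.repo_object.expiration_time': '2592000',  # 30 days
        'rc_cache.repo_object.arguments.filename': '/tmp/rc_cache/vcsserver_cache_1',
    }
    configure_dogpile_cache(settings)  # regions land in region_meta.dogpile_cache_regions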
@@ -1,791 +1,856 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 import time
22 23 from urllib2 import URLError
23 24 import urlparse
24 25 import logging
25 26 import posixpath as vcspath
26 27 import StringIO
27 28 import urllib
28 29 import traceback
29 30
30 31 import svn.client
31 32 import svn.core
32 33 import svn.delta
33 34 import svn.diff
34 35 import svn.fs
35 36 import svn.repos
36 37
37 38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver.base import RepoFactory, raise_from_original
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
39 41 from vcsserver.vcs_base import RemoteBase
40 42
41 43 log = logging.getLogger(__name__)
42 44
43 45
44 46 svn_compatible_versions_map = {
45 47 'pre-1.4-compatible': '1.3',
46 48 'pre-1.5-compatible': '1.4',
47 49 'pre-1.6-compatible': '1.5',
48 50 'pre-1.8-compatible': '1.7',
49 51 'pre-1.9-compatible': '1.8',
50 52 }
51 53
52 54 current_compatible_version = '1.12'
53 55
54 56
55 57 def reraise_safe_exceptions(func):
56 58 """Decorator for converting svn exceptions to something neutral."""
57 59 def wrapper(*args, **kwargs):
58 60 try:
59 61 return func(*args, **kwargs)
60 62 except Exception as e:
61 63 if not hasattr(e, '_vcs_kind'):
62 64 log.exception("Unhandled exception in svn remote call")
63 65 raise_from_original(exceptions.UnhandledException(e))
64 66 raise
65 67 return wrapper
66 68
67 69
68 70 class SubversionFactory(RepoFactory):
69 71 repo_type = 'svn'
70 72
71 73 def _create_repo(self, wire, create, compatible_version):
72 74 path = svn.core.svn_path_canonicalize(wire['path'])
73 75 if create:
74 76 fs_config = {'compatible-version': current_compatible_version}
75 77 if compatible_version:
76 78
77 79 compatible_version_string = \
78 80 svn_compatible_versions_map.get(compatible_version) \
79 81 or compatible_version
80 82 fs_config['compatible-version'] = compatible_version_string
81 83
82 84 log.debug('Create SVN repo with config "%s"', fs_config)
83 85 repo = svn.repos.create(path, "", "", None, fs_config)
84 86 else:
85 87 repo = svn.repos.open(path)
86 88
87 89 log.debug('Got SVN object: %s', repo)
88 90 return repo
89 91
90 92 def repo(self, wire, create=False, compatible_version=None):
91 93 """
92 94 Get a repository instance for the given path.
93 95 """
94 96 return self._create_repo(wire, create, compatible_version)
95 97
96 98
97 99 NODE_TYPE_MAPPING = {
98 100 svn.core.svn_node_file: 'file',
99 101 svn.core.svn_node_dir: 'dir',
100 102 }
101 103
102 104
103 105 class SvnRemote(RemoteBase):
104 106
105 107 def __init__(self, factory, hg_factory=None):
106 108 self._factory = factory
107 109 # TODO: Remove once we do not use internal Mercurial objects anymore
108 110 # for subversion
109 111 self._hg_factory = hg_factory
110 112
111 113 @reraise_safe_exceptions
112 114 def discover_svn_version(self):
113 115 try:
114 116 import svn.core
115 117 svn_ver = svn.core.SVN_VERSION
116 118 except ImportError:
117 119 svn_ver = None
118 120 return svn_ver
119 121
120 122 @reraise_safe_exceptions
121 123 def is_empty(self, wire):
122 124
123 125 try:
124 126 return self.lookup(wire, -1) == 0
125 127 except Exception:
126 128 log.exception("failed to read object_store")
127 129 return False
128 130
129 131 def check_url(self, url, config_items):
130 132 # this can throw an exception if hgsubversion is not installed, but we detect this
131 133 from hgsubversion import svnrepo
132 134
133 135 baseui = self._hg_factory._create_config(config_items)
135 137 # the uuid function returns a valid UUID only for a proper repo;
136 138 # otherwise it throws an exception
136 138 try:
137 139 svnrepo.svnremoterepo(baseui, url).svn.uuid
138 140 except Exception:
139 141 tb = traceback.format_exc()
140 142 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
141 143 raise URLError(
142 144 '"%s" is not a valid Subversion source url.' % (url, ))
143 145 return True
144 146
145 147 def is_path_valid_repository(self, wire, path):
146 148
147 149 # NOTE(marcink): short-circuit the check for an SVN repo;
148 150 # repos.open might be expensive, but we have one cheap
149 151 # precondition we can use: checking for the 'format' file
150 152
151 153 if not os.path.isfile(os.path.join(path, 'format')):
152 154 return False
153 155
154 156 try:
155 157 svn.repos.open(path)
156 158 except svn.core.SubversionException:
157 159 tb = traceback.format_exc()
158 160 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
159 161 return False
160 162 return True
161 163
162 164 @reraise_safe_exceptions
163 165 def verify(self, wire,):
164 166 repo_path = wire['path']
165 167 if not self.is_path_valid_repository(wire, repo_path):
166 168 raise Exception(
167 169 "Path %s is not a valid Subversion repository." % repo_path)
168 170
169 171 cmd = ['svnadmin', 'info', repo_path]
170 172 stdout, stderr = subprocessio.run_command(cmd)
171 173 return stdout
172 174
173 175 def lookup(self, wire, revision):
174 176 if revision not in [-1, None, 'HEAD']:
175 177 raise NotImplementedError
176 178 repo = self._factory.repo(wire)
177 179 fs_ptr = svn.repos.fs(repo)
178 180 head = svn.fs.youngest_rev(fs_ptr)
179 181 return head
180 182
181 183 def lookup_interval(self, wire, start_ts, end_ts):
182 184 repo = self._factory.repo(wire)
183 185 fsobj = svn.repos.fs(repo)
184 186 start_rev = None
185 187 end_rev = None
186 188 if start_ts:
187 189 start_ts_svn = apr_time_t(start_ts)
188 190 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
189 191 else:
190 192 start_rev = 1
191 193 if end_ts:
192 194 end_ts_svn = apr_time_t(end_ts)
193 195 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
194 196 else:
195 197 end_rev = svn.fs.youngest_rev(fsobj)
196 198 return start_rev, end_rev
197 199
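A sketch of resolving the revision range for one calendar day through lookup_interval; `remote` and `wire` are hypothetical stand-ins for the real RPC plumbing:

    import calendar
    import datetime

    day = datetime.date(2020, 10, 1)
    start_ts = calendar.timegm(day.timetuple())
    end_ts = start_ts + 86400 - 1
    # first revision after start_ts up to the youngest revision at end_ts
    start_rev, end_rev = remote.lookup_interval(wire, start_ts, end_ts)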
198 200 def revision_properties(self, wire, revision):
199 201
200 202 cache_on, context_uid, repo_id = self._cache_on(wire)
201 203 @self.region.conditional_cache_on_arguments(condition=cache_on)
202 204 def _revision_properties(_repo_id, _revision):
203 205 repo = self._factory.repo(wire)
204 206 fs_ptr = svn.repos.fs(repo)
205 207 return svn.fs.revision_proplist(fs_ptr, revision)
206 208 return _revision_properties(repo_id, revision)
207 209
208 210 def revision_changes(self, wire, revision):
209 211
210 212 repo = self._factory.repo(wire)
211 213 fsobj = svn.repos.fs(repo)
212 214 rev_root = svn.fs.revision_root(fsobj, revision)
213 215
214 216 editor = svn.repos.ChangeCollector(fsobj, rev_root)
215 217 editor_ptr, editor_baton = svn.delta.make_editor(editor)
216 218 base_dir = ""
217 219 send_deltas = False
218 220 svn.repos.replay2(
219 221 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
220 222 editor_ptr, editor_baton, None)
221 223
222 224 added = []
223 225 changed = []
224 226 removed = []
225 227
226 228 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
227 229 for path, change in editor.changes.iteritems():
228 230 # TODO: Decide what to do with directory nodes. Subversion can add
229 231 # empty directories.
230 232
231 233 if change.item_kind == svn.core.svn_node_dir:
232 234 continue
233 235 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
234 236 added.append(path)
235 237 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
236 238 svn.repos.CHANGE_ACTION_REPLACE]:
237 239 changed.append(path)
238 240 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
239 241 removed.append(path)
240 242 else:
241 243 raise NotImplementedError(
242 244 "Action %s not supported on path %s" % (
243 245 change.action, path))
244 246
245 247 changes = {
246 248 'added': added,
247 249 'changed': changed,
248 250 'removed': removed,
249 251 }
250 252 return changes
251 253
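For reference, the returned structure for a revision touching three files would look like this (hypothetical paths):

    {'added': ['trunk/new.txt'],
     'changed': ['trunk/a.py'],
     'removed': ['trunk/old.txt']}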
252 254 @reraise_safe_exceptions
253 255 def node_history(self, wire, path, revision, limit):
254 256 cache_on, context_uid, repo_id = self._cache_on(wire)
255 257 @self.region.conditional_cache_on_arguments(condition=cache_on)
256 258 def _node_history(_context_uid, _repo_id, _path, _revision, _limit):
257 259 cross_copies = False
258 260 repo = self._factory.repo(wire)
259 261 fsobj = svn.repos.fs(repo)
260 262 rev_root = svn.fs.revision_root(fsobj, revision)
261 263
262 264 history_revisions = []
263 265 history = svn.fs.node_history(rev_root, path)
264 266 history = svn.fs.history_prev(history, cross_copies)
265 267 while history:
266 268 __, node_revision = svn.fs.history_location(history)
267 269 history_revisions.append(node_revision)
268 270 if limit and len(history_revisions) >= limit:
269 271 break
270 272 history = svn.fs.history_prev(history, cross_copies)
271 273 return history_revisions
272 274 return _node_history(context_uid, repo_id, path, revision, limit)
273 275
274 276 def node_properties(self, wire, path, revision):
275 277 cache_on, context_uid, repo_id = self._cache_on(wire)
276 278 @self.region.conditional_cache_on_arguments(condition=cache_on)
277 279 def _node_properties(_repo_id, _path, _revision):
278 280 repo = self._factory.repo(wire)
279 281 fsobj = svn.repos.fs(repo)
280 282 rev_root = svn.fs.revision_root(fsobj, revision)
281 283 return svn.fs.node_proplist(rev_root, path)
282 284 return _node_properties(repo_id, path, revision)
283 285
284 286 def file_annotate(self, wire, path, revision):
285 287 abs_path = 'file://' + urllib.pathname2url(
286 288 vcspath.join(wire['path'], path))
287 289 file_uri = svn.core.svn_path_canonicalize(abs_path)
288 290
289 291 start_rev = svn_opt_revision_value_t(0)
290 292 peg_rev = svn_opt_revision_value_t(revision)
291 293 end_rev = peg_rev
292 294
293 295 annotations = []
294 296
295 297 def receiver(line_no, revision, author, date, line, pool):
296 298 annotations.append((line_no, revision, line))
297 299
298 300 # TODO: Cannot use blame5, missing typemap function in the swig code
299 301 try:
300 302 svn.client.blame2(
301 303 file_uri, peg_rev, start_rev, end_rev,
302 304 receiver, svn.client.create_context())
303 305 except svn.core.SubversionException as exc:
304 306 log.exception("Error during blame operation.")
305 307 raise Exception(
306 308 "Blame not supported or file does not exist at path %s. "
307 309 "Error %s." % (path, exc))
308 310
309 311 return annotations
310 312
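A usage sketch for the annotator; `remote` and `wire` are hypothetical plumbing and the path/revision are example values:

    for line_no, rev, line in remote.file_annotate(wire, 'trunk/README', 42):
        print('%4d r%-6s %s' % (line_no, rev, line.rstrip()))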
311 313 def get_node_type(self, wire, path, revision=None):
312 314
313 315 cache_on, context_uid, repo_id = self._cache_on(wire)
314 316 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 317 def _get_node_type(_repo_id, _path, _revision):
316 318 repo = self._factory.repo(wire)
317 319 fs_ptr = svn.repos.fs(repo)
318 320 if _revision is None:
319 321 _revision = svn.fs.youngest_rev(fs_ptr)
320 322 root = svn.fs.revision_root(fs_ptr, _revision)
321 323 node = svn.fs.check_path(root, path)
322 324 return NODE_TYPE_MAPPING.get(node, None)
323 325 return _get_node_type(repo_id, path, revision)
324 326
325 327 def get_nodes(self, wire, path, revision=None):
326 328
327 329 cache_on, context_uid, repo_id = self._cache_on(wire)
328 330 @self.region.conditional_cache_on_arguments(condition=cache_on)
329 331 def _get_nodes(_repo_id, _path, _revision):
330 332 repo = self._factory.repo(wire)
331 333 fsobj = svn.repos.fs(repo)
332 334 if _revision is None:
333 335 _revision = svn.fs.youngest_rev(fsobj)
334 336 root = svn.fs.revision_root(fsobj, _revision)
335 337 entries = svn.fs.dir_entries(root, path)
336 338 result = []
337 339 for entry_path, entry_info in entries.iteritems():
338 340 result.append(
339 341 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
340 342 return result
341 343 return _get_nodes(repo_id, path, revision)
342 344
343 345 def get_file_content(self, wire, path, rev=None):
344 346 repo = self._factory.repo(wire)
345 347 fsobj = svn.repos.fs(repo)
346 348 if rev is None:
347 349 rev = svn.fs.youngest_rev(fsobj)
348 350 root = svn.fs.revision_root(fsobj, rev)
349 351 content = svn.core.Stream(svn.fs.file_contents(root, path))
350 352 return content.read()
351 353
352 354 def get_file_size(self, wire, path, revision=None):
353 355
354 356 cache_on, context_uid, repo_id = self._cache_on(wire)
355 357 @self.region.conditional_cache_on_arguments(condition=cache_on)
356 358 def _get_file_size(_repo_id, _path, _revision):
357 359 repo = self._factory.repo(wire)
358 360 fsobj = svn.repos.fs(repo)
359 361 if _revision is None:
360 362 _revision = svn.fs.youngest_rev(fsobj)
361 363 root = svn.fs.revision_root(fsobj, _revision)
362 364 size = svn.fs.file_length(root, path)
363 365 return size
364 366 return _get_file_size(repo_id, path, revision)
365 367
366 368 def create_repository(self, wire, compatible_version=None):
367 369 log.info('Creating Subversion repository in path "%s"', wire['path'])
368 370 self._factory.repo(wire, create=True,
369 371 compatible_version=compatible_version)
370 372
371 373 def get_url_and_credentials(self, src_url):
372 374 obj = urlparse.urlparse(src_url)
373 375 username = obj.username or None
374 376 password = obj.password or None
375 377 return username, password, src_url
376 378
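Note that credentials are parsed out of the URL but the URL itself is returned unchanged, e.g. (hypothetical host):

    remote.get_url_and_credentials('svn://alice:s3cret@svn.example.com/repo')
    # -> ('alice', 's3cret', 'svn://alice:s3cret@svn.example.com/repo')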
377 379 def import_remote_repository(self, wire, src_url):
378 380 repo_path = wire['path']
379 381 if not self.is_path_valid_repository(wire, repo_path):
380 382 raise Exception(
381 383 "Path %s is not a valid Subversion repository." % repo_path)
382 384
383 385 username, password, src_url = self.get_url_and_credentials(src_url)
384 386 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
385 387 '--trust-server-cert-failures=unknown-ca']
386 388 if username and password:
387 389 rdump_cmd += ['--username', username, '--password', password]
388 390 rdump_cmd += [src_url]
389 391
390 392 rdump = subprocess.Popen(
391 393 rdump_cmd,
392 394 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
393 395 load = subprocess.Popen(
394 396 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
395 397
396 398 # TODO: johbo: This can be a very long operation, might be better
397 399 # to track some kind of status and provide an api to check if the
398 400 # import is done.
399 401 rdump.wait()
400 402 load.wait()
401 403
402 404 log.debug('Dump process ended with code: %s', rdump.returncode)
403 405 if rdump.returncode != 0:
404 406 errors = rdump.stderr.read()
405 407 log.error('svnrdump dump failed: statuscode %s: message: %s',
406 408 rdump.returncode, errors)
407 409 reason = 'UNKNOWN'
408 410 if 'svnrdump: E230001:' in errors:
409 411 reason = 'INVALID_CERTIFICATE'
410 412
411 413 if reason == 'UNKNOWN':
412 414 reason = 'UNKNOWN:{}'.format(errors)
413 415 raise Exception(
414 416 'Failed to dump the remote repository from %s. Reason: %s' % (
415 417 src_url, reason))
416 418 if load.returncode != 0:
417 419 raise Exception(
418 420 'Failed to load the dump of remote repository from %s.' %
419 421 (src_url, ))
420 422
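In shell terms, the import above amounts to piping the output of "svnrdump dump --non-interactive <src_url>" into "svnadmin load <repo_path>" and then inspecting the dump side's stderr for certificate errors.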
421 423 def commit(self, wire, message, author, timestamp, updated, removed):
422 424 assert isinstance(message, str)
423 425 assert isinstance(author, str)
424 426
425 427 repo = self._factory.repo(wire)
426 428 fsobj = svn.repos.fs(repo)
427 429
428 430 rev = svn.fs.youngest_rev(fsobj)
429 431 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
430 432 txn_root = svn.fs.txn_root(txn)
431 433
432 434 for node in updated:
433 435 TxnNodeProcessor(node, txn_root).update()
434 436 for node in removed:
435 437 TxnNodeProcessor(node, txn_root).remove()
436 438
437 439 commit_id = svn.repos.fs_commit_txn(repo, txn)
438 440
439 441 if timestamp:
440 442 apr_time = apr_time_t(timestamp)
441 443 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
442 444 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
443 445
444 446 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
445 447 return commit_id
446 448
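A sketch of the node dicts that commit consumes (shape inferred from TxnNodeProcessor below; all values are hypothetical):

    updated = [{
        'path': 'trunk/hello.txt',
        'content': 'hello world\n',
        'properties': {'svn:eol-style': 'native'},
    }]
    removed = [{'path': 'trunk/obsolete.txt'}]
    new_rev = remote.commit(wire, 'Add hello', 'alice', None, updated, removed)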
447 449 def diff(self, wire, rev1, rev2, path1=None, path2=None,
448 450 ignore_whitespace=False, context=3):
449 451
450 452 wire.update(cache=False)
451 453 repo = self._factory.repo(wire)
452 454 diff_creator = SvnDiffer(
453 455 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
454 456 try:
455 457 return diff_creator.generate_diff()
456 458 except svn.core.SubversionException:
457 459 log.exception(
458 460 "Error during diff operation. "
459 461 "Path might not exist %s, %s", path1, path2)
460 462 return ""
461 463
462 464 @reraise_safe_exceptions
463 465 def is_large_file(self, wire, path):
464 466 return False
465 467
466 468 @reraise_safe_exceptions
467 469 def is_binary(self, wire, rev, path):
468 470 cache_on, context_uid, repo_id = self._cache_on(wire)
469 471
470 472 @self.region.conditional_cache_on_arguments(condition=cache_on)
471 473 def _is_binary(_repo_id, _rev, _path):
472 474 raw_bytes = self.get_file_content(wire, path, rev)
473 475 return raw_bytes and '\0' in raw_bytes
474 476
475 477 return _is_binary(repo_id, rev, path)
476 478
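The heuristic mirrors git's: any NUL byte in the blob marks it as binary, so a PNG header containing '\x00' is detected while plain text is not. Note that an empty file returns '' (falsy) rather than False.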
477 479 @reraise_safe_exceptions
478 480 def run_svn_command(self, wire, cmd, **opts):
479 481 path = wire.get('path', None)
480 482
481 483 if path and os.path.isdir(path):
482 484 opts['cwd'] = path
483 485
484 486 safe_call = False
485 487 if '_safe' in opts:
486 488 safe_call = opts.pop('_safe')  # pop so the flag does not leak into Popen options
487 489
488 490 svnenv = os.environ.copy()
489 491 svnenv.update(opts.pop('extra_env', {}))
490 492
491 493 _opts = {'env': svnenv, 'shell': False}
492 494
493 495 try:
494 496 _opts.update(opts)
495 497 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
496 498
497 499 return ''.join(p), ''.join(p.error)
498 500 except (EnvironmentError, OSError) as err:
499 501 cmd = ' '.join(cmd) # human friendly CMD
500 502 tb_err = ("Couldn't run svn command (%s).\n"
501 503 "Original error was:%s\n"
502 504 "Call options:%s\n"
503 505 % (cmd, err, _opts))
504 506 log.exception(tb_err)
505 507 if safe_call:
506 508 return '', err
507 509 else:
508 510 raise exceptions.VcsException()(tb_err)
509 511
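Example invocation; with `_safe=True` a failed spawn returns ('', err) instead of raising (`remote` and `wire` are hypothetical plumbing):

    stdout, stderr = remote.run_svn_command(
        wire, ['svn', 'info', '--non-interactive'], _safe=True)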
510 512 @reraise_safe_exceptions
511 513 def install_hooks(self, wire, force=False):
512 514 from vcsserver.hook_utils import install_svn_hooks
513 515 repo_path = wire['path']
514 516 binary_dir = settings.BINARY_DIR
515 517 executable = None
516 518 if binary_dir:
517 519 executable = os.path.join(binary_dir, 'python')
518 520 return install_svn_hooks(
519 521 repo_path, executable=executable, force_create=force)
520 522
521 523 @reraise_safe_exceptions
522 524 def get_hooks_info(self, wire):
523 525 from vcsserver.hook_utils import (
524 526 get_svn_pre_hook_version, get_svn_post_hook_version)
525 527 repo_path = wire['path']
526 528 return {
527 529 'pre_version': get_svn_pre_hook_version(repo_path),
528 530 'post_version': get_svn_post_hook_version(repo_path),
529 531 }
530 532
533 @reraise_safe_exceptions
534 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
535 archive_dir_name, commit_id):
536
537 def walk_tree(root, root_dir, _commit_id):
538 """
539 Recursive svn repo walker that yields (path, data, node_type) tuples
540 """
541
542 filemode_default = 0o100644
543 filemode_executable = 0o100755
544
545 file_iter = svn.fs.dir_entries(root, root_dir)
546 for f_name in file_iter:
547 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
548
549 if f_type == 'dir':
550 # yield the directory entry itself first, then every entry inside it
551 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
552 new_root = os.path.join(root_dir, f_name)
553 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
554 yield _f_name, _f_data, _f_type
555 else:
556 f_path = os.path.join(root_dir, f_name).rstrip('/')
557 prop_list = svn.fs.node_proplist(root, f_path)
558
559 f_mode = filemode_default
560 if prop_list.get('svn:executable'):
561 f_mode = filemode_executable
562
563 f_is_link = False
564 if prop_list.get('svn:special'):
565 f_is_link = True
566
567 data = {
568 'is_link': f_is_link,
569 'mode': f_mode,
570 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
571 }
572
573 yield f_path, data, f_type
574
575 def file_walker(_commit_id, path):
576 repo = self._factory.repo(wire)
577 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
578
579 def no_content():
580 raise NoContentException()
581
582 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
583 file_path = f_name
584
585 if f_type == 'dir':
586 mode = f_data['mode']
587 yield ArchiveNode(file_path, mode, False, no_content)
588 else:
589 mode = f_data['mode']
590 is_link = f_data['is_link']
591 data_stream = f_data['content_stream']
592 yield ArchiveNode(file_path, mode, is_link, data_stream)
593
594 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
595 archive_dir_name, commit_id)
596
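A hypothetical call producing a tgz archive of trunk at revision 42 (the kind value and paths are assumptions; the archive_repo helper itself is imported from a shared module):

    remote.archive_repo(
        wire, '/tmp/example.tgz', kind='tgz', mtime=None,
        archive_at_path='/trunk', archive_dir_name='example', commit_id='42')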
531 597
532 598 class SvnDiffer(object):
533 599 """
534 600 Utility to create diffs based on difflib and the Subversion API
535 601 """
536 602
537 603 binary_content = False
538 604
539 605 def __init__(
540 606 self, repo, src_rev, src_path, tgt_rev, tgt_path,
541 607 ignore_whitespace, context):
542 608 self.repo = repo
543 609 self.ignore_whitespace = ignore_whitespace
544 610 self.context = context
545 611
546 612 fsobj = svn.repos.fs(repo)
547 613
548 614 self.tgt_rev = tgt_rev
549 615 self.tgt_path = tgt_path or ''
550 616 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
551 617 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
552 618
553 619 self.src_rev = src_rev
554 620 self.src_path = src_path or self.tgt_path
555 621 self.src_root = svn.fs.revision_root(fsobj, src_rev)
556 622 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
557 623
558 624 self._validate()
559 625
560 626 def _validate(self):
561 627 if (self.tgt_kind != svn.core.svn_node_none and
562 628 self.src_kind != svn.core.svn_node_none and
563 629 self.src_kind != self.tgt_kind):
564 630 # TODO: johbo: proper error handling
565 631 raise Exception(
566 632 "Source and target are not compatible for diff generation. "
567 633 "Source type: %s, target type: %s" %
568 634 (self.src_kind, self.tgt_kind))
569 635
570 636 def generate_diff(self):
571 637 buf = StringIO.StringIO()
572 638 if self.tgt_kind == svn.core.svn_node_dir:
573 639 self._generate_dir_diff(buf)
574 640 else:
575 641 self._generate_file_diff(buf)
576 642 return buf.getvalue()
577 643
578 644 def _generate_dir_diff(self, buf):
579 645 editor = DiffChangeEditor()
580 646 editor_ptr, editor_baton = svn.delta.make_editor(editor)
581 647 svn.repos.dir_delta2(
582 648 self.src_root,
583 649 self.src_path,
584 650 '', # src_entry
585 651 self.tgt_root,
586 652 self.tgt_path,
587 653 editor_ptr, editor_baton,
588 654 authorization_callback_allow_all,
589 655 False, # text_deltas
590 656 svn.core.svn_depth_infinity, # depth
591 657 False, # entry_props
592 658 False, # ignore_ancestry
593 659 )
594 660
595 661 for path, __, change in sorted(editor.changes):
596 662 self._generate_node_diff(
597 663 buf, change, path, self.tgt_path, path, self.src_path)
598 664
599 665 def _generate_file_diff(self, buf):
600 666 change = None
601 667 if self.src_kind == svn.core.svn_node_none:
602 668 change = "add"
603 669 elif self.tgt_kind == svn.core.svn_node_none:
604 670 change = "delete"
605 671 tgt_base, tgt_path = vcspath.split(self.tgt_path)
606 672 src_base, src_path = vcspath.split(self.src_path)
607 673 self._generate_node_diff(
608 674 buf, change, tgt_path, tgt_base, src_path, src_base)
609 675
610 676 def _generate_node_diff(
611 677 self, buf, change, tgt_path, tgt_base, src_path, src_base):
612 678
613 679 if self.src_rev == self.tgt_rev and tgt_base == src_base:
614 680 # keep behaviour consistent with git/hg: return an empty diff
615 681 # when comparing the same revision
616 682 return
617 683
618 684 tgt_full_path = vcspath.join(tgt_base, tgt_path)
619 685 src_full_path = vcspath.join(src_base, src_path)
620 686
621 687 self.binary_content = False
622 688 mime_type = self._get_mime_type(tgt_full_path)
623 689
624 690 if mime_type and not mime_type.startswith('text'):
625 691 self.binary_content = True
626 692 buf.write("=" * 67 + '\n')
627 693 buf.write("Cannot display: file marked as a binary type.\n")
628 694 buf.write("svn:mime-type = %s\n" % mime_type)
629 695 buf.write("Index: %s\n" % (tgt_path, ))
630 696 buf.write("=" * 67 + '\n')
631 697 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
632 698 'tgt_path': tgt_path})
633 699
634 700 if change == 'add':
635 701 # TODO: johbo: SVN is missing a zero here compared to git
636 702 buf.write("new file mode 10644\n")
637 703
638 704 # TODO(marcink): introduce binary detection of svn patches
639 705 # if self.binary_content:
640 706 # buf.write('GIT binary patch\n')
641 707
642 708 buf.write("--- /dev/null\t(revision 0)\n")
643 709 src_lines = []
644 710 else:
645 711 if change == 'delete':
646 712 buf.write("deleted file mode 10644\n")
647 713
648 714 # TODO(marcink): introduce binary detection of svn patches
649 715 # if self.binary_content:
650 716 # buf.write('GIT binary patch\n')
651 717
652 718 buf.write("--- a/%s\t(revision %s)\n" % (
653 719 src_path, self.src_rev))
654 720 src_lines = self._svn_readlines(self.src_root, src_full_path)
655 721
656 722 if change == 'delete':
657 723 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
658 724 tgt_lines = []
659 725 else:
660 726 buf.write("+++ b/%s\t(revision %s)\n" % (
661 727 tgt_path, self.tgt_rev))
662 728 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
663 729
664 730 if not self.binary_content:
665 731 udiff = svn_diff.unified_diff(
666 732 src_lines, tgt_lines, context=self.context,
667 733 ignore_blank_lines=self.ignore_whitespace,
668 734 ignore_case=False,
669 735 ignore_space_changes=self.ignore_whitespace)
670 736 buf.writelines(udiff)
671 737
672 738 def _get_mime_type(self, path):
673 739 try:
674 740 mime_type = svn.fs.node_prop(
675 741 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
676 742 except svn.core.SubversionException:
677 743 mime_type = svn.fs.node_prop(
678 744 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
679 745 return mime_type
680 746
681 747 def _svn_readlines(self, fs_root, node_path):
682 748 if self.binary_content:
683 749 return []
684 750 node_kind = svn.fs.check_path(fs_root, node_path)
685 751 if node_kind not in (
686 752 svn.core.svn_node_file, svn.core.svn_node_symlink):
687 753 return []
688 content = svn.core.Stream(
689 svn.fs.file_contents(fs_root, node_path)).read()
754 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
690 755 return content.splitlines(True)
691 756
692 757
693 758 class DiffChangeEditor(svn.delta.Editor):
694 759 """
695 760 Records changes between two given revisions
696 761 """
697 762
698 763 def __init__(self):
699 764 self.changes = []
700 765
701 766 def delete_entry(self, path, revision, parent_baton, pool=None):
702 767 self.changes.append((path, None, 'delete'))
703 768
704 769 def add_file(
705 770 self, path, parent_baton, copyfrom_path, copyfrom_revision,
706 771 file_pool=None):
707 772 self.changes.append((path, 'file', 'add'))
708 773
709 774 def open_file(self, path, parent_baton, base_revision, file_pool=None):
710 775 self.changes.append((path, 'file', 'change'))
711 776
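Only file adds, file changes and deletions are recorded; directory adds and opens fall through to the no-op defaults provided by svn.delta.Editor.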
712 777
713 778 def authorization_callback_allow_all(root, path, pool):
714 779 return True
715 780
716 781
717 782 class TxnNodeProcessor(object):
718 783 """
719 784 Utility to process the change of one node within a transaction root.
720 785
721 786 It encapsulates the knowledge of how to add, update or remove
722 787 a node for a given transaction root. The purpose is to support the method
723 788 `SvnRemote.commit`.
724 789 """
725 790
726 791 def __init__(self, node, txn_root):
727 792 assert isinstance(node['path'], str)
728 793
729 794 self.node = node
730 795 self.txn_root = txn_root
731 796
732 797 def update(self):
733 798 self._ensure_parent_dirs()
734 799 self._add_file_if_node_does_not_exist()
735 800 self._update_file_content()
736 801 self._update_file_properties()
737 802
738 803 def remove(self):
739 804 svn.fs.delete(self.txn_root, self.node['path'])
740 805 # TODO: Clean up directory if empty
741 806
742 807 def _ensure_parent_dirs(self):
743 808 curdir = vcspath.dirname(self.node['path'])
744 809 dirs_to_create = []
745 810 while not self._svn_path_exists(curdir):
746 811 dirs_to_create.append(curdir)
747 812 curdir = vcspath.dirname(curdir)
748 813
749 814 for curdir in reversed(dirs_to_create):
750 815 log.debug('Creating missing directory "%s"', curdir)
751 816 svn.fs.make_dir(self.txn_root, curdir)
752 817
753 818 def _svn_path_exists(self, path):
754 819 path_status = svn.fs.check_path(self.txn_root, path)
755 820 return path_status != svn.core.svn_node_none
756 821
757 822 def _add_file_if_node_does_not_exist(self):
758 823 kind = svn.fs.check_path(self.txn_root, self.node['path'])
759 824 if kind == svn.core.svn_node_none:
760 825 svn.fs.make_file(self.txn_root, self.node['path'])
761 826
762 827 def _update_file_content(self):
763 828 assert isinstance(self.node['content'], str)
764 829 handler, baton = svn.fs.apply_textdelta(
765 830 self.txn_root, self.node['path'], None, None)
766 831 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
767 832
768 833 def _update_file_properties(self):
769 834 properties = self.node.get('properties', {})
770 835 for key, value in properties.iteritems():
771 836 svn.fs.change_node_prop(
772 837 self.txn_root, self.node['path'], key, value)
773 838
774 839
775 840 def apr_time_t(timestamp):
776 841 """
777 842 Convert a Python timestamp into APR timestamp type apr_time_t
778 843 """
779 844 return timestamp * 1E6
780 845
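For example, 2020-10-01T00:00:00Z is 1601510400 seconds since the epoch, so apr_time_t(1601510400) yields 1.6015104e+15 microseconds.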
781 846
782 847 def svn_opt_revision_value_t(num):
783 848 """
784 849 Put `num` into a `svn_opt_revision_value_t` structure.
785 850 """
786 851 value = svn.core.svn_opt_revision_value_t()
787 852 value.number = num
788 853 revision = svn.core.svn_opt_revision_t()
789 854 revision.kind = svn.core.svn_opt_revision_number
790 855 revision.value = value
791 856 return revision
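For instance, the annotator above pins both ends of its blame range with svn_opt_revision_value_t(0) and svn_opt_revision_value_t(revision), turning plain integers into concrete svn_opt_revision_t values.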