release: Merge default into stable for release preparation
super-admin
r992:2a5ae811 merge stable
@@ -1,5 +1,5 @@
1 1 [bumpversion]
2 current_version = 4.26.0
2 current_version = 4.27.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
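Note: bumpversion expands the `message` template with str.format-style placeholders when it commits the bump. A minimal sketch in Python (the values mirror this release; the snippet illustrates the template semantics, not bumpversion's internals):

    # Illustrative only: how the message template above expands.
    template = "release: Bump version {current_version} to {new_version}"
    print(template.format(current_version="4.26.0", new_version="4.27.0"))
    # release: Bump version 4.26.0 to 4.27.0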
@@ -1,16 +1,14 @@
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.26.0
12 state = in_progress
13 version = 4.27.0
16 14
@@ -1,1103 +1,1103 @@
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.3.0";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-19.3.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "cffi" = super.buildPythonPackage {
52 52 name = "cffi-1.12.3";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."pycparser"
56 56 ];
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "configobj" = super.buildPythonPackage {
66 66 name = "configobj-5.0.6";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."six"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
73 73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.bsdOriginal ];
77 77 };
78 78 };
79 79 "configparser" = super.buildPythonPackage {
80 80 name = "configparser-4.0.2";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.mit ];
88 88 };
89 89 };
90 90 "contextlib2" = super.buildPythonPackage {
91 91 name = "contextlib2-0.6.0.post1";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.psfl ];
99 99 };
100 100 };
101 101 "cov-core" = super.buildPythonPackage {
102 102 name = "cov-core-1.15.0";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."coverage"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
109 109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.mit ];
113 113 };
114 114 };
115 115 "coverage" = super.buildPythonPackage {
116 116 name = "coverage-4.5.4";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.asl20 ];
124 124 };
125 125 };
126 126 "decorator" = super.buildPythonPackage {
127 127 name = "decorator-4.1.2";
128 128 doCheck = false;
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
131 131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
135 135 };
136 136 };
137 137 "dogpile.cache" = super.buildPythonPackage {
138 138 name = "dogpile.cache-0.9.0";
139 139 doCheck = false;
140 140 propagatedBuildInputs = [
141 141 self."decorator"
142 142 ];
143 143 src = fetchurl {
144 144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
146 146 };
147 147 meta = {
148 148 license = [ pkgs.lib.licenses.bsdOriginal ];
149 149 };
150 150 };
151 151 "dogpile.core" = super.buildPythonPackage {
152 152 name = "dogpile.core-0.4.1";
153 153 doCheck = false;
154 154 src = fetchurl {
155 155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
156 156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
157 157 };
158 158 meta = {
159 159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 160 };
161 161 };
162 162 "dulwich" = super.buildPythonPackage {
163 163 name = "dulwich-0.13.0";
164 164 doCheck = false;
165 165 src = fetchurl {
166 166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
168 168 };
169 169 meta = {
170 170 license = [ pkgs.lib.licenses.gpl2Plus ];
171 171 };
172 172 };
173 173 "enum34" = super.buildPythonPackage {
174 174 name = "enum34-1.1.10";
175 175 doCheck = false;
176 176 src = fetchurl {
177 177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
178 178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 "funcsigs" = super.buildPythonPackage {
185 185 name = "funcsigs-1.0.2";
186 186 doCheck = false;
187 187 src = fetchurl {
188 188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
189 189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
190 190 };
191 191 meta = {
192 192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
193 193 };
194 194 };
195 195 "gevent" = super.buildPythonPackage {
196 196 name = "gevent-1.5.0";
197 197 doCheck = false;
198 198 propagatedBuildInputs = [
199 199 self."greenlet"
200 200 ];
201 201 src = fetchurl {
202 202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
203 203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
204 204 };
205 205 meta = {
206 206 license = [ pkgs.lib.licenses.mit ];
207 207 };
208 208 };
209 209 "gprof2dot" = super.buildPythonPackage {
210 210 name = "gprof2dot-2017.9.19";
211 211 doCheck = false;
212 212 src = fetchurl {
213 213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
214 214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
215 215 };
216 216 meta = {
217 217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
218 218 };
219 219 };
220 220 "greenlet" = super.buildPythonPackage {
221 221 name = "greenlet-0.4.15";
222 222 doCheck = false;
223 223 src = fetchurl {
224 224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
225 225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
226 226 };
227 227 meta = {
228 228 license = [ pkgs.lib.licenses.mit ];
229 229 };
230 230 };
231 231 "gunicorn" = super.buildPythonPackage {
232 232 name = "gunicorn-19.9.0";
233 233 doCheck = false;
234 234 src = fetchurl {
235 235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
236 236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
237 237 };
238 238 meta = {
239 239 license = [ pkgs.lib.licenses.mit ];
240 240 };
241 241 };
242 242 "hg-evolve" = super.buildPythonPackage {
243 243 name = "hg-evolve-9.1.0";
244 244 doCheck = false;
245 245 src = fetchurl {
246 246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
248 248 };
249 249 meta = {
250 250 license = [ { fullName = "GPLv2+"; } ];
251 251 };
252 252 };
253 253 "hgsubversion" = super.buildPythonPackage {
254 254 name = "hgsubversion-1.9.3";
255 255 doCheck = false;
256 256 propagatedBuildInputs = [
257 257 self."mercurial"
258 258 self."subvertpy"
259 259 ];
260 260 src = fetchurl {
261 261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
262 262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
263 263 };
264 264 meta = {
265 265 license = [ pkgs.lib.licenses.gpl1 ];
266 266 };
267 267 };
268 268 "hupper" = super.buildPythonPackage {
269 269 name = "hupper-1.10.2";
270 270 doCheck = false;
271 271 src = fetchurl {
272 272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
273 273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
274 274 };
275 275 meta = {
276 276 license = [ pkgs.lib.licenses.mit ];
277 277 };
278 278 };
279 279 "importlib-metadata" = super.buildPythonPackage {
280 280 name = "importlib-metadata-1.6.0";
281 281 doCheck = false;
282 282 propagatedBuildInputs = [
283 283 self."zipp"
284 284 self."pathlib2"
285 285 self."contextlib2"
286 286 self."configparser"
287 287 ];
288 288 src = fetchurl {
289 289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
290 290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
291 291 };
292 292 meta = {
293 293 license = [ pkgs.lib.licenses.asl20 ];
294 294 };
295 295 };
296 296 "ipdb" = super.buildPythonPackage {
297 297 name = "ipdb-0.13.2";
298 298 doCheck = false;
299 299 propagatedBuildInputs = [
300 300 self."setuptools"
301 301 self."ipython"
302 302 ];
303 303 src = fetchurl {
304 304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
305 305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
306 306 };
307 307 meta = {
308 308 license = [ pkgs.lib.licenses.bsdOriginal ];
309 309 };
310 310 };
311 311 "ipython" = super.buildPythonPackage {
312 312 name = "ipython-5.1.0";
313 313 doCheck = false;
314 314 propagatedBuildInputs = [
315 315 self."setuptools"
316 316 self."decorator"
317 317 self."pickleshare"
318 318 self."simplegeneric"
319 319 self."traitlets"
320 320 self."prompt-toolkit"
321 321 self."pygments"
322 322 self."pexpect"
323 323 self."backports.shutil-get-terminal-size"
324 324 self."pathlib2"
325 325 self."pexpect"
326 326 ];
327 327 src = fetchurl {
328 328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
329 329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
330 330 };
331 331 meta = {
332 332 license = [ pkgs.lib.licenses.bsdOriginal ];
333 333 };
334 334 };
335 335 "ipython-genutils" = super.buildPythonPackage {
336 336 name = "ipython-genutils-0.2.0";
337 337 doCheck = false;
338 338 src = fetchurl {
339 339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
340 340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
341 341 };
342 342 meta = {
343 343 license = [ pkgs.lib.licenses.bsdOriginal ];
344 344 };
345 345 };
346 346 "mako" = super.buildPythonPackage {
347 347 name = "mako-1.1.0";
348 348 doCheck = false;
349 349 propagatedBuildInputs = [
350 350 self."markupsafe"
351 351 ];
352 352 src = fetchurl {
353 353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
355 355 };
356 356 meta = {
357 357 license = [ pkgs.lib.licenses.mit ];
358 358 };
359 359 };
360 360 "markupsafe" = super.buildPythonPackage {
361 361 name = "markupsafe-1.1.1";
362 362 doCheck = false;
363 363 src = fetchurl {
364 364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
366 366 };
367 367 meta = {
368 368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
369 369 };
370 370 };
371 371 "mercurial" = super.buildPythonPackage {
372 372 name = "mercurial-5.1.1";
373 373 doCheck = false;
374 374 src = fetchurl {
375 375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
380 380 };
381 381 };
382 382 "mock" = super.buildPythonPackage {
383 383 name = "mock-3.0.5";
384 384 doCheck = false;
385 385 propagatedBuildInputs = [
386 386 self."six"
387 387 self."funcsigs"
388 388 ];
389 389 src = fetchurl {
390 390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
392 392 };
393 393 meta = {
394 394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
395 395 };
396 396 };
397 397 "more-itertools" = super.buildPythonPackage {
398 398 name = "more-itertools-5.0.0";
399 399 doCheck = false;
400 400 propagatedBuildInputs = [
401 401 self."six"
402 402 ];
403 403 src = fetchurl {
404 404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
405 405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
406 406 };
407 407 meta = {
408 408 license = [ pkgs.lib.licenses.mit ];
409 409 };
410 410 };
411 411 "msgpack-python" = super.buildPythonPackage {
412 412 name = "msgpack-python-0.5.6";
413 413 doCheck = false;
414 414 src = fetchurl {
415 415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
416 416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
417 417 };
418 418 meta = {
419 419 license = [ pkgs.lib.licenses.asl20 ];
420 420 };
421 421 };
422 422 "packaging" = super.buildPythonPackage {
423 423 name = "packaging-20.3";
424 424 doCheck = false;
425 425 propagatedBuildInputs = [
426 426 self."pyparsing"
427 427 self."six"
428 428 ];
429 429 src = fetchurl {
430 430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
431 431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
432 432 };
433 433 meta = {
434 434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 435 };
436 436 };
437 437 "pastedeploy" = super.buildPythonPackage {
438 438 name = "pastedeploy-2.1.0";
439 439 doCheck = false;
440 440 src = fetchurl {
441 441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
442 442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
443 443 };
444 444 meta = {
445 445 license = [ pkgs.lib.licenses.mit ];
446 446 };
447 447 };
448 448 "pathlib2" = super.buildPythonPackage {
449 449 name = "pathlib2-2.3.5";
450 450 doCheck = false;
451 451 propagatedBuildInputs = [
452 452 self."six"
453 453 self."scandir"
454 454 ];
455 455 src = fetchurl {
456 456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
458 458 };
459 459 meta = {
460 460 license = [ pkgs.lib.licenses.mit ];
461 461 };
462 462 };
463 463 "pexpect" = super.buildPythonPackage {
464 464 name = "pexpect-4.8.0";
465 465 doCheck = false;
466 466 propagatedBuildInputs = [
467 467 self."ptyprocess"
468 468 ];
469 469 src = fetchurl {
470 470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
471 471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
472 472 };
473 473 meta = {
474 474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
475 475 };
476 476 };
477 477 "pickleshare" = super.buildPythonPackage {
478 478 name = "pickleshare-0.7.5";
479 479 doCheck = false;
480 480 propagatedBuildInputs = [
481 481 self."pathlib2"
482 482 ];
483 483 src = fetchurl {
484 484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
485 485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
486 486 };
487 487 meta = {
488 488 license = [ pkgs.lib.licenses.mit ];
489 489 };
490 490 };
491 491 "plaster" = super.buildPythonPackage {
492 492 name = "plaster-1.0";
493 493 doCheck = false;
494 494 propagatedBuildInputs = [
495 495 self."setuptools"
496 496 ];
497 497 src = fetchurl {
498 498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
499 499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
500 500 };
501 501 meta = {
502 502 license = [ pkgs.lib.licenses.mit ];
503 503 };
504 504 };
505 505 "plaster-pastedeploy" = super.buildPythonPackage {
506 506 name = "plaster-pastedeploy-0.7";
507 507 doCheck = false;
508 508 propagatedBuildInputs = [
509 509 self."pastedeploy"
510 510 self."plaster"
511 511 ];
512 512 src = fetchurl {
513 513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
514 514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
515 515 };
516 516 meta = {
517 517 license = [ pkgs.lib.licenses.mit ];
518 518 };
519 519 };
520 520 "pluggy" = super.buildPythonPackage {
521 521 name = "pluggy-0.13.1";
522 522 doCheck = false;
523 523 propagatedBuildInputs = [
524 524 self."importlib-metadata"
525 525 ];
526 526 src = fetchurl {
527 527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
529 529 };
530 530 meta = {
531 531 license = [ pkgs.lib.licenses.mit ];
532 532 };
533 533 };
534 534 "prompt-toolkit" = super.buildPythonPackage {
535 535 name = "prompt-toolkit-1.0.18";
536 536 doCheck = false;
537 537 propagatedBuildInputs = [
538 538 self."six"
539 539 self."wcwidth"
540 540 ];
541 541 src = fetchurl {
542 542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
544 544 };
545 545 meta = {
546 546 license = [ pkgs.lib.licenses.bsdOriginal ];
547 547 };
548 548 };
549 549 "psutil" = super.buildPythonPackage {
550 550 name = "psutil-5.7.0";
551 551 doCheck = false;
552 552 src = fetchurl {
553 553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
554 554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
555 555 };
556 556 meta = {
557 557 license = [ pkgs.lib.licenses.bsdOriginal ];
558 558 };
559 559 };
560 560 "ptyprocess" = super.buildPythonPackage {
561 561 name = "ptyprocess-0.6.0";
562 562 doCheck = false;
563 563 src = fetchurl {
564 564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
565 565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
566 566 };
567 567 meta = {
568 568 license = [ ];
569 569 };
570 570 };
571 571 "py" = super.buildPythonPackage {
572 572 name = "py-1.8.0";
573 573 doCheck = false;
574 574 src = fetchurl {
575 575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
577 577 };
578 578 meta = {
579 579 license = [ pkgs.lib.licenses.mit ];
580 580 };
581 581 };
582 582 "pycparser" = super.buildPythonPackage {
583 583 name = "pycparser-2.20";
584 584 doCheck = false;
585 585 src = fetchurl {
586 586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
587 587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
588 588 };
589 589 meta = {
590 590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 591 };
592 592 };
593 593 "pygit2" = super.buildPythonPackage {
594 594 name = "pygit2-0.28.2";
595 595 doCheck = false;
596 596 propagatedBuildInputs = [
597 597 self."cffi"
598 598 self."six"
599 599 ];
600 600 src = fetchurl {
601 601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 603 };
604 604 meta = {
605 605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 606 };
607 607 };
608 608 "pygments" = super.buildPythonPackage {
609 609 name = "pygments-2.4.2";
610 610 doCheck = false;
611 611 src = fetchurl {
612 612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
613 613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
614 614 };
615 615 meta = {
616 616 license = [ pkgs.lib.licenses.bsdOriginal ];
617 617 };
618 618 };
619 619 "pyparsing" = super.buildPythonPackage {
620 620 name = "pyparsing-2.4.7";
621 621 doCheck = false;
622 622 src = fetchurl {
623 623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
624 624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
625 625 };
626 626 meta = {
627 627 license = [ pkgs.lib.licenses.mit ];
628 628 };
629 629 };
630 630 "pyramid" = super.buildPythonPackage {
631 631 name = "pyramid-1.10.4";
632 632 doCheck = false;
633 633 propagatedBuildInputs = [
634 634 self."hupper"
635 635 self."plaster"
636 636 self."plaster-pastedeploy"
637 637 self."setuptools"
638 638 self."translationstring"
639 639 self."venusian"
640 640 self."webob"
641 641 self."zope.deprecation"
642 642 self."zope.interface"
643 643 self."repoze.lru"
644 644 ];
645 645 src = fetchurl {
646 646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
647 647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
648 648 };
649 649 meta = {
650 650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
651 651 };
652 652 };
653 653 "pyramid-mako" = super.buildPythonPackage {
654 654 name = "pyramid-mako-1.1.0";
655 655 doCheck = false;
656 656 propagatedBuildInputs = [
657 657 self."pyramid"
658 658 self."mako"
659 659 ];
660 660 src = fetchurl {
661 661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
663 663 };
664 664 meta = {
665 665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
666 666 };
667 667 };
668 668 "pytest" = super.buildPythonPackage {
669 669 name = "pytest-4.6.5";
670 670 doCheck = false;
671 671 propagatedBuildInputs = [
672 672 self."py"
673 673 self."six"
674 674 self."packaging"
675 675 self."attrs"
676 676 self."atomicwrites"
677 677 self."pluggy"
678 678 self."importlib-metadata"
679 679 self."wcwidth"
680 680 self."funcsigs"
681 681 self."pathlib2"
682 682 self."more-itertools"
683 683 ];
684 684 src = fetchurl {
685 685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
687 687 };
688 688 meta = {
689 689 license = [ pkgs.lib.licenses.mit ];
690 690 };
691 691 };
692 692 "pytest-cov" = super.buildPythonPackage {
693 693 name = "pytest-cov-2.7.1";
694 694 doCheck = false;
695 695 propagatedBuildInputs = [
696 696 self."pytest"
697 697 self."coverage"
698 698 ];
699 699 src = fetchurl {
700 700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
702 702 };
703 703 meta = {
704 704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
705 705 };
706 706 };
707 707 "pytest-profiling" = super.buildPythonPackage {
708 708 name = "pytest-profiling-1.7.0";
709 709 doCheck = false;
710 710 propagatedBuildInputs = [
711 711 self."six"
712 712 self."pytest"
713 713 self."gprof2dot"
714 714 ];
715 715 src = fetchurl {
716 716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
718 718 };
719 719 meta = {
720 720 license = [ pkgs.lib.licenses.mit ];
721 721 };
722 722 };
723 723 "pytest-runner" = super.buildPythonPackage {
724 724 name = "pytest-runner-5.1";
725 725 doCheck = false;
726 726 src = fetchurl {
727 727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
729 729 };
730 730 meta = {
731 731 license = [ pkgs.lib.licenses.mit ];
732 732 };
733 733 };
734 734 "pytest-sugar" = super.buildPythonPackage {
735 735 name = "pytest-sugar-0.9.2";
736 736 doCheck = false;
737 737 propagatedBuildInputs = [
738 738 self."pytest"
739 739 self."termcolor"
740 740 self."packaging"
741 741 ];
742 742 src = fetchurl {
743 743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
745 745 };
746 746 meta = {
747 747 license = [ pkgs.lib.licenses.bsdOriginal ];
748 748 };
749 749 };
750 750 "pytest-timeout" = super.buildPythonPackage {
751 751 name = "pytest-timeout-1.3.3";
752 752 doCheck = false;
753 753 propagatedBuildInputs = [
754 754 self."pytest"
755 755 ];
756 756 src = fetchurl {
757 757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
759 759 };
760 760 meta = {
761 761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
762 762 };
763 763 };
764 764 "redis" = super.buildPythonPackage {
765 765 name = "redis-3.5.3";
766 766 doCheck = false;
767 767 src = fetchurl {
768 768 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
769 769 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
770 770 };
771 771 meta = {
772 772 license = [ pkgs.lib.licenses.mit ];
773 773 };
774 774 };
775 775 "repoze.lru" = super.buildPythonPackage {
776 776 name = "repoze.lru-0.7";
777 777 doCheck = false;
778 778 src = fetchurl {
779 779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
780 780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
781 781 };
782 782 meta = {
783 783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
784 784 };
785 785 };
786 786 "rhodecode-vcsserver" = super.buildPythonPackage {
787 name = "rhodecode-vcsserver-4.26.0";
787 name = "rhodecode-vcsserver-4.27.0";
788 788 buildInputs = [
789 789 self."pytest"
790 790 self."py"
791 791 self."pytest-cov"
792 792 self."pytest-sugar"
793 793 self."pytest-runner"
794 794 self."pytest-profiling"
795 795 self."pytest-timeout"
796 796 self."gprof2dot"
797 797 self."mock"
798 798 self."cov-core"
799 799 self."coverage"
800 800 self."webtest"
801 801 self."beautifulsoup4"
802 802 self."configobj"
803 803 ];
804 804 doCheck = true;
805 805 propagatedBuildInputs = [
806 806 self."configobj"
807 807 self."dogpile.cache"
808 808 self."dogpile.core"
809 809 self."decorator"
810 810 self."dulwich"
811 811 self."hgsubversion"
812 812 self."hg-evolve"
813 813 self."mako"
814 814 self."markupsafe"
815 815 self."mercurial"
816 816 self."msgpack-python"
817 817 self."pastedeploy"
818 818 self."pyramid"
819 819 self."pyramid-mako"
820 820 self."pygit2"
821 821 self."repoze.lru"
822 822 self."redis"
823 823 self."simplejson"
824 824 self."subprocess32"
825 825 self."subvertpy"
826 826 self."six"
827 827 self."translationstring"
828 828 self."webob"
829 829 self."zope.deprecation"
830 830 self."zope.interface"
831 831 self."gevent"
832 832 self."greenlet"
833 833 self."gunicorn"
834 834 self."waitress"
835 835 self."ipdb"
836 836 self."ipython"
837 837 self."pytest"
838 838 self."py"
839 839 self."pytest-cov"
840 840 self."pytest-sugar"
841 841 self."pytest-runner"
842 842 self."pytest-profiling"
843 843 self."pytest-timeout"
844 844 self."gprof2dot"
845 845 self."mock"
846 846 self."cov-core"
847 847 self."coverage"
848 848 self."webtest"
849 849 self."beautifulsoup4"
850 850 ];
851 851 src = ./.;
852 852 meta = {
853 853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
854 854 };
855 855 };
856 856 "scandir" = super.buildPythonPackage {
857 857 name = "scandir-1.10.0";
858 858 doCheck = false;
859 859 src = fetchurl {
860 860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
861 861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
862 862 };
863 863 meta = {
864 864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
865 865 };
866 866 };
867 867 "setproctitle" = super.buildPythonPackage {
868 868 name = "setproctitle-1.1.10";
869 869 doCheck = false;
870 870 src = fetchurl {
871 871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
872 872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
873 873 };
874 874 meta = {
875 875 license = [ pkgs.lib.licenses.bsdOriginal ];
876 876 };
877 877 };
878 878 "setuptools" = super.buildPythonPackage {
879 879 name = "setuptools-44.1.0";
880 880 doCheck = false;
881 881 src = fetchurl {
882 882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
883 883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
884 884 };
885 885 meta = {
886 886 license = [ pkgs.lib.licenses.mit ];
887 887 };
888 888 };
889 889
890 890 "setuptools-scm" = super.buildPythonPackage {
891 891 name = "setuptools-scm-3.5.0";
892 892 doCheck = false;
893 893 src = fetchurl {
894 894 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
895 895 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
896 896 };
897 897 meta = {
898 898 license = [ pkgs.lib.licenses.psfl ];
899 899 };
900 900 };
901 901
902 902 "simplegeneric" = super.buildPythonPackage {
903 903 name = "simplegeneric-0.8.1";
904 904 doCheck = false;
905 905 src = fetchurl {
906 906 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
907 907 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
908 908 };
909 909 meta = {
910 910 license = [ pkgs.lib.licenses.zpl21 ];
911 911 };
912 912 };
913 913 "simplejson" = super.buildPythonPackage {
914 914 name = "simplejson-3.16.0";
915 915 doCheck = false;
916 916 src = fetchurl {
917 917 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
918 918 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
919 919 };
920 920 meta = {
921 921 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
922 922 };
923 923 };
924 924 "six" = super.buildPythonPackage {
925 925 name = "six-1.11.0";
926 926 doCheck = false;
927 927 src = fetchurl {
928 928 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
929 929 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
930 930 };
931 931 meta = {
932 932 license = [ pkgs.lib.licenses.mit ];
933 933 };
934 934 };
935 935 "subprocess32" = super.buildPythonPackage {
936 936 name = "subprocess32-3.5.4";
937 937 doCheck = false;
938 938 src = fetchurl {
939 939 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
940 940 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
941 941 };
942 942 meta = {
943 943 license = [ pkgs.lib.licenses.psfl ];
944 944 };
945 945 };
946 946 "subvertpy" = super.buildPythonPackage {
947 947 name = "subvertpy-0.10.1";
948 948 doCheck = false;
949 949 src = fetchurl {
950 950 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
951 951 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
952 952 };
953 953 meta = {
954 954 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
955 955 };
956 956 };
957 957 "termcolor" = super.buildPythonPackage {
958 958 name = "termcolor-1.1.0";
959 959 doCheck = false;
960 960 src = fetchurl {
961 961 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
962 962 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
963 963 };
964 964 meta = {
965 965 license = [ pkgs.lib.licenses.mit ];
966 966 };
967 967 };
968 968 "traitlets" = super.buildPythonPackage {
969 969 name = "traitlets-4.3.3";
970 970 doCheck = false;
971 971 propagatedBuildInputs = [
972 972 self."ipython-genutils"
973 973 self."six"
974 974 self."decorator"
975 975 self."enum34"
976 976 ];
977 977 src = fetchurl {
978 978 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
979 979 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
980 980 };
981 981 meta = {
982 982 license = [ pkgs.lib.licenses.bsdOriginal ];
983 983 };
984 984 };
985 985 "translationstring" = super.buildPythonPackage {
986 986 name = "translationstring-1.3";
987 987 doCheck = false;
988 988 src = fetchurl {
989 989 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
990 990 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
991 991 };
992 992 meta = {
993 993 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
994 994 };
995 995 };
996 996 "venusian" = super.buildPythonPackage {
997 997 name = "venusian-1.2.0";
998 998 doCheck = false;
999 999 src = fetchurl {
1000 1000 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
1001 1001 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
1002 1002 };
1003 1003 meta = {
1004 1004 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1005 1005 };
1006 1006 };
1007 1007 "waitress" = super.buildPythonPackage {
1008 1008 name = "waitress-1.3.1";
1009 1009 doCheck = false;
1010 1010 src = fetchurl {
1011 1011 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
1012 1012 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1013 1013 };
1014 1014 meta = {
1015 1015 license = [ pkgs.lib.licenses.zpl21 ];
1016 1016 };
1017 1017 };
1018 1018 "wcwidth" = super.buildPythonPackage {
1019 1019 name = "wcwidth-0.1.9";
1020 1020 doCheck = false;
1021 1021 src = fetchurl {
1022 1022 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1023 1023 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1024 1024 };
1025 1025 meta = {
1026 1026 license = [ pkgs.lib.licenses.mit ];
1027 1027 };
1028 1028 };
1029 1029 "webob" = super.buildPythonPackage {
1030 1030 name = "webob-1.8.5";
1031 1031 doCheck = false;
1032 1032 src = fetchurl {
1033 1033 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1034 1034 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1035 1035 };
1036 1036 meta = {
1037 1037 license = [ pkgs.lib.licenses.mit ];
1038 1038 };
1039 1039 };
1040 1040 "webtest" = super.buildPythonPackage {
1041 1041 name = "webtest-2.0.34";
1042 1042 doCheck = false;
1043 1043 propagatedBuildInputs = [
1044 1044 self."six"
1045 1045 self."webob"
1046 1046 self."waitress"
1047 1047 self."beautifulsoup4"
1048 1048 ];
1049 1049 src = fetchurl {
1050 1050 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1051 1051 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1052 1052 };
1053 1053 meta = {
1054 1054 license = [ pkgs.lib.licenses.mit ];
1055 1055 };
1056 1056 };
1057 1057 "zipp" = super.buildPythonPackage {
1058 1058 name = "zipp-1.2.0";
1059 1059 doCheck = false;
1060 1060 propagatedBuildInputs = [
1061 1061 self."contextlib2"
1062 1062 ];
1063 1063 src = fetchurl {
1064 1064 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1065 1065 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1066 1066 };
1067 1067 meta = {
1068 1068 license = [ pkgs.lib.licenses.mit ];
1069 1069 };
1070 1070 };
1071 1071 "zope.deprecation" = super.buildPythonPackage {
1072 1072 name = "zope.deprecation-4.4.0";
1073 1073 doCheck = false;
1074 1074 propagatedBuildInputs = [
1075 1075 self."setuptools"
1076 1076 ];
1077 1077 src = fetchurl {
1078 1078 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1079 1079 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1080 1080 };
1081 1081 meta = {
1082 1082 license = [ pkgs.lib.licenses.zpl21 ];
1083 1083 };
1084 1084 };
1085 1085 "zope.interface" = super.buildPythonPackage {
1086 1086 name = "zope.interface-4.6.0";
1087 1087 doCheck = false;
1088 1088 propagatedBuildInputs = [
1089 1089 self."setuptools"
1090 1090 ];
1091 1091 src = fetchurl {
1092 1092 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1093 1093 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1094 1094 };
1095 1095 meta = {
1096 1096 license = [ pkgs.lib.licenses.zpl21 ];
1097 1097 };
1098 1098 };
1099 1099
1100 1100 ### Test requirements
1101 1101
1102 1102
1103 1103 }
@@ -1,1 +1,1 @@
1 4.26.0 No newline at end of file
1 4.27.0 No newline at end of file
@@ -1,130 +1,130 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import os
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 23 from vcsserver import exceptions
24 24 from vcsserver.exceptions import NoContentException
25 25 from vcsserver.hgcompat import (archival)
26 from vcsserver.lib.rc_cache import region_meta
26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 class RepoFactory(object):
31 31 """
31 31 Utility to create repository instances
33 33
34 34 It provides internal caching of the `repo` object based on
35 35 the :term:`call context`.
36 36 """
37 37 repo_type = None
38 38
39 39 def __init__(self):
40 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
40 pass
41 41
42 42 def _create_config(self, path, config):
43 43 config = {}
44 44 return config
45 45
46 46 def _create_repo(self, wire, create):
47 47 raise NotImplementedError()
48 48
49 49 def repo(self, wire, create=False):
50 50 raise NotImplementedError()
51 51
52 52
53 53 def obfuscate_qs(query_string):
54 54 if query_string is None:
55 55 return None
56 56
57 57 parsed = []
58 58 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
59 59 if k in ['auth_token', 'api_key']:
60 60 v = "*****"
61 61 parsed.append((k, v))
62 62
63 63 return '&'.join('{}{}'.format(
64 64 k, '={}'.format(v) if v else '') for k, v in parsed)
65 65
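Usage sketch for obfuscate_qs (the query string is hypothetical; behavior follows the code above: sensitive keys are masked, blank values render without `=`):

    # obfuscate_qs('auth_token=secret&page=2&flag=')
    # -> 'auth_token=*****&page=2&flag'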
66 66
67 67 def raise_from_original(new_type):
68 68 """
69 69 Raise a new exception type with original args and traceback.
70 70 """
71 71 exc_type, exc_value, exc_traceback = sys.exc_info()
72 72 new_exc = new_type(*exc_value.args)
73 73 # store the original traceback into the new exc
74 74 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
75 75
76 76 try:
77 77 raise new_exc, None, exc_traceback
78 78 finally:
79 79 del exc_traceback
80 80
81 81
82 82 class ArchiveNode(object):
83 83 def __init__(self, path, mode, is_link, raw_bytes):
84 84 self.path = path
85 85 self.mode = mode
86 86 self.is_link = is_link
87 87 self.raw_bytes = raw_bytes
88 88
89 89
90 90 def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
91 91 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
92 92 """
93 93 walker should be a file walker, for example:
94 94 def walker():
95 95 for file_info in files:
96 96 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
97 97 """
98 98 extra_metadata = extra_metadata or {}
99 99
100 100 if kind == "tgz":
101 101 archiver = archival.tarit(archive_dest_path, mtime, "gz")
102 102 elif kind == "tbz2":
103 103 archiver = archival.tarit(archive_dest_path, mtime, "bz2")
104 104 elif kind == 'zip':
105 105 archiver = archival.zipit(archive_dest_path, mtime)
106 106 else:
107 107 raise exceptions.ArchiveException()(
108 108 'Remote does not support: "%s" archive type.' % kind)
109 109
110 110 for f in walker(commit_id, archive_at_path):
111 111 f_path = os.path.join(archive_dir_name, f.path.lstrip('/'))
112 112 try:
113 113 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
114 114 except NoContentException:
115 115 # NOTE(marcink): this is a special case for SVN so we can create "empty"
116 116 # directories which aren't supported by the archiver
117 117 archiver.addfile(os.path.join(f_path, '.dir'), f.mode, f.is_link, '')
118 118
119 119 if write_metadata:
120 120 metadata = dict([
121 121 ('commit_id', commit_id),
122 122 ('mtime', mtime),
123 123 ])
124 124 metadata.update(extra_metadata)
125 125
126 126 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
127 127 f_path = os.path.join(archive_dir_name, '.archival.txt')
128 128 archiver.addfile(f_path, 0o644, False, '\n'.join(meta))
129 129
130 130 return archiver.done()
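To make the walker contract concrete, here is a minimal, hypothetical in-memory walker matching the docstring and the `f.raw_bytes()` call above (the file list and the call arguments are illustrative only):

    files = [('README.txt', 0o644, False, lambda: 'hello\n')]

    def walker(commit_id, archive_at_path):
        for path, mode, is_link, get_bytes in files:
            yield ArchiveNode(path, mode, is_link, get_bytes)

    # archive_repo(walker, '/tmp/repo.tgz', 'tgz', mtime=0,
    #              archive_at_path='/', archive_dir_name='repo-abc123',
    #              commit_id='abc123')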
@@ -1,1226 +1,1281 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib
26 26 import urllib2
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from pygit2 import index as LibGit2Index
33 33 from dulwich import index, objects
34 34 from dulwich.client import HttpGitClient, LocalGitClient
35 35 from dulwich.errors import (
36 36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 37 MissingCommitError, ObjectMissing, HangupException,
38 38 UnexpectedCommandError)
39 39 from dulwich.repo import Repo as DulwichRepo
40 40 from dulwich.server import update_server_info
41 41
42 42 from vcsserver import exceptions, settings, subprocessio
43 43 from vcsserver.utils import safe_str, safe_int, safe_unicode
44 44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 45 from vcsserver.hgcompat import (
46 46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 47 from vcsserver.git_lfs.lib import LFSOidStore
48 48 from vcsserver.vcs_base import RemoteBase
49 49
50 50 DIR_STAT = stat.S_IFDIR
51 51 FILE_MODE = stat.S_IFMT
52 52 GIT_LINK = objects.S_IFGITLINK
53 53 PEELED_REF_MARKER = '^{}'
54 54
55 55
56 56 log = logging.getLogger(__name__)
57 57
58 58
59 59 def str_to_dulwich(value):
60 60 """
61 61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
62 62 """
63 63 return value.decode(settings.WIRE_ENCODING)
64 64
65 65
66 66 def reraise_safe_exceptions(func):
67 67 """Converts Dulwich exceptions to something neutral."""
68 68
69 69 @wraps(func)
70 70 def wrapper(*args, **kwargs):
71 71 try:
72 72 return func(*args, **kwargs)
73 73 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
74 74 exc = exceptions.LookupException(org_exc=e)
75 75 raise exc(safe_str(e))
76 76 except (HangupException, UnexpectedCommandError) as e:
77 77 exc = exceptions.VcsException(org_exc=e)
78 78 raise exc(safe_str(e))
79 79 except Exception as e:
80 80 # NOTE(marcink): because of how dulwich handles some exceptions
81 81 # (KeyError on empty repos), we cannot track this and catch all
82 82 # exceptions; these are exceptions from other handlers
83 83 #if not hasattr(e, '_vcs_kind'):
84 84 #log.exception("Unhandled exception in git remote call")
85 85 #raise_from_original(exceptions.UnhandledException)
86 86 raise
87 87 return wrapper
88 88
89 89
90 90 class Repo(DulwichRepo):
91 91 """
92 92 A wrapper for dulwich Repo class.
93 93
94 94 Since dulwich sometimes keeps .idx file descriptors open, this leads to a
95 95 "Too many open files" error. We need to close all open file descriptors
96 96 once the repo object is destroyed.
97 97 """
98 98 def __del__(self):
99 99 if hasattr(self, 'object_store'):
100 100 self.close()
101 101
102 102
103 103 class Repository(LibGit2Repo):
104 104
105 105 def __enter__(self):
106 106 return self
107 107
108 108 def __exit__(self, exc_type, exc_val, exc_tb):
109 109 self.free()
110 110
111 111
112 112 class GitFactory(RepoFactory):
113 113 repo_type = 'git'
114 114
115 115 def _create_repo(self, wire, create, use_libgit2=False):
116 116 if use_libgit2:
117 117 return Repository(wire['path'])
118 118 else:
119 119 repo_path = str_to_dulwich(wire['path'])
120 120 return Repo(repo_path)
121 121
122 122 def repo(self, wire, create=False, use_libgit2=False):
123 123 """
124 124 Get a repository instance for the given path.
125 125 """
126 126 return self._create_repo(wire, create, use_libgit2)
127 127
128 128 def repo_libgit2(self, wire):
129 129 return self.repo(wire, use_libgit2=True)
130 130
131 131
132 132 class GitRemote(RemoteBase):
133 133
134 134 def __init__(self, factory):
135 135 self._factory = factory
136 136 self._bulk_methods = {
137 137 "date": self.date,
138 138 "author": self.author,
139 139 "branch": self.branch,
140 140 "message": self.message,
141 141 "parents": self.parents,
142 142 "_commit": self.revision,
143 143 }
144 144
145 145 def _wire_to_config(self, wire):
146 146 if 'config' in wire:
147 147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
148 148 return {}
149 149
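Illustration of the flattening above (the wire payload is hypothetical): config entries arrive as (section, key, value) triples and become 'section_key' lookups, e.g. the 'vcs_ssl_dir' read in _remote_conf below:

    # wire = {'config': [('vcs', 'ssl_dir', '/etc/ssl/certs')]}
    # _wire_to_config(wire) -> {'vcs_ssl_dir': '/etc/ssl/certs'}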
150 150 def _remote_conf(self, config):
151 151 params = [
152 152 '-c', 'core.askpass=""',
153 153 ]
154 154 ssl_cert_dir = config.get('vcs_ssl_dir')
155 155 if ssl_cert_dir:
156 156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 157 return params
158 158
159 159 @reraise_safe_exceptions
160 160 def discover_git_version(self):
161 161 stdout, _ = self.run_git_command(
162 162 {}, ['--version'], _bare=True, _safe=True)
163 163 prefix = 'git version'
164 164 if stdout.startswith(prefix):
165 165 stdout = stdout[len(prefix):]
166 166 return stdout.strip()
167 167
168 168 @reraise_safe_exceptions
169 169 def is_empty(self, wire):
170 170 repo_init = self._factory.repo_libgit2(wire)
171 171 with repo_init as repo:
172 172
173 173 try:
174 174 has_head = repo.head.name
175 175 if has_head:
176 176 return False
177 177
178 178 # NOTE(marcink): check again using a more expensive method
179 179 return repo.is_empty
180 180 except Exception:
181 181 pass
182 182
183 183 return True
184 184
185 185 @reraise_safe_exceptions
186 186 def assert_correct_path(self, wire):
187 187 cache_on, context_uid, repo_id = self._cache_on(wire)
188 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 region = self._region(wire)
189 @region.conditional_cache_on_arguments(condition=cache_on)
189 190 def _assert_correct_path(_context_uid, _repo_id):
190 191 try:
191 192 repo_init = self._factory.repo_libgit2(wire)
192 193 with repo_init as repo:
193 194 pass
194 195 except pygit2.GitError:
195 196 path = wire.get('path')
196 197 tb = traceback.format_exc()
197 198 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
198 199 return False
199 200
200 201 return True
201 202 return _assert_correct_path(context_uid, repo_id)
202 203
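The recurring change in this hunk swaps the factory-wide `self.region` for a per-wire lookup, `self._region(wire)`, so each repo context resolves its own cache region. A rough analogue with stock dogpile.cache (`conditional_cache_on_arguments` is RhodeCode's conditional variant of the stock decorator; the backend and function here are illustrative):

    from dogpile.cache import make_region

    region = make_region().configure('dogpile.cache.memory')

    @region.cache_on_arguments()
    def _cached_lookup(repo_id, sha):
        # computed once per (repo_id, sha), then served from the region
        return '{}:{}'.format(repo_id, sha)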
203 204 @reraise_safe_exceptions
204 205 def bare(self, wire):
205 206 repo_init = self._factory.repo_libgit2(wire)
206 207 with repo_init as repo:
207 208 return repo.is_bare
208 209
209 210 @reraise_safe_exceptions
210 211 def blob_as_pretty_string(self, wire, sha):
211 212 repo_init = self._factory.repo_libgit2(wire)
212 213 with repo_init as repo:
213 214 blob_obj = repo[sha]
214 215 blob = blob_obj.data
215 216 return blob
216 217
217 218 @reraise_safe_exceptions
218 219 def blob_raw_length(self, wire, sha):
219 220 cache_on, context_uid, repo_id = self._cache_on(wire)
220 @self.region.conditional_cache_on_arguments(condition=cache_on)
221 region = self._region(wire)
222 @region.conditional_cache_on_arguments(condition=cache_on)
221 223 def _blob_raw_length(_repo_id, _sha):
222 224
223 225 repo_init = self._factory.repo_libgit2(wire)
224 226 with repo_init as repo:
225 227 blob = repo[sha]
226 228 return blob.size
227 229
228 230 return _blob_raw_length(repo_id, sha)
229 231
230 232 def _parse_lfs_pointer(self, raw_content):
231 233
232 234 spec_string = 'version https://git-lfs.github.com/spec'
233 235 if raw_content and raw_content.startswith(spec_string):
234 236 pattern = re.compile(r"""
235 237 (?:\n)?
236 238 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
237 239 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
238 240 ^size[ ](?P<oid_size>[0-9]+)\n
239 241 (?:\n)?
240 242 """, re.VERBOSE | re.MULTILINE)
241 243 match = pattern.match(raw_content)
242 244 if match:
243 245 return match.groupdict()
244 246
245 247 return {}
246 248
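An example pointer that the regex above accepts (format per the Git LFS v1 spec; the oid and size values are made up):

    pointer = (
        'version https://git-lfs.github.com/spec/v1\n'
        'oid sha256:' + '0' * 64 + '\n'
        'size 12345\n'
    )
    # _parse_lfs_pointer(pointer) ->
    # {'spec_ver': 'v1', 'oid_hash': '0' * 64, 'oid_size': '12345'}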
247 249 @reraise_safe_exceptions
248 250 def is_large_file(self, wire, commit_id):
249 251 cache_on, context_uid, repo_id = self._cache_on(wire)
250 252
251 @self.region.conditional_cache_on_arguments(condition=cache_on)
253 region = self._region(wire)
254 @region.conditional_cache_on_arguments(condition=cache_on)
252 255 def _is_large_file(_repo_id, _sha):
253 256 repo_init = self._factory.repo_libgit2(wire)
254 257 with repo_init as repo:
255 258 blob = repo[commit_id]
256 259 if blob.is_binary:
257 260 return {}
258 261
259 262 return self._parse_lfs_pointer(blob.data)
260 263
261 264 return _is_large_file(repo_id, commit_id)
262 265
263 266 @reraise_safe_exceptions
264 267 def is_binary(self, wire, tree_id):
265 268 cache_on, context_uid, repo_id = self._cache_on(wire)
266 269
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
270 region = self._region(wire)
271 @region.conditional_cache_on_arguments(condition=cache_on)
268 272 def _is_binary(_repo_id, _tree_id):
269 273 repo_init = self._factory.repo_libgit2(wire)
270 274 with repo_init as repo:
271 275 blob_obj = repo[tree_id]
272 276 return blob_obj.is_binary
273 277
274 278 return _is_binary(repo_id, tree_id)
275 279
276 280 @reraise_safe_exceptions
277 281 def in_largefiles_store(self, wire, oid):
278 282 conf = self._wire_to_config(wire)
279 283 repo_init = self._factory.repo_libgit2(wire)
280 284 with repo_init as repo:
281 285 repo_name = repo.path
282 286
283 287 store_location = conf.get('vcs_git_lfs_store_location')
284 288 if store_location:
285 289
286 290 store = LFSOidStore(
287 291 oid=oid, repo=repo_name, store_location=store_location)
288 292 return store.has_oid()
289 293
290 294 return False
291 295
292 296 @reraise_safe_exceptions
293 297 def store_path(self, wire, oid):
294 298 conf = self._wire_to_config(wire)
295 299 repo_init = self._factory.repo_libgit2(wire)
296 300 with repo_init as repo:
297 301 repo_name = repo.path
298 302
299 303 store_location = conf.get('vcs_git_lfs_store_location')
300 304 if store_location:
301 305 store = LFSOidStore(
302 306 oid=oid, repo=repo_name, store_location=store_location)
303 307 return store.oid_path
304 308 raise ValueError('Unable to fetch oid with path {}'.format(oid))
305 309
306 310 @reraise_safe_exceptions
307 311 def bulk_request(self, wire, rev, pre_load):
308 312 cache_on, context_uid, repo_id = self._cache_on(wire)
309 @self.region.conditional_cache_on_arguments(condition=cache_on)
313 region = self._region(wire)
314 @region.conditional_cache_on_arguments(condition=cache_on)
310 315 def _bulk_request(_repo_id, _rev, _pre_load):
311 316 result = {}
312 317 for attr in pre_load:
313 318 try:
314 319 method = self._bulk_methods[attr]
315 320 args = [wire, rev]
316 321 result[attr] = method(*args)
317 322 except KeyError as e:
318 323 raise exceptions.VcsException(e)(
319 324 "Unknown bulk attribute: %s" % attr)
320 325 return result
321 326
322 327 return _bulk_request(repo_id, rev, sorted(pre_load))
323 328
324 329 def _build_opener(self, url):
325 330 handlers = []
326 331 url_obj = url_parser(url)
327 332 _, authinfo = url_obj.authinfo()
328 333
329 334 if authinfo:
330 335 # create a password manager
331 336 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
332 337 passmgr.add_password(*authinfo)
333 338
334 339 handlers.extend((httpbasicauthhandler(passmgr),
335 340 httpdigestauthhandler(passmgr)))
336 341
337 342 return urllib2.build_opener(*handlers)
338 343
339 344 def _type_id_to_name(self, type_id):
340 345 return {
341 346 1: b'commit',
342 347 2: b'tree',
343 348 3: b'blob',
344 349 4: b'tag'
345 350 }[type_id]
346 351
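# These integers mirror libgit2's object-type enum (commit=1, tree=2,
# blob=3, tag=4), which is why a plain dict lookup is sufficient here.
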
347 352 @reraise_safe_exceptions
348 353 def check_url(self, url, config):
349 354 url_obj = url_parser(url)
350 355 test_uri, _ = url_obj.authinfo()
351 356 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
352 357 url_obj.query = obfuscate_qs(url_obj.query)
353 358 cleaned_uri = str(url_obj)
354 359 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
355 360
356 361 if not test_uri.endswith('info/refs'):
357 362 test_uri = test_uri.rstrip('/') + '/info/refs'
358 363
359 364 o = self._build_opener(url)
360 365 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake a git client
361 366
362 367 q = {"service": 'git-upload-pack'}
363 368 qs = '?%s' % urllib.urlencode(q)
364 369 cu = "%s%s" % (test_uri, qs)
365 370 req = urllib2.Request(cu, None, {})
366 371
367 372 try:
368 373 log.debug("Trying to open URL %s", cleaned_uri)
369 374 resp = o.open(req)
370 375 if resp.code != 200:
371 376 raise exceptions.URLError()('Return Code is not 200')
372 377 except Exception as e:
373 378 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
374 379 # means it cannot be cloned
375 380 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
376 381
377 382 # now detect if it's proper git repo
378 383 gitdata = resp.read()
379 384 if 'service=git-upload-pack' in gitdata:
380 385 pass
381 386 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
382 387 # old-style git can return a different format!
383 388 pass
384 389 else:
385 390 raise exceptions.URLError()(
386 391 "url [%s] does not look like an git" % (cleaned_uri,))
387 392
388 393 return True
389 394
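# Hypothetical usage sketch for check_url: it probes the same endpoint a
# real `git clone` requests first, i.e.
# '<url>/info/refs?service=git-upload-pack'. `remote` stands in for an
# instance of this class, wired up elsewhere.
remote.check_url('https://example.com/repo.git', config=None)
# -> True, or raises exceptions.URLError if the endpoint is unreachable
#    or does not answer like a git smart-HTTP server
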
390 395 @reraise_safe_exceptions
391 396 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
392 397 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
393 398 remote_refs = self.pull(wire, url, apply_refs=False)
394 399 repo = self._factory.repo(wire)
395 400 if isinstance(valid_refs, list):
396 401 valid_refs = tuple(valid_refs)
397 402
398 403 for k in remote_refs:
399 404 # only parse heads/tags and skip so-called deferred tags
400 405 if k.startswith(valid_refs) and not k.endswith(deferred):
401 406 repo[k] = remote_refs[k]
402 407
403 408 if update_after_clone:
404 409 # we want to checkout HEAD
405 410 repo["HEAD"] = remote_refs["HEAD"]
406 411 index.build_index_from_tree(repo.path, repo.index_path(),
407 412 repo.object_store, repo["HEAD"].tree)
408 413
409 414 @reraise_safe_exceptions
410 415 def branch(self, wire, commit_id):
411 416 cache_on, context_uid, repo_id = self._cache_on(wire)
412 @self.region.conditional_cache_on_arguments(condition=cache_on)
417 region = self._region(wire)
418 @region.conditional_cache_on_arguments(condition=cache_on)
413 419 def _branch(_context_uid, _repo_id, _commit_id):
414 420 regex = re.compile('^refs/heads')
415 421
416 422 def filter_with(ref):
417 423 return regex.match(ref[0]) and ref[1] == _commit_id
418 424
419 425 branches = filter(filter_with, self.get_refs(wire).items())
420 426 return [x[0].split('refs/heads/')[-1] for x in branches]
421 427
422 428 return _branch(context_uid, repo_id, commit_id)
423 429
424 430 @reraise_safe_exceptions
425 431 def commit_branches(self, wire, commit_id):
426 432 cache_on, context_uid, repo_id = self._cache_on(wire)
427 @self.region.conditional_cache_on_arguments(condition=cache_on)
433 region = self._region(wire)
434 @region.conditional_cache_on_arguments(condition=cache_on)
428 435 def _commit_branches(_context_uid, _repo_id, _commit_id):
429 436 repo_init = self._factory.repo_libgit2(wire)
430 437 with repo_init as repo:
431 438 branches = [x for x in repo.branches.with_commit(_commit_id)]
432 439 return branches
433 440
434 441 return _commit_branches(context_uid, repo_id, commit_id)
435 442
436 443 @reraise_safe_exceptions
437 444 def add_object(self, wire, content):
438 445 repo_init = self._factory.repo_libgit2(wire)
439 446 with repo_init as repo:
440 447 blob = objects.Blob()
441 448 blob.set_raw_string(content)
442 449 repo.object_store.add_object(blob)
443 450 return blob.id
444 451
445 452 # TODO: this is quite complex, check if that can be simplified
446 453 @reraise_safe_exceptions
447 454 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
455 # Defines the root tree
456 class _Root(object):
457 def __repr__(self):
458 return 'ROOT TREE'
459 ROOT = _Root()
460
448 461 repo = self._factory.repo(wire)
449 462 object_store = repo.object_store
450 463
451 464 # Create a tree and populate it with blobs
452 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
465
466 if commit_tree and repo[commit_tree]:
467 git_commit = repo[commit_data['parents'][0]]
468 commit_tree = repo[git_commit.tree] # root tree
469 else:
470 commit_tree = objects.Tree()
453 471
454 472 for node in updated:
455 473 # Compute subdirs if needed
456 474 dirpath, nodename = vcspath.split(node['path'])
457 475 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
458 476 parent = commit_tree
459 477 ancestors = [('', parent)]
460 478
461 479 # Dig down to the deepest existing tree
462 480 while dirnames:
463 481 curdir = dirnames.pop(0)
464 482 try:
465 483 dir_id = parent[curdir][1]
466 484 except KeyError:
467 485 # put curdir back into dirnames and stop
468 486 dirnames.insert(0, curdir)
469 487 break
470 488 else:
471 489 # If found, update parent
472 490 parent = repo[dir_id]
473 491 ancestors.append((curdir, parent))
474 492 # Now parent is the deepest existing tree and we need to create
475 493 # subtrees for dirnames (in reverse order)
476 494 # [this only applies to added nodes]
477 495 new_trees = []
478 496
479 497 blob = objects.Blob.from_string(node['content'])
480 498
481 499 if dirnames:
482 500 # If there are trees which should be created, we need to build
483 501 # them now (in reverse order)
484 502 reversed_dirnames = list(reversed(dirnames))
485 503 curtree = objects.Tree()
486 504 curtree[node['node_path']] = node['mode'], blob.id
487 505 new_trees.append(curtree)
488 506 for dirname in reversed_dirnames[:-1]:
489 507 newtree = objects.Tree()
490 508 newtree[dirname] = (DIR_STAT, curtree.id)
491 509 new_trees.append(newtree)
492 510 curtree = newtree
493 511 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
494 512 else:
495 513 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
496 514
497 515 new_trees.append(parent)
498 516 # Update ancestors
499 517 reversed_ancestors = reversed(
500 518 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
501 519 for parent, tree, path in reversed_ancestors:
502 520 parent[path] = (DIR_STAT, tree.id)
503 521 object_store.add_object(tree)
504 522
505 523 object_store.add_object(blob)
506 524 for tree in new_trees:
507 525 object_store.add_object(tree)
508 526
509 527 for node_path in removed:
510 528 paths = node_path.split('/')
511 tree = commit_tree
512 trees = [tree]
529 tree = commit_tree # start with top-level
530 trees = [{'tree': tree, 'path': ROOT}]
513 531 # Traverse deep into the forest...
532 # resolve the final tree by iterating the path,
533 # e.g. a/b/c.txt resolves as:
534 # - root as tree, then
535 # - 'a' as tree,
536 # - 'b' as tree,
537 # - stop at 'c' as blob.
514 538 for path in paths:
515 539 try:
516 540 obj = repo[tree[path][1]]
517 541 if isinstance(obj, objects.Tree):
518 trees.append(obj)
542 trees.append({'tree': obj, 'path': path})
519 543 tree = obj
520 544 except KeyError:
521 545 break
546 # PROBLEM:
547 # we're not editing the same reference tree object
522 550 # Cut down the blob and all rotten trees on the way back...
523 for path, tree in reversed(zip(paths, trees)):
524 del tree[path]
525 if tree:
551 for path, tree_data in reversed(zip(paths, trees)):
552 tree = tree_data['tree']
553 del tree[path]
554 # this edits the tree in place; the modified tree must be written back for the new commit
555
556 if len(tree) > 0:
526 557 # This tree still has elements - don't remove it or any
527 558 # of its parents
528 559 break
529 560
530 561 object_store.add_object(commit_tree)
531 562
532 563 # Create commit
533 564 commit = objects.Commit()
534 565 commit.tree = commit_tree.id
535 for k, v in commit_data.iteritems():
566 for k, v in commit_data.items():
536 567 setattr(commit, k, v)
537 568 object_store.add_object(commit)
538 569
539 570 self.create_branch(wire, branch, commit.id)
540 571
541 572 # dulwich set-ref
542 573 ref = 'refs/heads/%s' % branch
543 574 repo.refs[ref] = commit.id
544 575
545 576 return commit.id
546 577
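# A hypothetical commit_data payload for commit() above; every key is set
# verbatim on the dulwich objects.Commit via setattr, so the names must
# match dulwich's Commit attributes.
commit_data = {
    'parents': ['<parent sha>'],
    'author': 'Jane Doe <jane@example.com>',
    'committer': 'Jane Doe <jane@example.com>',
    'author_time': 1600000000, 'author_timezone': 0,
    'commit_time': 1600000000, 'commit_timezone': 0,
    'encoding': 'UTF-8',
    'message': 'example commit',
}
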
547 578 @reraise_safe_exceptions
548 579 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
549 580 if url != 'default' and '://' not in url:
550 581 client = LocalGitClient(url)
551 582 else:
552 583 url_obj = url_parser(url)
553 584 o = self._build_opener(url)
554 585 url, _ = url_obj.authinfo()
555 586 client = HttpGitClient(base_url=url, opener=o)
556 587 repo = self._factory.repo(wire)
557 588
558 589 determine_wants = repo.object_store.determine_wants_all
559 590 if refs:
560 591 def determine_wants_requested(references):
561 592 return [references[r] for r in references if r in refs]
562 593 determine_wants = determine_wants_requested
563 594
564 595 try:
565 596 remote_refs = client.fetch(
566 597 path=url, target=repo, determine_wants=determine_wants)
567 598 except NotGitRepository as e:
568 599 log.warning(
569 600 'Trying to fetch from "%s" failed, not a Git repository.', url)
570 601 # Exception can contain unicode which we convert
571 602 raise exceptions.AbortException(e)(repr(e))
572 603
573 604 # mikhail: client.fetch() returns all the remote refs, but fetches only
574 605 # refs filtered by `determine_wants` function. We need to filter result
575 606 # as well
576 607 if refs:
577 608 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
578 609
579 610 if apply_refs:
580 611 # TODO: johbo: Needs proper test coverage with a git repository
581 612 # that contains a tag object, so that we would end up with
582 613 # a peeled ref at this point.
583 614 for k in remote_refs:
584 615 if k.endswith(PEELED_REF_MARKER):
585 616 log.debug("Skipping peeled reference %s", k)
586 617 continue
587 618 repo[k] = remote_refs[k]
588 619
589 620 if refs and not update_after:
590 621 # mikhail: explicitly set the head to the last ref.
591 repo['HEAD'] = remote_refs[refs[-1]]
622 repo["HEAD"] = remote_refs[refs[-1]]
592 623
593 624 if update_after:
594 625 # we want to checkout HEAD
595 626 repo["HEAD"] = remote_refs["HEAD"]
596 627 index.build_index_from_tree(repo.path, repo.index_path(),
597 628 repo.object_store, repo["HEAD"].tree)
598 629 return remote_refs
599 630
600 631 @reraise_safe_exceptions
601 632 def sync_fetch(self, wire, url, refs=None, all_refs=False):
602 633 repo = self._factory.repo(wire)
603 634 if refs and not isinstance(refs, (list, tuple)):
604 635 refs = [refs]
605 636
606 637 config = self._wire_to_config(wire)
607 638 # get all remote refs we'll use to fetch later
608 639 cmd = ['ls-remote']
609 640 if not all_refs:
610 641 cmd += ['--heads', '--tags']
611 642 cmd += [url]
612 643 output, __ = self.run_git_command(
613 644 wire, cmd, fail_on_stderr=False,
614 645 _copts=self._remote_conf(config),
615 646 extra_env={'GIT_TERMINAL_PROMPT': '0'})
616 647
617 648 remote_refs = collections.OrderedDict()
618 649 fetch_refs = []
619 650
620 651 for ref_line in output.splitlines():
621 652 sha, ref = ref_line.split('\t')
622 653 sha = sha.strip()
623 654 if ref in remote_refs:
624 655 # duplicate, skip
625 656 continue
626 657 if ref.endswith(PEELED_REF_MARKER):
627 658 log.debug("Skipping peeled reference %s", ref)
628 659 continue
629 660 # don't sync HEAD
630 661 if ref in ['HEAD']:
631 662 continue
632 663
633 664 remote_refs[ref] = sha
634 665
635 666 if refs and sha in refs:
636 667 # we filter fetch using our specified refs
637 668 fetch_refs.append('{}:{}'.format(ref, ref))
638 669 elif not refs:
639 670 fetch_refs.append('{}:{}'.format(ref, ref))
640 671 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
641 672
642 673 if fetch_refs:
643 674 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
644 675 fetch_refs_chunks = list(chunk)
645 676 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
646 677 _out, _err = self.run_git_command(
647 678 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
648 679 fail_on_stderr=False,
649 680 _copts=self._remote_conf(config),
650 681 extra_env={'GIT_TERMINAL_PROMPT': '0'})
651 682
652 683 return remote_refs
653 684
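# The `ls-remote` output parsed above consists of tab-separated
# '<sha>\t<ref>' lines, for example (shas shortened for illustration):
#   b6e3... refs/heads/stable      -> kept
#   9f2a... refs/tags/v4.27.0      -> kept
#   9f2a... refs/tags/v4.27.0^{}   -> peeled entry, skipped
#   d41d... HEAD                   -> skipped, HEAD is never synced
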
654 685 @reraise_safe_exceptions
655 686 def sync_push(self, wire, url, refs=None):
656 687 if not self.check_url(url, wire):
657 688 return
658 689 config = self._wire_to_config(wire)
659 690 self._factory.repo(wire)
660 691 self.run_git_command(
661 692 wire, ['push', url, '--mirror'], fail_on_stderr=False,
662 693 _copts=self._remote_conf(config),
663 694 extra_env={'GIT_TERMINAL_PROMPT': '0'})
664 695
665 696 @reraise_safe_exceptions
666 697 def get_remote_refs(self, wire, url):
667 698 repo = Repo(url)
668 699 return repo.get_refs()
669 700
670 701 @reraise_safe_exceptions
671 702 def get_description(self, wire):
672 703 repo = self._factory.repo(wire)
673 704 return repo.get_description()
674 705
675 706 @reraise_safe_exceptions
676 707 def get_missing_revs(self, wire, rev1, rev2, path2):
677 708 repo = self._factory.repo(wire)
678 709 LocalGitClient(thin_packs=False).fetch(path2, repo)
679 710
680 711 wire_remote = wire.copy()
681 712 wire_remote['path'] = path2
682 713 repo_remote = self._factory.repo(wire_remote)
683 714 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
684 715
685 716 revs = [
686 717 x.commit.id
687 718 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
688 719 return revs
689 720
690 721 @reraise_safe_exceptions
691 722 def get_object(self, wire, sha, maybe_unreachable=False):
692 723 cache_on, context_uid, repo_id = self._cache_on(wire)
693 @self.region.conditional_cache_on_arguments(condition=cache_on)
724 region = self._region(wire)
725 @region.conditional_cache_on_arguments(condition=cache_on)
694 726 def _get_object(_context_uid, _repo_id, _sha):
695 727 repo_init = self._factory.repo_libgit2(wire)
696 728 with repo_init as repo:
697 729
698 730 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
699 731 try:
700 732 commit = repo.revparse_single(sha)
701 733 except KeyError:
702 734 # NOTE(marcink): KeyError doesn't give us any meaningful information
703 735 # here, we instead give something more explicit
704 736 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
705 737 raise exceptions.LookupException(e)(missing_commit_err)
706 738 except ValueError as e:
707 739 raise exceptions.LookupException(e)(missing_commit_err)
708 740
709 741 is_tag = False
710 742 if isinstance(commit, pygit2.Tag):
711 743 commit = repo.get(commit.target)
712 744 is_tag = True
713 745
714 746 check_dangling = True
715 747 if is_tag:
716 748 check_dangling = False
717 749
718 750 if check_dangling and maybe_unreachable:
719 751 check_dangling = False
720 752
721 753 # a reference was used and it parsed, which means the commit is not dangling
722 754 if sha != commit.hex:
723 755 check_dangling = False
724 756
725 757 if check_dangling:
726 758 # check for dangling commit
727 759 for branch in repo.branches.with_commit(commit.hex):
728 760 if branch:
729 761 break
730 762 else:
731 763 # NOTE(marcink): Empty error doesn't give us any meaningful information
732 764 # here, we instead give something more explicit
733 765 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
734 766 raise exceptions.LookupException(e)(missing_commit_err)
735 767
736 768 commit_id = commit.hex
737 769 type_id = commit.type
738 770
739 771 return {
740 772 'id': commit_id,
741 773 'type': self._type_id_to_name(type_id),
742 774 'commit_id': commit_id,
743 775 'idx': 0
744 776 }
745 777
746 778 return _get_object(context_uid, repo_id, sha)
747 779
748 780 @reraise_safe_exceptions
749 781 def get_refs(self, wire):
750 782 cache_on, context_uid, repo_id = self._cache_on(wire)
751 @self.region.conditional_cache_on_arguments(condition=cache_on)
783 region = self._region(wire)
784 @region.conditional_cache_on_arguments(condition=cache_on)
752 785 def _get_refs(_context_uid, _repo_id):
753 786
754 787 repo_init = self._factory.repo_libgit2(wire)
755 788 with repo_init as repo:
756 789 regex = re.compile('^refs/(heads|tags)/')
757 790 return {x.name: x.target.hex for x in
758 791 filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())}
759 792
760 793 return _get_refs(context_uid, repo_id)
761 794
762 795 @reraise_safe_exceptions
763 796 def get_branch_pointers(self, wire):
764 797 cache_on, context_uid, repo_id = self._cache_on(wire)
765 @self.region.conditional_cache_on_arguments(condition=cache_on)
798 region = self._region(wire)
799 @region.conditional_cache_on_arguments(condition=cache_on)
766 800 def _get_branch_pointers(_context_uid, _repo_id):
767 801
768 802 repo_init = self._factory.repo_libgit2(wire)
769 803 regex = re.compile('^refs/heads')
770 804 with repo_init as repo:
771 805 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
772 806 return {x.target.hex: x.shorthand for x in branches}
773 807
774 808 return _get_branch_pointers(context_uid, repo_id)
775 809
776 810 @reraise_safe_exceptions
777 811 def head(self, wire, show_exc=True):
778 812 cache_on, context_uid, repo_id = self._cache_on(wire)
779 @self.region.conditional_cache_on_arguments(condition=cache_on)
813 region = self._region(wire)
814 @region.conditional_cache_on_arguments(condition=cache_on)
780 815 def _head(_context_uid, _repo_id, _show_exc):
781 816 repo_init = self._factory.repo_libgit2(wire)
782 817 with repo_init as repo:
783 818 try:
784 819 return repo.head.peel().hex
785 820 except Exception:
786 821 if show_exc:
787 822 raise
788 823 return _head(context_uid, repo_id, show_exc)
789 824
790 825 @reraise_safe_exceptions
791 826 def init(self, wire):
792 827 repo_path = str_to_dulwich(wire['path'])
793 828 self.repo = Repo.init(repo_path)
794 829
795 830 @reraise_safe_exceptions
796 831 def init_bare(self, wire):
797 832 repo_path = str_to_dulwich(wire['path'])
798 833 self.repo = Repo.init_bare(repo_path)
799 834
800 835 @reraise_safe_exceptions
801 836 def revision(self, wire, rev):
802 837
803 838 cache_on, context_uid, repo_id = self._cache_on(wire)
804 @self.region.conditional_cache_on_arguments(condition=cache_on)
839 region = self._region(wire)
840 @region.conditional_cache_on_arguments(condition=cache_on)
805 841 def _revision(_context_uid, _repo_id, _rev):
806 842 repo_init = self._factory.repo_libgit2(wire)
807 843 with repo_init as repo:
808 844 commit = repo[rev]
809 845 obj_data = {
810 846 'id': commit.id.hex,
811 847 }
812 848 # tree objects themselves don't have a tree_id attribute
813 849 if hasattr(commit, 'tree_id'):
814 850 obj_data['tree'] = commit.tree_id.hex
815 851
816 852 return obj_data
817 853 return _revision(context_uid, repo_id, rev)
818 854
819 855 @reraise_safe_exceptions
820 856 def date(self, wire, commit_id):
821 857 cache_on, context_uid, repo_id = self._cache_on(wire)
822 @self.region.conditional_cache_on_arguments(condition=cache_on)
858 region = self._region(wire)
859 @region.conditional_cache_on_arguments(condition=cache_on)
823 860 def _date(_repo_id, _commit_id):
824 861 repo_init = self._factory.repo_libgit2(wire)
825 862 with repo_init as repo:
826 863 commit = repo[commit_id]
827 864
828 865 if hasattr(commit, 'commit_time'):
829 866 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
830 867 else:
831 868 commit = commit.get_object()
832 869 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
833 870
834 871 # TODO(marcink): check dulwich difference of offset vs timezone
835 872 return [commit_time, commit_time_offset]
836 873 return _date(repo_id, commit_id)
837 874
838 875 @reraise_safe_exceptions
839 876 def author(self, wire, commit_id):
840 877 cache_on, context_uid, repo_id = self._cache_on(wire)
841 @self.region.conditional_cache_on_arguments(condition=cache_on)
878 region = self._region(wire)
879 @region.conditional_cache_on_arguments(condition=cache_on)
842 880 def _author(_repo_id, _commit_id):
843 881 repo_init = self._factory.repo_libgit2(wire)
844 882 with repo_init as repo:
845 883 commit = repo[commit_id]
846 884
847 885 if hasattr(commit, 'author'):
848 886 author = commit.author
849 887 else:
850 888 author = commit.get_object().author
851 889
852 890 if author.email:
853 891 return u"{} <{}>".format(author.name, author.email)
854 892
855 893 try:
856 894 return u"{}".format(author.name)
857 895 except Exception:
858 896 return u"{}".format(safe_unicode(author.raw_name))
859 897
860 898 return _author(repo_id, commit_id)
861 899
862 900 @reraise_safe_exceptions
863 901 def message(self, wire, commit_id):
864 902 cache_on, context_uid, repo_id = self._cache_on(wire)
865 @self.region.conditional_cache_on_arguments(condition=cache_on)
903 region = self._region(wire)
904 @region.conditional_cache_on_arguments(condition=cache_on)
866 905 def _message(_repo_id, _commit_id):
867 906 repo_init = self._factory.repo_libgit2(wire)
868 907 with repo_init as repo:
869 908 commit = repo[commit_id]
870 909 return commit.message
871 910 return _message(repo_id, commit_id)
872 911
873 912 @reraise_safe_exceptions
874 913 def parents(self, wire, commit_id):
875 914 cache_on, context_uid, repo_id = self._cache_on(wire)
876 @self.region.conditional_cache_on_arguments(condition=cache_on)
915 region = self._region(wire)
916 @region.conditional_cache_on_arguments(condition=cache_on)
877 917 def _parents(_repo_id, _commit_id):
878 918 repo_init = self._factory.repo_libgit2(wire)
879 919 with repo_init as repo:
880 920 commit = repo[commit_id]
881 921 if hasattr(commit, 'parent_ids'):
882 922 parent_ids = commit.parent_ids
883 923 else:
884 924 parent_ids = commit.get_object().parent_ids
885 925
886 926 return [x.hex for x in parent_ids]
887 927 return _parents(repo_id, commit_id)
888 928
889 929 @reraise_safe_exceptions
890 930 def children(self, wire, commit_id):
891 931 cache_on, context_uid, repo_id = self._cache_on(wire)
892 @self.region.conditional_cache_on_arguments(condition=cache_on)
932 region = self._region(wire)
933 @region.conditional_cache_on_arguments(condition=cache_on)
893 934 def _children(_repo_id, _commit_id):
894 935 output, __ = self.run_git_command(
895 936 wire, ['rev-list', '--all', '--children'])
896 937
897 938 child_ids = []
898 939 pat = re.compile(r'^%s' % commit_id)
899 940 for l in output.splitlines():
900 941 if pat.match(l):
901 942 found_ids = l.split(' ')[1:]
902 943 child_ids.extend(found_ids)
903 944
904 945 return child_ids
905 946 return _children(repo_id, commit_id)
906 947
907 948 @reraise_safe_exceptions
908 949 def set_refs(self, wire, key, value):
909 950 repo_init = self._factory.repo_libgit2(wire)
910 951 with repo_init as repo:
911 952 repo.references.create(key, value, force=True)
912 953
913 954 @reraise_safe_exceptions
914 955 def create_branch(self, wire, branch_name, commit_id, force=False):
915 956 repo_init = self._factory.repo_libgit2(wire)
916 957 with repo_init as repo:
917 958 commit = repo[commit_id]
918 959
919 960 if force:
920 961 repo.branches.local.create(branch_name, commit, force=force)
921 962 elif not repo.branches.get(branch_name):
922 963 # create only if that branch doesn't already exist
923 964 repo.branches.local.create(branch_name, commit, force=force)
924 965
925 966 @reraise_safe_exceptions
926 967 def remove_ref(self, wire, key):
927 968 repo_init = self._factory.repo_libgit2(wire)
928 969 with repo_init as repo:
929 970 repo.references.delete(key)
930 971
931 972 @reraise_safe_exceptions
932 973 def tag_remove(self, wire, tag_name):
933 974 repo_init = self._factory.repo_libgit2(wire)
934 975 with repo_init as repo:
935 976 key = 'refs/tags/{}'.format(tag_name)
936 977 repo.references.delete(key)
937 978
938 979 @reraise_safe_exceptions
939 980 def tree_changes(self, wire, source_id, target_id):
940 981 # TODO(marcink): remove this, it seems to be used only by tests
941 982 repo = self._factory.repo(wire)
942 983 source = repo[source_id].tree if source_id else None
943 984 target = repo[target_id].tree
944 985 result = repo.object_store.tree_changes(source, target)
945 986 return list(result)
946 987
947 988 @reraise_safe_exceptions
948 989 def tree_and_type_for_path(self, wire, commit_id, path):
949 990
950 991 cache_on, context_uid, repo_id = self._cache_on(wire)
951 @self.region.conditional_cache_on_arguments(condition=cache_on)
992 region = self._region(wire)
993 @region.conditional_cache_on_arguments(condition=cache_on)
952 994 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
953 995 repo_init = self._factory.repo_libgit2(wire)
954 996
955 997 with repo_init as repo:
956 998 commit = repo[commit_id]
957 999 try:
958 1000 tree = commit.tree[path]
959 1001 except KeyError:
960 1002 return None, None, None
961 1003
962 1004 return tree.id.hex, tree.type, tree.filemode
963 1005 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
964 1006
965 1007 @reraise_safe_exceptions
966 1008 def tree_items(self, wire, tree_id):
967 1009 cache_on, context_uid, repo_id = self._cache_on(wire)
968 @self.region.conditional_cache_on_arguments(condition=cache_on)
1010 region = self._region(wire)
1011 @region.conditional_cache_on_arguments(condition=cache_on)
969 1012 def _tree_items(_repo_id, _tree_id):
970 1013
971 1014 repo_init = self._factory.repo_libgit2(wire)
972 1015 with repo_init as repo:
973 1016 try:
974 1017 tree = repo[tree_id]
975 1018 except KeyError:
976 1019 raise ObjectMissing('No tree with id: {}'.format(tree_id))
977 1020
978 1021 result = []
979 1022 for item in tree:
980 1023 item_sha = item.hex
981 1024 item_mode = item.filemode
982 1025 item_type = item.type
983 1026
984 1027 if item_type == 'commit':
985 1028 # NOTE(marcink): we translate submodules to 'link' for backward compat
986 1029 item_type = 'link'
987 1030
988 1031 result.append((item.name, item_mode, item_sha, item_type))
989 1032 return result
990 1033 return _tree_items(repo_id, tree_id)
991 1034
992 1035 @reraise_safe_exceptions
993 1036 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
994 1037 """
995 1038 Old version that uses subprocess to call diff
996 1039 """
997 1040
998 1041 flags = [
999 1042 '-U%s' % context, '--patch',
1000 1043 '--binary',
1001 1044 '--find-renames',
1002 1045 '--no-indent-heuristic',
1003 1046 # '--indent-heuristic',
1004 1047 #'--full-index',
1005 1048 #'--abbrev=40'
1006 1049 ]
1007 1050
1008 1051 if opt_ignorews:
1009 1052 flags.append('--ignore-all-space')
1010 1053
1011 1054 if commit_id_1 == self.EMPTY_COMMIT:
1012 1055 cmd = ['show'] + flags + [commit_id_2]
1013 1056 else:
1014 1057 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1015 1058
1016 1059 if file_filter:
1017 1060 cmd.extend(['--', file_filter])
1018 1061
1019 1062 diff, __ = self.run_git_command(wire, cmd)
1020 1063 # If we used 'show' command, strip first few lines (until actual diff
1021 1064 # starts)
1022 1065 if commit_id_1 == self.EMPTY_COMMIT:
1023 1066 lines = diff.splitlines()
1024 1067 x = 0
1025 1068 for line in lines:
1026 1069 if line.startswith('diff'):
1027 1070 break
1028 1071 x += 1
1029 1072 # Append a newline just like the 'diff' command does
1030 1073 diff = '\n'.join(lines[x:]) + '\n'
1031 1074 return diff
1032 1075
1033 1076 @reraise_safe_exceptions
1034 1077 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1035 1078 repo_init = self._factory.repo_libgit2(wire)
1036 1079 with repo_init as repo:
1037 1080 swap = True
1038 1081 flags = 0
1039 1082 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1040 1083
1041 1084 if opt_ignorews:
1042 1085 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1043 1086
1044 1087 if commit_id_1 == self.EMPTY_COMMIT:
1045 1088 comm1 = repo[commit_id_2]
1046 1089 diff_obj = comm1.tree.diff_to_tree(
1047 1090 flags=flags, context_lines=context, swap=swap)
1048 1091
1049 1092 else:
1050 1093 comm1 = repo[commit_id_2]
1051 1094 comm2 = repo[commit_id_1]
1052 1095 diff_obj = comm1.tree.diff_to_tree(
1053 1096 comm2.tree, flags=flags, context_lines=context, swap=swap)
1054 1097 similar_flags = 0
1055 1098 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1056 1099 diff_obj.find_similar(flags=similar_flags)
1057 1100
1058 1101 if file_filter:
1059 1102 for p in diff_obj:
1060 1103 if p.delta.old_file.path == file_filter:
1061 1104 return p.patch or ''
1062 1105 # no matching path == no diff
1063 1106 return ''
1064 1107 return diff_obj.patch or ''
1065 1108
1066 1109 @reraise_safe_exceptions
1067 1110 def node_history(self, wire, commit_id, path, limit):
1068 1111 cache_on, context_uid, repo_id = self._cache_on(wire)
1069 @self.region.conditional_cache_on_arguments(condition=cache_on)
1112 region = self._region(wire)
1113 @region.conditional_cache_on_arguments(condition=cache_on)
1070 1114 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1071 1115 # optimize for n==1, rev-list is much faster for that use-case
1072 1116 if limit == 1:
1073 1117 cmd = ['rev-list', '-1', commit_id, '--', path]
1074 1118 else:
1075 1119 cmd = ['log']
1076 1120 if limit:
1077 1121 cmd.extend(['-n', str(safe_int(limit, 0))])
1078 1122 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1079 1123
1080 1124 output, __ = self.run_git_command(wire, cmd)
1081 1125 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1082 1126
1083 1127 return [x for x in commit_ids]
1084 1128 return _node_history(context_uid, repo_id, commit_id, path, limit)
1085 1129
1086 1130 @reraise_safe_exceptions
1087 1131 def node_annotate(self, wire, commit_id, path):
1088 1132
1089 1133 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1090 1134 # -l ==> outputs long shas (and we need all 40 characters)
1091 1135 # --root ==> doesn't put '^' character for boundaries
1092 1136 # -r commit_id ==> blames for the given commit
1093 1137 output, __ = self.run_git_command(wire, cmd)
1094 1138
1095 1139 result = []
1096 1140 for i, blame_line in enumerate(output.split('\n')[:-1]):
1097 1141 line_no = i + 1
1098 1142 commit_id, line = re.split(r' ', blame_line, 1)
1099 1143 result.append((line_no, commit_id, line))
1100 1144 return result
1101 1145
1102 1146 @reraise_safe_exceptions
1103 1147 def update_server_info(self, wire):
1104 1148 repo = self._factory.repo(wire)
1105 1149 update_server_info(repo)
1106 1150
1107 1151 @reraise_safe_exceptions
1108 1152 def get_all_commit_ids(self, wire):
1109 1153
1110 1154 cache_on, context_uid, repo_id = self._cache_on(wire)
1111 @self.region.conditional_cache_on_arguments(condition=cache_on)
1155 region = self._region(wire)
1156 @region.conditional_cache_on_arguments(condition=cache_on)
1112 1157 def _get_all_commit_ids(_context_uid, _repo_id):
1113 1158
1114 1159 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1115 1160 try:
1116 1161 output, __ = self.run_git_command(wire, cmd)
1117 1162 return output.splitlines()
1118 1163 except Exception:
1119 1164 # Can be raised for empty repositories
1120 1165 return []
1121 1166 return _get_all_commit_ids(context_uid, repo_id)
1122 1167
1123 1168 @reraise_safe_exceptions
1124 1169 def run_git_command(self, wire, cmd, **opts):
1125 1170 path = wire.get('path', None)
1126 1171
1127 1172 if path and os.path.isdir(path):
1128 1173 opts['cwd'] = path
1129 1174
1130 1175 if '_bare' in opts:
1131 1176 _copts = []
1132 1177 del opts['_bare']
1133 1178 else:
1134 1179 _copts = ['-c', 'core.quotepath=false', ]
1135 1180 safe_call = False
1136 1181 if '_safe' in opts:
1137 1182 # no exc on failure
1138 1183 del opts['_safe']
1139 1184 safe_call = True
1140 1185
1141 1186 if '_copts' in opts:
1142 1187 _copts.extend(opts['_copts'] or [])
1143 1188 del opts['_copts']
1144 1189
1145 1190 gitenv = os.environ.copy()
1146 1191 gitenv.update(opts.pop('extra_env', {}))
1147 1192 # need to clean up GIT_DIR from the environment!
1148 1193 if 'GIT_DIR' in gitenv:
1149 1194 del gitenv['GIT_DIR']
1150 1195 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1151 1196 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1152 1197
1153 1198 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1154 1199 _opts = {'env': gitenv, 'shell': False}
1155 1200
1156 1201 proc = None
1157 1202 try:
1158 1203 _opts.update(opts)
1159 1204 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1160 1205
1161 1206 return ''.join(proc), ''.join(proc.error)
1162 1207 except (EnvironmentError, OSError) as err:
1163 1208 cmd = ' '.join(cmd) # human friendly CMD
1164 1209 tb_err = ("Couldn't run git command (%s).\n"
1165 1210 "Original error was:%s\n"
1166 1211 "Call options:%s\n"
1167 1212 % (cmd, err, _opts))
1168 1213 log.exception(tb_err)
1169 1214 if safe_call:
1170 1215 return '', err
1171 1216 else:
1172 1217 raise exceptions.VcsException()(tb_err)
1173 1218 finally:
1174 1219 if proc:
1175 1220 proc.close()
1176 1221
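# Hypothetical usage of run_git_command: the underscore-prefixed options
# are consumed by the wrapper itself (_bare skips the default -c flags,
# _safe suppresses exceptions and returns ('', err) on failure, _copts
# injects extra `git -c` options); everything else is handed through to
# SubprocessIOChunker.
stdout, stderr = self.run_git_command(
    wire, ['rev-parse', 'HEAD'],
    _safe=True,
    extra_env={'GIT_TERMINAL_PROMPT': '0'})
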
1177 1222 @reraise_safe_exceptions
1178 1223 def install_hooks(self, wire, force=False):
1179 1224 from vcsserver.hook_utils import install_git_hooks
1180 1225 bare = self.bare(wire)
1181 1226 path = wire['path']
1182 1227 return install_git_hooks(path, bare, force_create=force)
1183 1228
1184 1229 @reraise_safe_exceptions
1185 1230 def get_hooks_info(self, wire):
1186 1231 from vcsserver.hook_utils import (
1187 1232 get_git_pre_hook_version, get_git_post_hook_version)
1188 1233 bare = self.bare(wire)
1189 1234 path = wire['path']
1190 1235 return {
1191 1236 'pre_version': get_git_pre_hook_version(path, bare),
1192 1237 'post_version': get_git_post_hook_version(path, bare),
1193 1238 }
1194 1239
1195 1240 @reraise_safe_exceptions
1241 def set_head_ref(self, wire, head_name):
1242 log.debug('Setting HEAD to `refs/heads/%s`', head_name)
1243 cmd = ['symbolic-ref', 'HEAD', 'refs/heads/%s' % head_name]
1244 output, __ = self.run_git_command(wire, cmd)
1245 return [head_name] + output.splitlines()
1246
1247 @reraise_safe_exceptions
1196 1248 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1197 1249 archive_dir_name, commit_id):
1198 1250
1199 1251 def file_walker(_commit_id, path):
1200 1252 repo_init = self._factory.repo_libgit2(wire)
1201 1253
1202 1254 with repo_init as repo:
1203 1255 commit = repo[commit_id]
1204 1256
1205 1257 if path in ['', '/']:
1206 1258 tree = commit.tree
1207 1259 else:
1208 1260 tree = commit.tree[path.rstrip('/')]
1209 1261 tree_id = tree.id.hex
1210 1262 try:
1211 1263 tree = repo[tree_id]
1212 1264 except KeyError:
1213 1265 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1214 1266
1215 1267 index = LibGit2Index.Index()
1216 1268 index.read_tree(tree)
1217 1269 file_iter = index
1218 1270
1219 1271 for fn in file_iter:
1220 1272 file_path = fn.path
1221 1273 mode = fn.mode
1222 1274 is_link = stat.S_ISLNK(mode)
1223 yield ArchiveNode(file_path, mode, is_link, repo[fn.id].read_raw)
1275 if mode == pygit2.GIT_FILEMODE_COMMIT:
1276 log.debug('Skipping path %s as a commit node', file_path)
1277 continue
1278 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1224 1279
1225 1280 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1226 1281 archive_dir_name, commit_id)
@@ -1,1022 +1,1047 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import functools
18 18 import io
19 19 import logging
20 20 import os
21 21 import stat
22 22 import urllib
23 23 import urllib2
24 24 import traceback
25 25
26 26 from hgext import largefiles, rebase, purge
27 27 from hgext.strip import strip as hgext_strip
28 28 from mercurial import commands
29 29 from mercurial import unionrepo
30 30 from mercurial import verify
31 31 from mercurial import repair
32 32
33 33 import vcsserver
34 34 from vcsserver import exceptions
35 35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 36 from vcsserver.hgcompat import (
37 37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 41 RepoLookupError, InterventionRequired, RequirementError,
42 42 alwaysmatcher, patternmatcher, hgutil)
43 43 from vcsserver.vcs_base import RemoteBase
44 44
45 45 log = logging.getLogger(__name__)
46 46
47 47
48 48 def make_ui_from_config(repo_config):
49 49
50 50 class LoggingUI(ui.ui):
51 51 def status(self, *msg, **opts):
52 52 log.info(' '.join(msg).rstrip('\n'))
53 53 super(LoggingUI, self).status(*msg, **opts)
54 54
55 55 def warn(self, *msg, **opts):
56 56 log.warn(' '.join(msg).rstrip('\n'))
57 57 super(LoggingUI, self).warn(*msg, **opts)
58 58
59 59 def error(self, *msg, **opts):
60 60 log.error(' '.join(msg).rstrip('\n'))
61 61 super(LoggingUI, self).error(*msg, **opts)
62 62
63 63 def note(self, *msg, **opts):
64 64 log.info(' '.join(msg).rstrip('\n'))
65 65 super(LoggingUI, self).note(*msg, **opts)
66 66
67 67 def debug(self, *msg, **opts):
68 68 log.debug(' '.join(msg).rstrip('\n'))
69 69 super(LoggingUI, self).debug(*msg, **opts)
70 70
71 71 baseui = LoggingUI()
72 72
73 73 # clean the baseui object
74 74 baseui._ocfg = hgconfig.config()
75 75 baseui._ucfg = hgconfig.config()
76 76 baseui._tcfg = hgconfig.config()
77 77
78 78 for section, option, value in repo_config:
79 79 baseui.setconfig(section, option, value)
80 80
81 81 # make our hgweb quiet so it doesn't print output
82 82 baseui.setconfig('ui', 'quiet', 'true')
83 83
84 84 baseui.setconfig('ui', 'paginate', 'never')
85 85 # for better error reporting from Mercurial
86 86 baseui.setconfig('ui', 'message-output', 'stderr')
87 87
88 88 # force mercurial to only use 1 thread, otherwise it may try to set a
89 89 # signal in a non-main thread, thus generating a ValueError.
90 90 baseui.setconfig('worker', 'numcpus', 1)
91 91
92 92 # If there is no config for the largefiles extension, we explicitly disable
93 93 # it here. This overrides settings from the repository's hgrc file. Recent
94 94 # mercurial versions enable largefiles in hgrc on clone from a largefile
95 95 # repo.
96 96 if not baseui.hasconfig('extensions', 'largefiles'):
97 97 log.debug('Explicitly disable largefiles extension for repo.')
98 98 baseui.setconfig('extensions', 'largefiles', '!')
99 99
100 100 return baseui
101 101
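# A usage sketch for make_ui_from_config: repo_config is an iterable of
# (section, option, value) tuples, e.g. as read from a repository's hgrc;
# the values below are illustrative only.
repo_config = [
    ('extensions', 'largefiles', '!'),
    ('phases', 'publish', 'false'),
]
baseui = make_ui_from_config(repo_config)
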
102 102
103 103 def reraise_safe_exceptions(func):
104 104 """Decorator for converting mercurial exceptions to something neutral."""
105 105
106 106 def wrapper(*args, **kwargs):
107 107 try:
108 108 return func(*args, **kwargs)
109 109 except (Abort, InterventionRequired) as e:
110 110 raise_from_original(exceptions.AbortException(e))
111 111 except RepoLookupError as e:
112 112 raise_from_original(exceptions.LookupException(e))
113 113 except RequirementError as e:
114 114 raise_from_original(exceptions.RequirementException(e))
115 115 except RepoError as e:
116 116 raise_from_original(exceptions.VcsException(e))
117 117 except LookupError as e:
118 118 raise_from_original(exceptions.LookupException(e))
119 119 except Exception as e:
120 120 if not hasattr(e, '_vcs_kind'):
121 121 log.exception("Unhandled exception in hg remote call")
122 122 raise_from_original(exceptions.UnhandledException(e))
123 123
124 124 raise
125 125 return wrapper
126 126
127 127
128 128 class MercurialFactory(RepoFactory):
129 129 repo_type = 'hg'
130 130
131 131 def _create_config(self, config, hooks=True):
132 132 if not hooks:
133 133 hooks_to_clean = frozenset((
134 134 'changegroup.repo_size', 'preoutgoing.pre_pull',
135 135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
136 136 new_config = []
137 137 for section, option, value in config:
138 138 if section == 'hooks' and option in hooks_to_clean:
139 139 continue
140 140 new_config.append((section, option, value))
141 141 config = new_config
142 142
143 143 baseui = make_ui_from_config(config)
144 144 return baseui
145 145
146 146 def _create_repo(self, wire, create):
147 147 baseui = self._create_config(wire["config"])
148 148 return instance(baseui, wire["path"], create)
149 149
150 150 def repo(self, wire, create=False):
151 151 """
152 152 Get a repository instance for the given path.
153 153 """
154 154 return self._create_repo(wire, create)
155 155
156 156
157 157 def patch_ui_message_output(baseui):
158 158 baseui.setconfig('ui', 'quiet', 'false')
159 159 output = io.BytesIO()
160 160
161 161 def write(data, **unused_kwargs):
162 162 output.write(data)
163 163
164 164 baseui.status = write
165 165 baseui.write = write
166 166 baseui.warn = write
167 167 baseui.debug = write
168 168
169 169 return baseui, output
170 170
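# A usage sketch for patch_ui_message_output: capture everything
# Mercurial would print during a command into an in-memory buffer
# instead of the process output streams.
baseui, output = patch_ui_message_output(make_ui_from_config([]))
# ... run a mercurial command with baseui ...
captured = output.getvalue()
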
171 171
172 172 class HgRemote(RemoteBase):
173 173
174 174 def __init__(self, factory):
175 175 self._factory = factory
176 176 self._bulk_methods = {
177 177 "affected_files": self.ctx_files,
178 178 "author": self.ctx_user,
179 179 "branch": self.ctx_branch,
180 180 "children": self.ctx_children,
181 181 "date": self.ctx_date,
182 182 "message": self.ctx_description,
183 183 "parents": self.ctx_parents,
184 184 "status": self.ctx_status,
185 185 "obsolete": self.ctx_obsolete,
186 186 "phase": self.ctx_phase,
187 187 "hidden": self.ctx_hidden,
188 188 "_file_paths": self.ctx_list,
189 189 }
190 190
191 191 def _get_ctx(self, repo, ref):
192 192 return get_ctx(repo, ref)
193 193
194 194 @reraise_safe_exceptions
195 195 def discover_hg_version(self):
196 196 from mercurial import util
197 197 return util.version()
198 198
199 199 @reraise_safe_exceptions
200 200 def is_empty(self, wire):
201 201 repo = self._factory.repo(wire)
202 202
203 203 try:
204 204 return len(repo) == 0
205 205 except Exception:
206 206 log.exception("failed to read object_store")
207 207 return False
208 208
209 209 @reraise_safe_exceptions
210 210 def bookmarks(self, wire):
211 211 cache_on, context_uid, repo_id = self._cache_on(wire)
212 @self.region.conditional_cache_on_arguments(condition=cache_on)
212 region = self._region(wire)
213 @region.conditional_cache_on_arguments(condition=cache_on)
213 214 def _bookmarks(_context_uid, _repo_id):
214 215 repo = self._factory.repo(wire)
215 216 return dict(repo._bookmarks)
216 217
217 218 return _bookmarks(context_uid, repo_id)
218 219
219 220 @reraise_safe_exceptions
220 221 def branches(self, wire, normal, closed):
221 222 cache_on, context_uid, repo_id = self._cache_on(wire)
222 @self.region.conditional_cache_on_arguments(condition=cache_on)
223 region = self._region(wire)
224 @region.conditional_cache_on_arguments(condition=cache_on)
223 225 def _branches(_context_uid, _repo_id, _normal, _closed):
224 226 repo = self._factory.repo(wire)
225 227 iter_branches = repo.branchmap().iterbranches()
226 228 bt = {}
227 229 for branch_name, _heads, tip, is_closed in iter_branches:
228 230 if normal and not is_closed:
229 231 bt[branch_name] = tip
230 232 if closed and is_closed:
231 233 bt[branch_name] = tip
232 234
233 235 return bt
234 236
235 237 return _branches(context_uid, repo_id, normal, closed)
236 238
237 239 @reraise_safe_exceptions
238 240 def bulk_request(self, wire, commit_id, pre_load):
239 241 cache_on, context_uid, repo_id = self._cache_on(wire)
240 @self.region.conditional_cache_on_arguments(condition=cache_on)
242 region = self._region(wire)
243 @region.conditional_cache_on_arguments(condition=cache_on)
241 244 def _bulk_request(_repo_id, _commit_id, _pre_load):
242 245 result = {}
243 246 for attr in pre_load:
244 247 try:
245 248 method = self._bulk_methods[attr]
246 249 result[attr] = method(wire, commit_id)
247 250 except KeyError as e:
248 251 raise exceptions.VcsException(e)(
249 252 'Unknown bulk attribute: "%s"' % attr)
250 253 return result
251 254
252 255 return _bulk_request(repo_id, commit_id, sorted(pre_load))
253 256
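# Hypothetical call: fetch several commit attributes in one cached round
# trip; every name in pre_load must be a key of self._bulk_methods.
attrs = self.bulk_request(wire, commit_id, pre_load=['author', 'branch', 'date'])
# -> {'author': ..., 'branch': ..., 'date': ...}
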
254 257 @reraise_safe_exceptions
255 258 def ctx_branch(self, wire, commit_id):
256 259 cache_on, context_uid, repo_id = self._cache_on(wire)
257 @self.region.conditional_cache_on_arguments(condition=cache_on)
260 region = self._region(wire)
261 @region.conditional_cache_on_arguments(condition=cache_on)
258 262 def _ctx_branch(_repo_id, _commit_id):
259 263 repo = self._factory.repo(wire)
260 264 ctx = self._get_ctx(repo, commit_id)
261 265 return ctx.branch()
262 266 return _ctx_branch(repo_id, commit_id)
263 267
264 268 @reraise_safe_exceptions
265 269 def ctx_date(self, wire, commit_id):
266 270 cache_on, context_uid, repo_id = self._cache_on(wire)
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
271 region = self._region(wire)
272 @region.conditional_cache_on_arguments(condition=cache_on)
268 273 def _ctx_date(_repo_id, _commit_id):
269 274 repo = self._factory.repo(wire)
270 275 ctx = self._get_ctx(repo, commit_id)
271 276 return ctx.date()
272 277 return _ctx_date(repo_id, commit_id)
273 278
274 279 @reraise_safe_exceptions
275 280 def ctx_description(self, wire, revision):
276 281 repo = self._factory.repo(wire)
277 282 ctx = self._get_ctx(repo, revision)
278 283 return ctx.description()
279 284
280 285 @reraise_safe_exceptions
281 286 def ctx_files(self, wire, commit_id):
282 287 cache_on, context_uid, repo_id = self._cache_on(wire)
283 @self.region.conditional_cache_on_arguments(condition=cache_on)
288 region = self._region(wire)
289 @region.conditional_cache_on_arguments(condition=cache_on)
284 290 def _ctx_files(_repo_id, _commit_id):
285 291 repo = self._factory.repo(wire)
286 292 ctx = self._get_ctx(repo, commit_id)
287 293 return ctx.files()
288 294
289 295 return _ctx_files(repo_id, commit_id)
290 296
291 297 @reraise_safe_exceptions
292 298 def ctx_list(self, path, revision):
293 299 repo = self._factory.repo(path)
294 300 ctx = self._get_ctx(repo, revision)
295 301 return list(ctx)
296 302
297 303 @reraise_safe_exceptions
298 304 def ctx_parents(self, wire, commit_id):
299 305 cache_on, context_uid, repo_id = self._cache_on(wire)
300 @self.region.conditional_cache_on_arguments(condition=cache_on)
306 region = self._region(wire)
307 @region.conditional_cache_on_arguments(condition=cache_on)
301 308 def _ctx_parents(_repo_id, _commit_id):
302 309 repo = self._factory.repo(wire)
303 310 ctx = self._get_ctx(repo, commit_id)
304 311 return [parent.hex() for parent in ctx.parents()
305 312 if not (parent.hidden() or parent.obsolete())]
306 313
307 314 return _ctx_parents(repo_id, commit_id)
308 315
309 316 @reraise_safe_exceptions
310 317 def ctx_children(self, wire, commit_id):
311 318 cache_on, context_uid, repo_id = self._cache_on(wire)
312 @self.region.conditional_cache_on_arguments(condition=cache_on)
319 region = self._region(wire)
320 @region.conditional_cache_on_arguments(condition=cache_on)
313 321 def _ctx_children(_repo_id, _commit_id):
314 322 repo = self._factory.repo(wire)
315 323 ctx = self._get_ctx(repo, commit_id)
316 324 return [child.hex() for child in ctx.children()
317 325 if not (child.hidden() or child.obsolete())]
318 326
319 327 return _ctx_children(repo_id, commit_id)
320 328
321 329 @reraise_safe_exceptions
322 330 def ctx_phase(self, wire, commit_id):
323 331 cache_on, context_uid, repo_id = self._cache_on(wire)
324 @self.region.conditional_cache_on_arguments(condition=cache_on)
332 region = self._region(wire)
333 @region.conditional_cache_on_arguments(condition=cache_on)
325 334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
326 335 repo = self._factory.repo(wire)
327 336 ctx = self._get_ctx(repo, commit_id)
328 337 # public=0, draft=1, secret=2
329 338 return ctx.phase()
330 339 return _ctx_phase(context_uid, repo_id, commit_id)
331 340
332 341 @reraise_safe_exceptions
333 342 def ctx_obsolete(self, wire, commit_id):
334 343 cache_on, context_uid, repo_id = self._cache_on(wire)
335 @self.region.conditional_cache_on_arguments(condition=cache_on)
344 region = self._region(wire)
345 @region.conditional_cache_on_arguments(condition=cache_on)
336 346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
337 347 repo = self._factory.repo(wire)
338 348 ctx = self._get_ctx(repo, commit_id)
339 349 return ctx.obsolete()
340 350 return _ctx_obsolete(context_uid, repo_id, commit_id)
341 351
342 352 @reraise_safe_exceptions
343 353 def ctx_hidden(self, wire, commit_id):
344 354 cache_on, context_uid, repo_id = self._cache_on(wire)
345 @self.region.conditional_cache_on_arguments(condition=cache_on)
355 region = self._region(wire)
356 @region.conditional_cache_on_arguments(condition=cache_on)
346 357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
347 358 repo = self._factory.repo(wire)
348 359 ctx = self._get_ctx(repo, commit_id)
349 360 return ctx.hidden()
350 361 return _ctx_hidden(context_uid, repo_id, commit_id)
351 362
352 363 @reraise_safe_exceptions
353 364 def ctx_substate(self, wire, revision):
354 365 repo = self._factory.repo(wire)
355 366 ctx = self._get_ctx(repo, revision)
356 367 return ctx.substate
357 368
358 369 @reraise_safe_exceptions
359 370 def ctx_status(self, wire, revision):
360 371 repo = self._factory.repo(wire)
361 372 ctx = self._get_ctx(repo, revision)
362 373 status = repo[ctx.p1().node()].status(other=ctx.node())
363 374 # object of status (odd, custom named tuple in mercurial) is not
364 375 # correctly serializable, we make it a list, as the underling
365 376 # API expects this to be a list
366 377 return list(status)
367 378
368 379 @reraise_safe_exceptions
369 380 def ctx_user(self, wire, revision):
370 381 repo = self._factory.repo(wire)
371 382 ctx = self._get_ctx(repo, revision)
372 383 return ctx.user()
373 384
374 385 @reraise_safe_exceptions
375 386 def check_url(self, url, config):
376 387 _proto = None
377 388 if '+' in url[:url.find('://')]:
378 389 _proto = url[0:url.find('+')]
379 390 url = url[url.find('+') + 1:]
380 391 handlers = []
381 392 url_obj = url_parser(url)
382 393 test_uri, authinfo = url_obj.authinfo()
383 394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
384 395 url_obj.query = obfuscate_qs(url_obj.query)
385 396
386 397 cleaned_uri = str(url_obj)
387 398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
388 399
389 400 if authinfo:
390 401 # create a password manager
391 402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
392 403 passmgr.add_password(*authinfo)
393 404
394 405 handlers.extend((httpbasicauthhandler(passmgr),
395 406 httpdigestauthhandler(passmgr)))
396 407
397 408 o = urllib2.build_opener(*handlers)
398 409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
399 410 ('Accept', 'application/mercurial-0.1')]
400 411
401 412 q = {"cmd": 'between'}
402 413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
403 414 qs = '?%s' % urllib.urlencode(q)
404 415 cu = "%s%s" % (test_uri, qs)
405 416 req = urllib2.Request(cu, None, {})
406 417
407 418 try:
408 419 log.debug("Trying to open URL %s", cleaned_uri)
409 420 resp = o.open(req)
410 421 if resp.code != 200:
411 422 raise exceptions.URLError()('Return Code is not 200')
412 423 except Exception as e:
413 424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
414 425 # means it cannot be cloned
415 426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
416 427
417 428 # now check if it's a proper hg repo, but don't do it for svn
418 429 try:
419 430 if _proto == 'svn':
420 431 pass
421 432 else:
422 433 # check for pure hg repos
423 434 log.debug(
424 435 "Verifying if URL is a Mercurial repository: %s",
425 436 cleaned_uri)
426 437 ui = make_ui_from_config(config)
427 438 peer_checker = makepeer(ui, url)
428 439 peer_checker.lookup('tip')
429 440 except Exception as e:
430 441 log.warning("URL is not a valid Mercurial repository: %s",
431 442 cleaned_uri)
432 443 raise exceptions.URLError(e)(
433 444 "url [%s] does not look like an hg repo org_exc: %s"
434 445 % (cleaned_uri, e))
435 446
436 447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
437 448 return True
438 449
439 450 @reraise_safe_exceptions
440 451 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
441 452 repo = self._factory.repo(wire)
442 453
443 454 if file_filter:
444 455 match_filter = match(file_filter[0], '', [file_filter[1]])
445 456 else:
446 457 match_filter = file_filter
447 458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
448 459
449 460 try:
450 461 return "".join(patch.diff(
451 462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
452 463 except RepoLookupError as e:
453 464 raise exceptions.LookupException(e)()
454 465
455 466 @reraise_safe_exceptions
456 467 def node_history(self, wire, revision, path, limit):
457 468 cache_on, context_uid, repo_id = self._cache_on(wire)
458 @self.region.conditional_cache_on_arguments(condition=cache_on)
469 region = self._region(wire)
470 @region.conditional_cache_on_arguments(condition=cache_on)
459 471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
460 472 repo = self._factory.repo(wire)
461 473
462 474 ctx = self._get_ctx(repo, revision)
463 475 fctx = ctx.filectx(path)
464 476
465 477 def history_iter():
466 478 limit_rev = fctx.rev()
467 479 for obj in reversed(list(fctx.filelog())):
468 480 obj = fctx.filectx(obj)
469 481 ctx = obj.changectx()
470 482 if ctx.hidden() or ctx.obsolete():
471 483 continue
472 484
473 485 if limit_rev >= obj.rev():
474 486 yield obj
475 487
476 488 history = []
477 489 for cnt, obj in enumerate(history_iter()):
478 490 if limit and cnt >= limit:
479 491 break
480 492 history.append(hex(obj.node()))
481 493
482 494 return history
483 495 return _node_history(context_uid, repo_id, revision, path, limit)
484 496
485 497 @reraise_safe_exceptions
486 498 def node_history_untill(self, wire, revision, path, limit):
487 499 cache_on, context_uid, repo_id = self._cache_on(wire)
488 @self.region.conditional_cache_on_arguments(condition=cache_on)
500 region = self._region(wire)
501 @region.conditional_cache_on_arguments(condition=cache_on)
489 502 def _node_history_until(_context_uid, _repo_id, _revision, _path, _limit):
490 503 repo = self._factory.repo(wire)
491 504 ctx = self._get_ctx(repo, revision)
492 505 fctx = ctx.filectx(path)
493 506
494 507 file_log = list(fctx.filelog())
495 508 if limit:
496 509 # Limit to the last n items
497 510 file_log = file_log[-limit:]
498 511
499 512 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
500 513 return _node_history_until(context_uid, repo_id, revision, path, limit)
501 514
502 515 @reraise_safe_exceptions
503 516 def fctx_annotate(self, wire, revision, path):
504 517 repo = self._factory.repo(wire)
505 518 ctx = self._get_ctx(repo, revision)
506 519 fctx = ctx.filectx(path)
507 520
508 521 result = []
509 522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
510 523 ln_no = i
511 524 sha = hex(annotate_obj.fctx.node())
512 525 content = annotate_obj.text
513 526 result.append((ln_no, sha, content))
514 527 return result
515 528
516 529 @reraise_safe_exceptions
517 530 def fctx_node_data(self, wire, revision, path):
518 531 repo = self._factory.repo(wire)
519 532 ctx = self._get_ctx(repo, revision)
520 533 fctx = ctx.filectx(path)
521 534 return fctx.data()
522 535
523 536 @reraise_safe_exceptions
524 537 def fctx_flags(self, wire, commit_id, path):
525 538 cache_on, context_uid, repo_id = self._cache_on(wire)
526 @self.region.conditional_cache_on_arguments(condition=cache_on)
539 region = self._region(wire)
540 @region.conditional_cache_on_arguments(condition=cache_on)
527 541 def _fctx_flags(_repo_id, _commit_id, _path):
528 542 repo = self._factory.repo(wire)
529 543 ctx = self._get_ctx(repo, commit_id)
530 544 fctx = ctx.filectx(path)
531 545 return fctx.flags()
532 546
533 547 return _fctx_flags(repo_id, commit_id, path)
534 548
535 549 @reraise_safe_exceptions
536 550 def fctx_size(self, wire, commit_id, path):
537 551 cache_on, context_uid, repo_id = self._cache_on(wire)
538 @self.region.conditional_cache_on_arguments(condition=cache_on)
552 region = self._region(wire)
553 @region.conditional_cache_on_arguments(condition=cache_on)
539 554 def _fctx_size(_repo_id, _revision, _path):
540 555 repo = self._factory.repo(wire)
541 556 ctx = self._get_ctx(repo, commit_id)
542 557 fctx = ctx.filectx(path)
543 558 return fctx.size()
544 559 return _fctx_size(repo_id, commit_id, path)
545 560
546 561 @reraise_safe_exceptions
547 562 def get_all_commit_ids(self, wire, name):
548 563 cache_on, context_uid, repo_id = self._cache_on(wire)
549 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 region = self._region(wire)
565 @region.conditional_cache_on_arguments(condition=cache_on)
550 566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
551 567 repo = self._factory.repo(wire)
552 568 repo = repo.filtered(name)
553 569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
554 570 return revs
555 571 return _get_all_commit_ids(context_uid, repo_id, name)
556 572
557 573 @reraise_safe_exceptions
558 574 def get_config_value(self, wire, section, name, untrusted=False):
559 575 repo = self._factory.repo(wire)
560 576 return repo.ui.config(section, name, untrusted=untrusted)
561 577
562 578 @reraise_safe_exceptions
563 579 def is_large_file(self, wire, commit_id, path):
564 580 cache_on, context_uid, repo_id = self._cache_on(wire)
565 @self.region.conditional_cache_on_arguments(condition=cache_on)
581 region = self._region(wire)
582 @region.conditional_cache_on_arguments(condition=cache_on)
566 583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
567 584 return largefiles.lfutil.isstandin(path)
568 585
569 586 return _is_large_file(context_uid, repo_id, commit_id, path)
570 587
571 588 @reraise_safe_exceptions
572 589 def is_binary(self, wire, revision, path):
573 590 cache_on, context_uid, repo_id = self._cache_on(wire)
574 591
575 @self.region.conditional_cache_on_arguments(condition=cache_on)
592 region = self._region(wire)
593 @region.conditional_cache_on_arguments(condition=cache_on)
576 594 def _is_binary(_repo_id, _sha, _path):
577 595 repo = self._factory.repo(wire)
578 596 ctx = self._get_ctx(repo, revision)
579 597 fctx = ctx.filectx(path)
580 598 return fctx.isbinary()
581 599
582 600 return _is_binary(repo_id, revision, path)
583 601
584 602 @reraise_safe_exceptions
585 603 def in_largefiles_store(self, wire, sha):
586 604 repo = self._factory.repo(wire)
587 605 return largefiles.lfutil.instore(repo, sha)
588 606
589 607 @reraise_safe_exceptions
590 608 def in_user_cache(self, wire, sha):
591 609 repo = self._factory.repo(wire)
592 610 return largefiles.lfutil.inusercache(repo.ui, sha)
593 611
594 612 @reraise_safe_exceptions
595 613 def store_path(self, wire, sha):
596 614 repo = self._factory.repo(wire)
597 615 return largefiles.lfutil.storepath(repo, sha)
598 616
599 617 @reraise_safe_exceptions
600 618 def link(self, wire, sha, path):
601 619 repo = self._factory.repo(wire)
602 620 largefiles.lfutil.link(
603 621 largefiles.lfutil.usercachepath(repo.ui, sha), path)
604 622
605 623 @reraise_safe_exceptions
606 624 def localrepository(self, wire, create=False):
607 625 self._factory.repo(wire, create=create)
608 626
609 627 @reraise_safe_exceptions
610 628 def lookup(self, wire, revision, both):
611 629 cache_on, context_uid, repo_id = self._cache_on(wire)
612 630
613 @self.region.conditional_cache_on_arguments(condition=cache_on)
631 region = self._region(wire)
632 @region.conditional_cache_on_arguments(condition=cache_on)
614 633 def _lookup(_context_uid, _repo_id, _revision, _both):
615 634
616 635 repo = self._factory.repo(wire)
617 636 rev = _revision
618 637 if isinstance(rev, int):
619 638 # NOTE(marcink):
620 639 # since Mercurial doesn't support negative indexes properly,
621 640 # we need to shift by one to get the proper index, e.g.
622 641 # repo[-1] => repo[-2]
623 642 # repo[0] => repo[-1]
624 643 if rev <= 0:
625 644 rev = rev - 1
626 645 try:
627 646 ctx = self._get_ctx(repo, rev)
628 647 except (TypeError, RepoLookupError) as e:
629 648 e._org_exc_tb = traceback.format_exc()
630 649 raise exceptions.LookupException(e)(rev)
631 650 except LookupError as e:
632 651 e._org_exc_tb = traceback.format_exc()
633 652 raise exceptions.LookupException(e)(e.name)
634 653
635 654 if not both:
636 655 return ctx.hex()
637 656
638 657 ctx = repo[ctx.hex()]
639 658 return ctx.hex(), ctx.rev()
640 659
641 660 return _lookup(context_uid, repo_id, revision, both)
642 661
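# NOTE(editor sketch): a tiny runnable illustration of the index shift in
# _lookup() above; repo contents are irrelevant, only the arithmetic matters.
def _shift_negative_rev(rev):
    # same rule as above: RhodeCode treats 0 as the last commit, so all
    # non-positive revs move down by one before Mercurial sees them
    if isinstance(rev, int) and rev <= 0:
        return rev - 1
    return rev

assert _shift_negative_rev(0) == -1   # last commit
assert _shift_negative_rev(-1) == -2  # one before last
assert _shift_negative_rev(5) == 5    # positive revs pass through unchanged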
643 662 @reraise_safe_exceptions
644 663 def sync_push(self, wire, url):
645 664 if not self.check_url(url, wire['config']):
646 665 return
647 666
648 667 repo = self._factory.repo(wire)
649 668
650 669 # Disable any prompts for this repo
651 670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
652 671
653 672 bookmarks = dict(repo._bookmarks).keys()
654 673 remote = peer(repo, {}, url)
655 674 # Disable any prompts for this remote
656 675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
657 676
658 677 return exchange.push(
659 678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
660 679
661 680 @reraise_safe_exceptions
662 681 def revision(self, wire, rev):
663 682 repo = self._factory.repo(wire)
664 683 ctx = self._get_ctx(repo, rev)
665 684 return ctx.rev()
666 685
667 686 @reraise_safe_exceptions
668 687 def rev_range(self, wire, commit_filter):
669 688 cache_on, context_uid, repo_id = self._cache_on(wire)
670 689
671 @self.region.conditional_cache_on_arguments(condition=cache_on)
690 region = self._region(wire)
691 @region.conditional_cache_on_arguments(condition=cache_on)
672 692 def _rev_range(_context_uid, _repo_id, _filter):
673 693 repo = self._factory.repo(wire)
674 694 revisions = list(revrange(repo, commit_filter))
675 695 return revisions
676 696
677 697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
678 698
679 699 @reraise_safe_exceptions
680 700 def rev_range_hash(self, wire, node):
681 701 repo = self._factory.repo(wire)
682 702
683 703 def get_revs(repo, rev_opt):
684 704 if rev_opt:
685 705 revs = revrange(repo, rev_opt)
686 706 if len(revs) == 0:
687 707 return (nullrev, nullrev)
688 708 return max(revs), min(revs)
689 709 else:
690 710 return len(repo) - 1, 0
691 711
692 712 stop, start = get_revs(repo, [node + ':'])
693 713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
694 714 return revs
695 715
696 716 @reraise_safe_exceptions
697 717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
698 718 other_path = kwargs.pop('other_path', None)
699 719
700 720 # case when we want to compare two independent repositories
701 721 if other_path and other_path != wire["path"]:
702 722 baseui = self._factory._create_config(wire["config"])
703 723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
704 724 else:
705 725 repo = self._factory.repo(wire)
706 726 return list(repo.revs(rev_spec, *args))
707 727
708 728 @reraise_safe_exceptions
709 729 def verify(self, wire):
710 730 repo = self._factory.repo(wire)
711 731 baseui = self._factory._create_config(wire['config'])
712 732
713 733 baseui, output = patch_ui_message_output(baseui)
714 734
715 735 repo.ui = baseui
716 736 verify.verify(repo)
717 737 return output.getvalue()
718 738
719 739 @reraise_safe_exceptions
720 740 def hg_update_cache(self, wire):
721 741 repo = self._factory.repo(wire)
722 742 baseui = self._factory._create_config(wire['config'])
723 743 baseui, output = patch_ui_message_output(baseui)
724 744
725 745 repo.ui = baseui
726 746 with repo.wlock(), repo.lock():
727 747 repo.updatecaches(full=True)
728 748
729 749 return output.getvalue()
730 750
731 751 @reraise_safe_exceptions
732 752 def hg_rebuild_fn_cache(self, wire):
733 753 repo = self._factory.repo(wire)
734 754 baseui = self._factory._create_config(wire['config'])
735 755 baseui, output = patch_ui_message_output(baseui)
736 756
737 757 repo.ui = baseui
738 758
739 759 repair.rebuildfncache(baseui, repo)
740 760
741 761 return output.getvalue()
742 762
743 763 @reraise_safe_exceptions
744 764 def tags(self, wire):
745 765 cache_on, context_uid, repo_id = self._cache_on(wire)
746 @self.region.conditional_cache_on_arguments(condition=cache_on)
766 region = self._region(wire)
767 @region.conditional_cache_on_arguments(condition=cache_on)
747 768 def _tags(_context_uid, _repo_id):
748 769 repo = self._factory.repo(wire)
749 770 return repo.tags()
750 771
751 772 return _tags(context_uid, repo_id)
752 773
753 774 @reraise_safe_exceptions
754 775 def update(self, wire, node=None, clean=False):
755 776 repo = self._factory.repo(wire)
756 777 baseui = self._factory._create_config(wire['config'])
757 778 commands.update(baseui, repo, node=node, clean=clean)
758 779
759 780 @reraise_safe_exceptions
760 781 def identify(self, wire):
761 782 repo = self._factory.repo(wire)
762 783 baseui = self._factory._create_config(wire['config'])
763 784 output = io.BytesIO()
764 785 baseui.write = output.write
765 786 # This is required to get a full node id
766 787 baseui.debugflag = True
767 788 commands.identify(baseui, repo, id=True)
768 789
769 790 return output.getvalue()
770 791
771 792 @reraise_safe_exceptions
772 793 def heads(self, wire, branch=None):
773 794 repo = self._factory.repo(wire)
774 795 baseui = self._factory._create_config(wire['config'])
775 796 output = io.BytesIO()
776 797
777 798 def write(data, **unused_kwargs):
778 799 output.write(data)
779 800
780 801 baseui.write = write
781 802 if branch:
782 803 args = [branch]
783 804 else:
784 805 args = []
785 806 commands.heads(baseui, repo, template='{node} ', *args)
786 807
787 808 return output.getvalue()
788 809
789 810 @reraise_safe_exceptions
790 811 def ancestor(self, wire, revision1, revision2):
791 812 repo = self._factory.repo(wire)
792 813 changelog = repo.changelog
793 814 lookup = repo.lookup
794 815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
795 816 return hex(a)
796 817
797 818 @reraise_safe_exceptions
798 819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
799 820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
800 821 clone(baseui, source, dest, noupdate=not update_after_clone)
801 822
802 823 @reraise_safe_exceptions
803 824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
804 825
805 826 repo = self._factory.repo(wire)
806 827 baseui = self._factory._create_config(wire['config'])
807 828 publishing = baseui.configbool('phases', 'publish')
808 829 if publishing:
809 830 new_commit = 'public'
810 831 else:
811 832 new_commit = 'draft'
812 833
813 834 def _filectxfn(_repo, ctx, path):
814 835 """
815 836 Marks the given path as added/changed/removed in the given _repo. This
816 837 is used by Mercurial's internal commit function.
817 838 """
818 839
819 840 # check if this path is removed
820 841 if path in removed:
821 842 # returning None is a way to mark node for removal
822 843 return None
823 844
824 845 # check if this path is added
825 846 for node in updated:
826 847 if node['path'] == path:
827 848 return memfilectx(
828 849 _repo,
829 850 changectx=ctx,
830 851 path=node['path'],
831 852 data=node['content'],
832 853 islink=False,
833 854 isexec=bool(node['mode'] & stat.S_IXUSR),
834 855 copysource=False)
835 856
836 857 raise exceptions.AbortException()(
837 858 "Given path haven't been marked as added, "
838 859 "changed or removed (%s)" % path)
839 860
840 861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
841 862
842 863 commit_ctx = memctx(
843 864 repo=repo,
844 865 parents=parents,
845 866 text=message,
846 867 files=files,
847 868 filectxfn=_filectxfn,
848 869 user=user,
849 870 date=(commit_time, commit_timezone),
850 871 extra=extra)
851 872
852 873 n = repo.commitctx(commit_ctx)
853 874 new_id = hex(n)
854 875
855 876 return new_id
856 877
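# NOTE(editor sketch): the shape of the `updated`/`removed` arguments that
# commitctx() consumes above; the node dicts below are illustrative values.
import stat

updated = [{'path': 'README', 'content': 'hello\n', 'mode': 0o100644}]
removed = ['old_file.txt']
# _filectxfn() returns None for paths in `removed` (marking deletion) and a
# memfilectx for paths found in `updated`; the executable bit comes from
# node['mode'] & stat.S_IXUSR.
assert not (updated[0]['mode'] & stat.S_IXUSR)  # 0o100644 is not executable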
857 878 @reraise_safe_exceptions
858 879 def pull(self, wire, url, commit_ids=None):
859 880 repo = self._factory.repo(wire)
860 881 # Disable any prompts for this repo
861 882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
862 883
863 884 remote = peer(repo, {}, url)
864 885 # Disable any prompts for this remote
865 886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
866 887
867 888 if commit_ids:
868 889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
869 890
870 891 return exchange.pull(
871 892 repo, remote, heads=commit_ids, force=None).cgresult
872 893
873 894 @reraise_safe_exceptions
874 895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
875 896 repo = self._factory.repo(wire)
876 897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
877 898
878 899 # Mercurial internally has a lot of logic that checks ONLY whether an
879 900 # option is defined, so we pass options only when they are set
880 901 opts = {}
881 902 if bookmark:
882 903 opts['bookmark'] = bookmark
883 904 if branch:
884 905 opts['branch'] = branch
885 906 if revision:
886 907 opts['rev'] = revision
887 908
888 909 commands.pull(baseui, repo, source, **opts)
889 910
890 911 @reraise_safe_exceptions
891 912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
892 913 repo = self._factory.repo(wire)
893 914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
894 915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
895 916 new_branch=push_branches)
896 917
897 918 @reraise_safe_exceptions
898 919 def strip(self, wire, revision, update, backup):
899 920 repo = self._factory.repo(wire)
900 921 ctx = self._get_ctx(repo, revision)
901 922 hgext_strip(
902 923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
903 924
904 925 @reraise_safe_exceptions
905 926 def get_unresolved_files(self, wire):
906 927 repo = self._factory.repo(wire)
907 928
908 929 log.debug('Calculating unresolved files for repo: %s', repo)
909 930 output = io.BytesIO()
910 931
911 932 def write(data, **unused_kwargs):
912 933 output.write(data)
913 934
914 935 baseui = self._factory._create_config(wire['config'])
915 936 baseui.write = write
916 937
917 938 commands.resolve(baseui, repo, list=True)
918 939 unresolved = output.getvalue().splitlines()
919 940 return unresolved
920 941
921 942 @reraise_safe_exceptions
922 943 def merge(self, wire, revision):
923 944 repo = self._factory.repo(wire)
924 945 baseui = self._factory._create_config(wire['config'])
925 946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
926 947
927 948 # If sub repositories are used, mercurial prompts the user in
928 949 # case of merge conflicts or different sub repository sources. By
929 950 # setting the interactive flag to `False`, mercurial doesn't prompt the
930 951 # user but instead uses a default value.
931 952 repo.ui.setconfig('ui', 'interactive', False)
932 953 commands.merge(baseui, repo, rev=revision)
933 954
934 955 @reraise_safe_exceptions
935 956 def merge_state(self, wire):
936 957 repo = self._factory.repo(wire)
937 958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
938 959
939 960 # If sub repositories are used, mercurial prompts the user in
940 961 # case of merge conflicts or different sub repository sources. By
941 962 # setting the interactive flag to `False`, mercurial doesn't prompt the
942 963 # user but instead uses a default value.
943 964 repo.ui.setconfig('ui', 'interactive', False)
944 965 ms = hg_merge.mergestate(repo)
945 966 return list(ms.unresolved())
946 967
947 968 @reraise_safe_exceptions
948 969 def commit(self, wire, message, username, close_branch=False):
949 970 repo = self._factory.repo(wire)
950 971 baseui = self._factory._create_config(wire['config'])
951 972 repo.ui.setconfig('ui', 'username', username)
952 973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
953 974
954 975 @reraise_safe_exceptions
955 976 def rebase(self, wire, source=None, dest=None, abort=False):
956 977 repo = self._factory.repo(wire)
957 978 baseui = self._factory._create_config(wire['config'])
958 979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
959 980 # If sub repositories are used, mercurial prompts the user in
960 981 # case of merge conflicts or different sub repository sources. By
961 982 # setting the interactive flag to `False`, mercurial doesn't prompt the
962 983 # user but instead uses a default value.
963 984 repo.ui.setconfig('ui', 'interactive', False)
964 985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
965 986
966 987 @reraise_safe_exceptions
967 988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
968 989 repo = self._factory.repo(wire)
969 990 ctx = self._get_ctx(repo, revision)
970 991 node = ctx.node()
971 992
972 993 date = (tag_time, tag_timezone)
973 994 try:
974 995 hg_tag.tag(repo, name, node, message, local, user, date)
975 996 except Abort as e:
976 997 log.exception("Tag operation aborted")
977 998 # Exception can contain unicode which we convert
978 999 raise exceptions.AbortException(e)(repr(e))
979 1000
980 1001 @reraise_safe_exceptions
981 1002 def bookmark(self, wire, bookmark, revision=None):
982 1003 repo = self._factory.repo(wire)
983 1004 baseui = self._factory._create_config(wire['config'])
984 1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
985 1006
986 1007 @reraise_safe_exceptions
987 1008 def install_hooks(self, wire, force=False):
988 1009 # we don't need any special hooks for Mercurial
989 1010 pass
990 1011
991 1012 @reraise_safe_exceptions
992 1013 def get_hooks_info(self, wire):
993 1014 return {
994 1015 'pre_version': vcsserver.__version__,
995 1016 'post_version': vcsserver.__version__,
996 1017 }
997 1018
998 1019 @reraise_safe_exceptions
1020 def set_head_ref(self, wire, head_name):
1021 pass
1022
1023 @reraise_safe_exceptions
999 1024 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1000 1025 archive_dir_name, commit_id):
1001 1026
1002 1027 def file_walker(_commit_id, path):
1003 1028 repo = self._factory.repo(wire)
1004 1029 ctx = repo[_commit_id]
1005 1030 is_root = path in ['', '/']
1006 1031 if is_root:
1007 1032 matcher = alwaysmatcher(badfn=None)
1008 1033 else:
1009 1034 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1010 1035 file_iter = ctx.manifest().walk(matcher)
1011 1036
1012 1037 for fn in file_iter:
1013 1038 file_path = fn
1014 1039 flags = ctx.flags(fn)
1015 1040 mode = 0o755 if b'x' in flags else 0o644
1016 1041 is_link = b'l' in flags
1017 1042
1018 1043 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1019 1044
1020 1045 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1021 1046 archive_dir_name, commit_id)
1022 1047
@@ -1,729 +1,729 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 49 try:
50 50 connection.request('POST', '/', body)
51 51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Hook call failed on connection %s', connection.__dict__)
53 53 raise
54 54 response = connection.getresponse()
55 55
56 56 response_data = response.read()
57 57
58 58 try:
59 59 return json.loads(response_data)
60 60 except Exception:
61 61 log.exception('Failed to decode hook response json data. '
62 62 'response_code:%s, raw_data:%s',
63 63 response.status, response_data)
64 64 raise
65 65
66 66 def _serialize(self, hook_name, extras):
67 67 data = {
68 68 'method': hook_name,
69 69 'extras': extras
70 70 }
71 71 return json.dumps(data)
72 72
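# NOTE(editor sketch): the JSON body HooksHttpClient posts, with illustrative
# values only; the server is expected to answer with a JSON document of the
# shape consumed by _call_hook() below, e.g. {'status': 0, 'output': ''}.
import json  # stdlib json mirrors the simplejson used above

body = json.dumps({
    'method': 'pre_push',              # hook name, dispatched on the server
    'extras': {'repository': 'repo'},  # RC_SCM_DATA-style extras (made up)
})
# the client then issues: connection.request('POST', '/', body)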
73 73
74 74 class HooksDummyClient(object):
75 75 def __init__(self, hooks_module):
76 76 self._hooks_module = importlib.import_module(hooks_module)
77 77
78 78 def __call__(self, hook_name, extras):
79 79 with self._hooks_module.Hooks() as hooks:
80 80 return getattr(hooks, hook_name)(extras)
81 81
82 82
83 83 class HooksShadowRepoClient(object):
84 84
85 85 def __call__(self, hook_name, extras):
86 86 return {'output': '', 'status': 0}
87 87
88 88
89 89 class RemoteMessageWriter(object):
90 90 """Writer base class."""
91 91 def write(self, message):
92 92 raise NotImplementedError()
93 93
94 94
95 95 class HgMessageWriter(RemoteMessageWriter):
96 96 """Writer that knows how to send messages to mercurial clients."""
97 97
98 98 def __init__(self, ui):
99 99 self.ui = ui
100 100
101 101 def write(self, message):
102 102 # TODO: Check why the quiet flag is set by default.
103 103 old = self.ui.quiet
104 104 self.ui.quiet = False
105 105 self.ui.status(message.encode('utf-8'))
106 106 self.ui.quiet = old
107 107
108 108
109 109 class GitMessageWriter(RemoteMessageWriter):
110 110 """Writer that knows how to send messages to git clients."""
111 111
112 112 def __init__(self, stdout=None):
113 113 self.stdout = stdout or sys.stdout
114 114
115 115 def write(self, message):
116 116 self.stdout.write(message.encode('utf-8'))
117 117
118 118
119 119 class SvnMessageWriter(RemoteMessageWriter):
120 120 """Writer that knows how to send messages to svn clients."""
121 121
122 122 def __init__(self, stderr=None):
123 123 # SVN needs data sent to stderr for back-to-client messaging
124 124 self.stderr = stderr or sys.stderr
125 125
126 126 def write(self, message):
127 127 self.stderr.write(message.encode('utf-8'))
128 128
129 129
130 130 def _handle_exception(result):
131 131 exception_class = result.get('exception')
132 132 exception_traceback = result.get('exception_traceback')
133 133
134 134 if exception_traceback:
135 135 log.error('Got traceback from remote call:%s', exception_traceback)
136 136
137 137 if exception_class == 'HTTPLockedRC':
138 138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 139 elif exception_class == 'HTTPBranchProtected':
140 140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 141 elif exception_class == 'RepositoryError':
142 142 raise exceptions.VcsException()(*result['exception_args'])
143 143 elif exception_class:
144 144 raise Exception('Got remote exception "%s" with args "%s"' %
145 145 (exception_class, result['exception_args']))
146 146
147 147
148 148 def _get_hooks_client(extras):
149 149 hooks_uri = extras.get('hooks_uri')
150 150 is_shadow_repo = extras.get('is_shadow_repo')
151 151 if hooks_uri:
152 152 return HooksHttpClient(extras['hooks_uri'])
153 153 elif is_shadow_repo:
154 154 return HooksShadowRepoClient()
155 155 else:
156 156 return HooksDummyClient(extras['hooks_module'])
157 157
158 158
159 159 def _call_hook(hook_name, extras, writer):
160 160 hooks_client = _get_hooks_client(extras)
161 161 log.debug('Hooks, using client:%s', hooks_client)
162 162 result = hooks_client(hook_name, extras)
163 163 log.debug('Hooks got result: %s', result)
164 164
165 165 _handle_exception(result)
166 166 writer.write(result['output'])
167 167
168 168 return result['status']
169 169
170 170
171 171 def _extras_from_ui(ui):
172 172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 173 if not hook_data:
174 174 # maybe it's inside environ?
175 175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 176 if env_hook_data:
177 177 hook_data = env_hook_data
178 178
179 179 extras = {}
180 180 if hook_data:
181 181 extras = json.loads(hook_data)
182 182 return extras
183 183
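# NOTE(editor sketch): RC_SCM_DATA carries the hook extras as a JSON blob,
# read either from the `rhodecode` ui section or from the environment as seen
# above; the keys shown are illustrative, matching uses later in this module.
import json
import os

os.environ['RC_SCM_DATA'] = json.dumps({
    'hooks': ['push', 'pull'],      # gates git_pre_pull/git_pre_receive etc.
    'hooks_uri': '127.0.0.1:9000',  # selects HooksHttpClient (made-up address)
})
extras = json.loads(os.environ['RC_SCM_DATA'])
assert 'push' in extras['hooks']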
184 184
185 185 def _rev_range_hash(repo, node, check_heads=False):
186 186 from vcsserver.hgcompat import get_ctx
187 187
188 188 commits = []
189 189 revs = []
190 190 start = get_ctx(repo, node).rev()
191 191 end = len(repo)
192 192 for rev in range(start, end):
193 193 revs.append(rev)
194 194 ctx = get_ctx(repo, rev)
195 195 commit_id = mercurial.node.hex(ctx.node())
196 196 branch = ctx.branch()
197 197 commits.append((commit_id, branch))
198 198
199 199 parent_heads = []
200 200 if check_heads:
201 201 parent_heads = _check_heads(repo, start, end, revs)
202 202 return commits, parent_heads
203 203
204 204
205 205 def _check_heads(repo, start, end, commits):
206 206 from vcsserver.hgcompat import get_ctx
207 207 changelog = repo.changelog
208 208 parents = set()
209 209
210 210 for new_rev in commits:
211 211 for p in changelog.parentrevs(new_rev):
212 212 if p == mercurial.node.nullrev:
213 213 continue
214 214 if p < start:
215 215 parents.add(p)
216 216
217 217 for p in parents:
218 218 branch = get_ctx(repo, p).branch()
219 219 # The heads descending from that parent, on the same branch
220 220 parent_heads = set([p])
221 221 reachable = set([p])
222 222 for x in xrange(p + 1, end):
223 223 if get_ctx(repo, x).branch() != branch:
224 224 continue
225 225 for pp in changelog.parentrevs(x):
226 226 if pp in reachable:
227 227 reachable.add(x)
228 228 parent_heads.discard(pp)
229 229 parent_heads.add(x)
230 230 # More than one head? Suggest merging
231 231 if len(parent_heads) > 1:
232 232 return list(parent_heads)
233 233
234 234 return []
235 235
236 236
237 237 def _get_git_env():
238 238 env = {}
239 239 for k, v in os.environ.items():
240 240 if k.startswith('GIT'):
241 241 env[k] = v
242 242
243 243 # serialized version
244 244 return [(k, v) for k, v in env.items()]
245 245
246 246
247 247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 248 env = {}
249 249 for k, v in os.environ.items():
250 250 if k.startswith('HG'):
251 251 env[k] = v
252 252
253 253 env['HG_NODE'] = old_rev
254 254 env['HG_NODE_LAST'] = new_rev
255 255 env['HG_TXNID'] = txnid
256 256 env['HG_PENDING'] = repo_path
257 257
258 258 return [(k, v) for k, v in env.items()]
259 259
260 260
261 261 def repo_size(ui, repo, **kwargs):
262 262 extras = _extras_from_ui(ui)
263 263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264 264
265 265
266 266 def pre_pull(ui, repo, **kwargs):
267 267 extras = _extras_from_ui(ui)
268 268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269 269
270 270
271 271 def pre_pull_ssh(ui, repo, **kwargs):
272 272 extras = _extras_from_ui(ui)
273 273 if extras and extras.get('SSH'):
274 274 return pre_pull(ui, repo, **kwargs)
275 275 return 0
276 276
277 277
278 278 def post_pull(ui, repo, **kwargs):
279 279 extras = _extras_from_ui(ui)
280 280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281 281
282 282
283 283 def post_pull_ssh(ui, repo, **kwargs):
284 284 extras = _extras_from_ui(ui)
285 285 if extras and extras.get('SSH'):
286 286 return post_pull(ui, repo, **kwargs)
287 287 return 0
288 288
289 289
290 290 def pre_push(ui, repo, node=None, **kwargs):
291 291 """
292 292 Mercurial pre_push hook
293 293 """
294 294 extras = _extras_from_ui(ui)
295 295 detect_force_push = extras.get('detect_force_push')
296 296
297 297 rev_data = []
298 298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 299 branches = collections.defaultdict(list)
300 300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 301 for commit_id, branch in commits:
302 302 branches[branch].append(commit_id)
303 303
304 304 for branch, commits in branches.items():
305 305 old_rev = kwargs.get('node_last') or commits[0]
306 306 rev_data.append({
307 307 'total_commits': len(commits),
308 308 'old_rev': old_rev,
309 309 'new_rev': commits[-1],
310 310 'ref': '',
311 311 'type': 'branch',
312 312 'name': branch,
313 313 })
314 314
315 315 for push_ref in rev_data:
316 316 push_ref['multiple_heads'] = _heads
317 317
318 318 repo_path = os.path.join(
319 319 extras.get('repo_store', ''), extras.get('repository', ''))
320 320 push_ref['hg_env'] = _get_hg_env(
321 321 old_rev=push_ref['old_rev'],
322 322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 323 repo_path=repo_path)
324 324
325 325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 326 extras['commit_ids'] = rev_data
327 327
328 328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329 329
330 330
331 331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 332 extras = _extras_from_ui(ui)
333 333 if extras.get('SSH'):
334 334 return pre_push(ui, repo, node, **kwargs)
335 335
336 336 return 0
337 337
338 338
339 339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
340 340 """
341 341 Mercurial pre_push hook for SSH
342 342 """
343 343 extras = _extras_from_ui(ui)
344 344 if extras.get('SSH'):
345 345 permission = extras['SSH_PERMISSIONS']
346 346
347 347 if 'repository.write' == permission or 'repository.admin' == permission:
348 348 return 0
349 349
350 350 # non-zero ret code
351 351 return 1
352 352
353 353 return 0
354 354
355 355
356 356 def post_push(ui, repo, node, **kwargs):
357 357 """
358 358 Mercurial post_push hook
359 359 """
360 360 extras = _extras_from_ui(ui)
361 361
362 362 commit_ids = []
363 363 branches = []
364 364 bookmarks = []
365 365 tags = []
366 366
367 367 commits, _heads = _rev_range_hash(repo, node)
368 368 for commit_id, branch in commits:
369 369 commit_ids.append(commit_id)
370 370 if branch not in branches:
371 371 branches.append(branch)
372 372
373 373 if hasattr(ui, '_rc_pushkey_branches'):
374 374 bookmarks = ui._rc_pushkey_branches
375 375
376 376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
377 377 extras['commit_ids'] = commit_ids
378 378 extras['new_refs'] = {
379 379 'branches': branches,
380 380 'bookmarks': bookmarks,
381 381 'tags': tags
382 382 }
383 383
384 384 return _call_hook('post_push', extras, HgMessageWriter(ui))
385 385
386 386
387 387 def post_push_ssh(ui, repo, node, **kwargs):
388 388 """
389 389 Mercurial post_push hook for SSH
390 390 """
391 391 if _extras_from_ui(ui).get('SSH'):
392 392 return post_push(ui, repo, node, **kwargs)
393 393 return 0
394 394
395 395
396 396 def key_push(ui, repo, **kwargs):
397 397 from vcsserver.hgcompat import get_ctx
398 398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
399 399 # store new bookmarks in our UI object propagated later to post_push
400 400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
401 401 return
402 402
403 403
404 404 # backward compat
405 405 log_pull_action = post_pull
406 406
407 407 # backward compat
408 408 log_push_action = post_push
409 409
410 410
411 411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
412 412 """
413 413 Old hook name: keep here for backward compatibility.
414 414
415 415 This is only required when the installed git hooks are not upgraded.
416 416 """
417 417 pass
418 418
419 419
420 420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
421 421 """
422 422 Old hook name: keep here for backward compatibility.
423 423
424 424 This is only required when the installed git hooks are not upgraded.
425 425 """
426 426 pass
427 427
428 428
429 429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430 430
431 431
432 432 def git_pre_pull(extras):
433 433 """
434 434 Pre pull hook.
435 435
436 436 :param extras: dictionary containing the keys defined in simplevcs
437 437 :type extras: dict
438 438
439 439 :return: status code of the hook. 0 for success.
440 440 :rtype: int
441 441 """
442 442 if 'pull' not in extras['hooks']:
443 443 return HookResponse(0, '')
444 444
445 445 stdout = io.BytesIO()
446 446 try:
447 447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
448 448 except Exception as error:
449 449 status = 128
450 450 stdout.write('ERROR: %s\n' % str(error))
451 451
452 452 return HookResponse(status, stdout.getvalue())
453 453
454 454
455 455 def git_post_pull(extras):
456 456 """
457 457 Post pull hook.
458 458
459 459 :param extras: dictionary containing the keys defined in simplevcs
460 460 :type extras: dict
461 461
462 462 :return: status code of the hook. 0 for success.
463 463 :rtype: int
464 464 """
465 465 if 'pull' not in extras['hooks']:
466 466 return HookResponse(0, '')
467 467
468 468 stdout = io.BytesIO()
469 469 try:
470 470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 471 except Exception as error:
472 472 status = 128
473 473 stdout.write('ERROR: %s\n' % error)
474 474
475 475 return HookResponse(status, stdout.getvalue())
476 476
477 477
478 478 def _parse_git_ref_lines(revision_lines):
479 479 rev_data = []
480 480 for revision_line in revision_lines or []:
481 481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 482 ref_data = ref.split('/', 2)
483 483 if ref_data[1] in ('tags', 'heads'):
484 484 rev_data.append({
485 485 # NOTE(marcink):
486 486 # we're unable to tell total_commits for git at this point
487 487 # but we set the variable for consistency with the Mercurial hook data
488 488 'total_commits': -1,
489 489 'old_rev': old_rev,
490 490 'new_rev': new_rev,
491 491 'ref': ref,
492 492 'type': ref_data[1],
493 493 'name': ref_data[2],
494 494 })
495 495 return rev_data
496 496
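# NOTE(editor sketch): the revision lines fed to _parse_git_ref_lines() are
# the stdin lines git hands to pre-receive/post-receive hooks; the shas below
# are made up. Each line is '<old_rev> <new_rev> <ref>':
line = 'a' * 40 + ' ' + 'b' * 40 + ' refs/heads/feature/x'
parsed = _parse_git_ref_lines([line])[0]
assert parsed['type'] == 'heads'
assert parsed['name'] == 'feature/x'  # split('/', 2) keeps nested names whole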
497 497
498 498 def git_pre_receive(unused_repo_path, revision_lines, env):
499 499 """
500 500 Pre push hook.
501 501
502 502 :param revision_lines: stdin ref lines passed by git to the hook
503 503 :param env: os environment, expected to carry the RC_SCM_DATA extras
504 504
505 505 :return: status code of the hook. 0 for success.
506 506 :rtype: int
507 507 """
508 508 extras = json.loads(env['RC_SCM_DATA'])
509 509 rev_data = _parse_git_ref_lines(revision_lines)
510 510 if 'push' not in extras['hooks']:
511 511 return 0
512 512 empty_commit_id = '0' * 40
513 513
514 514 detect_force_push = extras.get('detect_force_push')
515 515
516 516 for push_ref in rev_data:
517 517 # store our git-env which holds the temp store
518 518 push_ref['git_env'] = _get_git_env()
519 519 push_ref['pruned_sha'] = ''
520 520 if not detect_force_push:
521 521 # don't check for forced-push when we don't need to
522 522 continue
523 523
524 524 type_ = push_ref['type']
525 525 new_branch = push_ref['old_rev'] == empty_commit_id
526 526 delete_branch = push_ref['new_rev'] == empty_commit_id
527 527 if type_ == 'heads' and not (new_branch or delete_branch):
528 528 old_rev = push_ref['old_rev']
529 529 new_rev = push_ref['new_rev']
530 530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
531 531 stdout, stderr = subprocessio.run_command(
532 532 cmd, env=os.environ.copy())
533 533 # non-empty output means there are unreachable objects, i.e. a forced push was used
534 534 if stdout:
535 535 push_ref['pruned_sha'] = stdout.splitlines()
536 536
537 537 extras['hook_type'] = 'pre_receive'
538 538 extras['commit_ids'] = rev_data
539 539 return _call_hook('pre_push', extras, GitMessageWriter())
540 540
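# NOTE(editor sketch): the force-push probe above in runnable form, assuming a
# git binary on PATH and hypothetical shas. `git rev-list OLD ^NEW` lists
# commits reachable from OLD but not from NEW, so any output means NEW does
# not contain OLD, i.e. history was rewritten.
import subprocess

def is_force_push(old_rev, new_rev, git='git'):
    out = subprocess.check_output([git, 'rev-list', old_rev, '^' + new_rev])
    return bool(out.strip())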
541 541
542 542 def git_post_receive(unused_repo_path, revision_lines, env):
543 543 """
544 544 Post push hook.
545 545
546 546 :param revision_lines: stdin ref lines passed by git to the hook
547 547 :param env: os environment, expected to carry the RC_SCM_DATA extras
548 548
549 549 :return: status code of the hook. 0 for success.
550 550 :rtype: int
551 551 """
552 552 extras = json.loads(env['RC_SCM_DATA'])
553 553 if 'push' not in extras['hooks']:
554 554 return 0
555 555
556 556 rev_data = _parse_git_ref_lines(revision_lines)
557 557
558 558 git_revs = []
559 559
560 560 # N.B.(skreft): it is ok to just call git, as git before calling a
561 561 # subcommand sets the PATH environment variable so that it points to the
562 562 # correct version of the git executable.
563 563 empty_commit_id = '0' * 40
564 564 branches = []
565 565 tags = []
566 566 for push_ref in rev_data:
567 567 type_ = push_ref['type']
568 568
569 569 if type_ == 'heads':
570 570 if push_ref['old_rev'] == empty_commit_id:
571 571 # starting new branch case
572 572 if push_ref['name'] not in branches:
573 573 branches.append(push_ref['name'])
574 574
575 575 # Fix up head revision if needed
576 576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
577 577 try:
578 578 subprocessio.run_command(cmd, env=os.environ.copy())
579 579 except Exception:
580 580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
581 581 'refs/heads/%s' % push_ref['name']]
582 582 print("Setting default branch to %s" % push_ref['name'])
583 583 subprocessio.run_command(cmd, env=os.environ.copy())
584 584
585 585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
586 586 '--format=%(refname)', 'refs/heads/*']
587 587 stdout, stderr = subprocessio.run_command(
588 588 cmd, env=os.environ.copy())
589 589 heads = stdout
590 590 heads = heads.replace(push_ref['ref'], '')
591 591 heads = ' '.join(head for head
592 592 in heads.splitlines() if head) or '.'
593 593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
594 594 '--pretty=format:%H', '--', push_ref['new_rev'],
595 595 '--not', heads]
596 596 stdout, stderr = subprocessio.run_command(
597 597 cmd, env=os.environ.copy())
598 598 git_revs.extend(stdout.splitlines())
599 599 elif push_ref['new_rev'] == empty_commit_id:
600 600 # delete branch case
601 601 git_revs.append('delete_branch=>%s' % push_ref['name'])
602 602 else:
603 603 if push_ref['name'] not in branches:
604 604 branches.append(push_ref['name'])
605 605
606 606 cmd = [settings.GIT_EXECUTABLE, 'log',
607 607 '{old_rev}..{new_rev}'.format(**push_ref),
608 608 '--reverse', '--pretty=format:%H']
609 609 stdout, stderr = subprocessio.run_command(
610 610 cmd, env=os.environ.copy())
611 611 git_revs.extend(stdout.splitlines())
612 612 elif type_ == 'tags':
613 613 if push_ref['name'] not in tags:
614 614 tags.append(push_ref['name'])
615 615 git_revs.append('tag=>%s' % push_ref['name'])
616 616
617 617 extras['hook_type'] = 'post_receive'
618 618 extras['commit_ids'] = git_revs
619 619 extras['new_refs'] = {
620 620 'branches': branches,
621 621 'bookmarks': [],
622 622 'tags': tags,
623 623 }
624 624
625 625 if 'repo_size' in extras['hooks']:
626 626 try:
627 627 _call_hook('repo_size', extras, GitMessageWriter())
628 628 except Exception:
629 629 pass
630 630
631 631 return _call_hook('post_push', extras, GitMessageWriter())
632 632
633 633
634 634 def _get_extras_from_txn_id(path, txn_id):
635 635 extras = {}
636 636 try:
637 637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
638 638 '-t', txn_id,
639 639 '--revprop', path, 'rc-scm-extras']
640 640 stdout, stderr = subprocessio.run_command(
641 641 cmd, env=os.environ.copy())
642 642 extras = json.loads(base64.urlsafe_b64decode(stdout))
643 643 except Exception:
644 644 log.exception('Failed to extract extras info from txn_id')
645 645
646 646 return extras
647 647
648 648
649 649 def _get_extras_from_commit_id(commit_id, path):
650 650 extras = {}
651 651 try:
652 652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
653 653 '-r', commit_id,
654 654 '--revprop', path, 'rc-scm-extras']
655 655 stdout, stderr = subprocessio.run_command(
656 656 cmd, env=os.environ.copy())
657 657 extras = json.loads(base64.urlsafe_b64decode(stdout))
658 658 except Exception:
659 659 log.exception('Failed to extract extras info from commit_id')
660 660
661 661 return extras
662 662
663 663
664 664 def svn_pre_commit(repo_path, commit_data, env):
665 665 path, txn_id = commit_data
666 666 branches = []
667 667 tags = []
668 668
669 669 if env.get('RC_SCM_DATA'):
670 670 extras = json.loads(env['RC_SCM_DATA'])
671 671 else:
672 672 # fallback method to read from TXN-ID stored data
673 673 extras = _get_extras_from_txn_id(path, txn_id)
674 674 if not extras:
675 675 return 0
676 676
677 677 extras['hook_type'] = 'pre_commit'
678 678 extras['commit_ids'] = [txn_id]
679 679 extras['txn_id'] = txn_id
680 680 extras['new_refs'] = {
681 681 'total_commits': 1,
682 682 'branches': branches,
683 683 'bookmarks': [],
684 684 'tags': tags,
685 685 }
686 686
687 687 return _call_hook('pre_push', extras, SvnMessageWriter())
688 688
689 689
690 690 def svn_post_commit(repo_path, commit_data, env):
691 691 """
692 692 commit_data is path, rev, txn_id
693 693 """
694 694 if len(commit_data) == 3:
695 695 path, commit_id, txn_id = commit_data
696 696 elif len(commit_data) == 2:
697 697 log.error('Failed to extract txn_id from commit_data using legacy method. '
698 698 'Some functionality might be limited')
699 699 path, commit_id = commit_data
700 700 txn_id = None
701 701
702 702 branches = []
703 703 tags = []
704 704
705 705 if env.get('RC_SCM_DATA'):
706 706 extras = json.loads(env['RC_SCM_DATA'])
707 707 else:
708 708 # fallback method to read from TXN-ID stored data
709 709 extras = _get_extras_from_commit_id(commit_id, path)
710 710 if not extras:
711 711 return 0
712 712
713 713 extras['hook_type'] = 'post_commit'
714 714 extras['commit_ids'] = [commit_id]
715 715 extras['txn_id'] = txn_id
716 716 extras['new_refs'] = {
717 717 'branches': branches,
718 718 'bookmarks': [],
719 719 'tags': tags,
720 720 'total_commits': 1,
721 721 }
722 722
723 723 if 'repo_size' in extras['hooks']:
724 724 try:
725 725 _call_hook('repo_size', extras, SvnMessageWriter())
726 726 except Exception:
727 727 pass
728 728
729 729 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,705 +1,705 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 import psutil
28 28 from itertools import chain
29 29 from cStringIO import StringIO
30 30
31 31 import simplejson as json
32 32 import msgpack
33 33 from pyramid.config import Configurator
34 34 from pyramid.settings import asbool, aslist
35 35 from pyramid.wsgi import wsgiapp
36 36 from pyramid.compat import configparser
37 37 from pyramid.response import Response
38 38
39 39 from vcsserver.utils import safe_int
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
44 44 # causing problems, "fix" them if they are, and fall back to LC_ALL=C
45 45
46 46 try:
47 47 locale.setlocale(locale.LC_ALL, '')
48 48 except locale.Error as e:
49 49 log.error(
50 50 'LOCALE ERROR: failed to set LC_ALL, falling back to LC_ALL=C, org error: %s', e)
51 51 os.environ['LC_ALL'] = 'C'
52 52
53 53 import vcsserver
54 54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
55 55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
56 56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
57 57 from vcsserver.echo_stub.echo_app import EchoApp
58 58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
59 59 from vcsserver.lib.exc_tracking import store_exception
60 60 from vcsserver.server import VcsServer
61 61
62 62 try:
63 63 from vcsserver.git import GitFactory, GitRemote
64 64 except ImportError:
65 65 GitFactory = None
66 66 GitRemote = None
67 67
68 68 try:
69 69 from vcsserver.hg import MercurialFactory, HgRemote
70 70 except ImportError:
71 71 MercurialFactory = None
72 72 HgRemote = None
73 73
74 74 try:
75 75 from vcsserver.svn import SubversionFactory, SvnRemote
76 76 except ImportError:
77 77 SubversionFactory = None
78 78 SvnRemote = None
79 79
80 80
81 81 def _is_request_chunked(environ):
82 82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
83 83 return stream
84 84
85 85
86 86 def _int_setting(settings, name, default):
87 87 settings[name] = int(settings.get(name, default))
88 88 return settings[name]
89 89
90 90
91 91 def _bool_setting(settings, name, default):
92 92 input_val = settings.get(name, default)
93 93 if isinstance(input_val, unicode):
94 94 input_val = input_val.encode('utf8')
95 95 settings[name] = asbool(input_val)
96 96 return settings[name]
97 97
98 98
99 99 def _list_setting(settings, name, default):
100 100 raw_value = settings.get(name, default)
101 101
102 102 # We assume the value uses Pyramid's space/newline separation.
103 103 settings[name] = aslist(raw_value)
104 104 return settings[name]
105 105
106 106
107 107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
108 108 value = settings.get(name, default)
109 109
110 110 if default_when_empty and not value:
111 111 # use default value when value is empty
112 112 value = default
113 113
114 114 if lower:
115 115 value = value.lower()
116 116 settings[name] = value
117 117 return settings[name]
118 118
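# NOTE(editor sketch): how the helpers above normalize raw ini values; the
# settings dict below is illustrative. _int_setting coerces strings, and
# _string_setting substitutes the default when the raw value is empty.
example = {'rc_cache.repo_object.expiration_time': '60', 'locale': ''}
assert _int_setting(example, 'rc_cache.repo_object.expiration_time', 30) == 60
assert _string_setting(example, 'locale', 'en_US.UTF-8',
                       lower=False, default_when_empty=True) == 'en_US.UTF-8'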
119 119
120 120 def log_max_fd():
121 121 try:
122 122 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
123 123 log.info('Max file descriptors value: %s', maxfd)
124 124 except Exception:
125 125 pass
126 126
127 127
128 128 class VCS(object):
129 129 def __init__(self, locale_conf=None, cache_config=None):
130 130 self.locale = locale_conf
131 131 self.cache_config = cache_config
132 132 self._configure_locale()
133 133
134 134 log_max_fd()
135 135
136 136 if GitFactory and GitRemote:
137 137 git_factory = GitFactory()
138 138 self._git_remote = GitRemote(git_factory)
139 139 else:
140 140 log.info("Git client import failed")
141 141
142 142 if MercurialFactory and HgRemote:
143 143 hg_factory = MercurialFactory()
144 144 self._hg_remote = HgRemote(hg_factory)
145 145 else:
146 146 log.info("Mercurial client import failed")
147 147
148 148 if SubversionFactory and SvnRemote:
149 149 svn_factory = SubversionFactory()
150 150
151 151 # hg factory is used for svn url validation
152 152 hg_factory = MercurialFactory()
153 153 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
154 154 else:
155 155 log.info("Subversion client import failed")
156 156
157 157 self._vcsserver = VcsServer()
158 158
159 159 def _configure_locale(self):
160 160 if self.locale:
161 161 log.info('Setting locale `LC_ALL` to %s', self.locale)
162 162 else:
163 163 log.info(
164 164 'Configuring locale subsystem based on environment variables')
165 165 try:
166 166 # If self.locale is the empty string, then the locale
167 167 # module will use the environment variables. See the
168 168 # documentation of the package `locale`.
169 169 locale.setlocale(locale.LC_ALL, self.locale)
170 170
171 171 language_code, encoding = locale.getlocale()
172 172 log.info(
173 173 'Locale set to language code "%s" with encoding "%s".',
174 174 language_code, encoding)
175 175 except locale.Error:
176 176 log.exception(
177 177 'Cannot set locale, not configuring the locale system')
178 178
179 179
180 180 class WsgiProxy(object):
181 181 def __init__(self, wsgi):
182 182 self.wsgi = wsgi
183 183
184 184 def __call__(self, environ, start_response):
185 185 input_data = environ['wsgi.input'].read()
186 186 input_data = msgpack.unpackb(input_data)
187 187
188 188 error = None
189 189 try:
190 190 data, status, headers = self.wsgi.handle(
191 191 input_data['environment'], input_data['input_data'],
192 192 *input_data['args'], **input_data['kwargs'])
193 193 except Exception as e:
194 194 data, status, headers = [], None, None
195 195 error = {
196 196 'message': str(e),
197 197 '_vcs_kind': getattr(e, '_vcs_kind', None)
198 198 }
199 199
200 200 start_response('200 OK', [])
201 201 return self._iterator(error, status, headers, data)
202 202
203 203 def _iterator(self, error, status, headers, data):
204 204 initial_data = [
205 205 error,
206 206 status,
207 207 headers,
208 208 ]
209 209
210 210 for d in chain(initial_data, data):
211 211 yield msgpack.packb(d)
212 212
213 213
214 214 def not_found(request):
215 215 return {'status': '404 NOT FOUND'}
216 216
217 217
218 218 class VCSViewPredicate(object):
219 219 def __init__(self, val, config):
220 220 self.remotes = val
221 221
222 222 def text(self):
223 223 return 'vcs view method = %s' % (self.remotes.keys(),)
224 224
225 225 phash = text
226 226
227 227 def __call__(self, context, request):
228 228 """
229 229 View predicate that returns true if given backend is supported by
230 230 defined remotes.
231 231 """
232 232 backend = request.matchdict.get('backend')
233 233 return backend in self.remotes
234 234
235 235
236 236 class HTTPApplication(object):
237 237 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
238 238
239 239 remote_wsgi = remote_wsgi
240 240 _use_echo_app = False
241 241
242 242 def __init__(self, settings=None, global_config=None):
243 243 self._sanitize_settings_and_apply_defaults(settings)
244 244
245 245 self.config = Configurator(settings=settings)
246 246 self.global_config = global_config
247 247 self.config.include('vcsserver.lib.rc_cache')
248 248
249 249 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
250 250 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
251 251 self._remotes = {
252 252 'hg': vcs._hg_remote,
253 253 'git': vcs._git_remote,
254 254 'svn': vcs._svn_remote,
255 255 'server': vcs._vcsserver,
256 256 }
257 257 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
258 258 self._use_echo_app = True
259 259 log.warning("Using EchoApp for VCS operations.")
260 260 self.remote_wsgi = remote_wsgi_stub
261 261
262 262 self._configure_settings(global_config, settings)
263 263
264 264 self._configure()
265 265
266 266 def _configure_settings(self, global_config, app_settings):
267 267 """
268 268 Configure the settings module.
269 269 """
270 270 settings_merged = global_config.copy()
271 271 settings_merged.update(app_settings)
272 272
273 273 git_path = app_settings.get('git_path', None)
274 274 if git_path:
275 275 settings.GIT_EXECUTABLE = git_path
276 276 binary_dir = app_settings.get('core.binary_dir', None)
277 277 if binary_dir:
278 278 settings.BINARY_DIR = binary_dir
279 279
280 280 # Store the settings to make them available to other modules.
281 281 vcsserver.PYRAMID_SETTINGS = settings_merged
282 282 vcsserver.CONFIG = settings_merged
283 283
284 284 def _sanitize_settings_and_apply_defaults(self, settings):
285 285 temp_store = tempfile.gettempdir()
286 286 default_cache_dir = os.path.join(temp_store, 'rc_cache')
287 287
288 288 # save default, cache dir, and use it for all backends later.
289 289 default_cache_dir = _string_setting(
290 290 settings,
291 291 'cache_dir',
292 292 default_cache_dir, lower=False, default_when_empty=True)
293 293
294 294 # ensure we have our dir created
295 295 if not os.path.isdir(default_cache_dir):
296 296 os.makedirs(default_cache_dir, mode=0o755)
297 297
298 298 # exception store cache
299 299 _string_setting(
300 300 settings,
301 301 'exception_tracker.store_path',
302 302 temp_store, lower=False, default_when_empty=True)
303 303
304 304 # repo_object cache
305 305 _string_setting(
306 306 settings,
307 307 'rc_cache.repo_object.backend',
308 308 'dogpile.cache.rc.file_namespace', lower=False)
309 309 _int_setting(
310 310 settings,
311 311 'rc_cache.repo_object.expiration_time',
312 312 30 * 24 * 60 * 60)
313 313 _string_setting(
314 314 settings,
315 315 'rc_cache.repo_object.arguments.filename',
316 316 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
317 317
318 318 def _configure(self):
319 319 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
320 320
321 321 self.config.add_route('service', '/_service')
322 322 self.config.add_route('status', '/status')
323 323 self.config.add_route('hg_proxy', '/proxy/hg')
324 324 self.config.add_route('git_proxy', '/proxy/git')
325 325
326 326 # rpc methods
327 327 self.config.add_route('vcs', '/{backend}')
328 328
329 329 # streaming rpc remote methods
330 330 self.config.add_route('vcs_stream', '/{backend}/stream')
331 331
332 332 # vcs operations clone/push as streaming
333 333 self.config.add_route('stream_git', '/stream/git/*repo_name')
334 334 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
335 335
336 336 self.config.add_view(self.status_view, route_name='status', renderer='json')
337 337 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
338 338
339 339 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
340 340 self.config.add_view(self.git_proxy(), route_name='git_proxy')
341 341 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
342 342 vcs_view=self._remotes)
343 343 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
344 344 vcs_view=self._remotes)
345 345
346 346 self.config.add_view(self.hg_stream(), route_name='stream_hg')
347 347 self.config.add_view(self.git_stream(), route_name='stream_git')
348 348
349 349 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
350 350
351 351 self.config.add_notfound_view(not_found, renderer='json')
352 352
353 353 self.config.add_view(self.handle_vcs_exception, context=Exception)
354 354
355 355 self.config.add_tween(
356 356 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
357 357 )
358 358 self.config.add_request_method(
359 359 'vcsserver.lib.request_counter.get_request_counter',
360 360 'request_count')
361 361
362 362 self.config.add_request_method(
363 363 'vcsserver.lib._vendor.statsd.get_statsd_client',
364 364 'statsd', reify=True)
365 365
366 366 def wsgi_app(self):
367 367 return self.config.make_wsgi_app()
368 368
369 369 def _vcs_view_params(self, request):
370 370 remote = self._remotes[request.matchdict['backend']]
371 371 payload = msgpack.unpackb(request.body, use_list=True)
372 372 method = payload.get('method')
373 373 params = payload['params']
374 374 wire = params.get('wire')
375 375 args = params.get('args')
376 376 kwargs = params.get('kwargs')
377 377 context_uid = None
378 378
379 379 if wire:
380 380 try:
381 381 wire['context'] = context_uid = uuid.UUID(wire['context'])
382 382 except KeyError:
383 383 pass
384 384 args.insert(0, wire)
385 385 repo_state_uid = wire.get('repo_state_uid') if wire else None
386 386
387 387 # NOTE(marcink): trading complexity for slight performance
388 388 if log.isEnabledFor(logging.DEBUG):
389 389 no_args_methods = [
390 390
391 391 ]
392 392 if method in no_args_methods:
393 393 call_args = ''
394 394 else:
395 395 call_args = args[1:]
396 396
397 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
397 log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
398 398 method, call_args, kwargs, context_uid, repo_state_uid)
399 399
400 400 return payload, remote, method, args, kwargs
401 401
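The msgpack payload that `_vcs_view_params` unpacks has the shape below; a hedged sketch with illustrative values (only the field names are taken from the code above):

    import uuid
    import msgpack

    payload = {
        'id': 'req-1',                        # echoed back in the response
        'method': 'is_empty',                 # remote method name
        'params': {
            'wire': {
                'context': str(uuid.uuid4()),   # parsed into uuid.UUID above
                'repo_state_uid': 'state-1',    # illustrative
            },
            'args': [],                       # wire is inserted at position 0
            'kwargs': {},
        },
    }
    body = msgpack.packb(payload)             # POST body for the /{backend} route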
402 402 def vcs_view(self, request):
403 403
404 404 payload, remote, method, args, kwargs = self._vcs_view_params(request)
405 405 payload_id = payload.get('id')
406 406
407 407 try:
408 408 resp = getattr(remote, method)(*args, **kwargs)
409 409 except Exception as e:
410 410 exc_info = list(sys.exc_info())
411 411 exc_type, exc_value, exc_traceback = exc_info
412 412
413 413 org_exc = getattr(e, '_org_exc', None)
414 414 org_exc_name = None
415 415 org_exc_tb = ''
416 416 if org_exc:
417 417 org_exc_name = org_exc.__class__.__name__
418 418 org_exc_tb = getattr(e, '_org_exc_tb', '')
419 419 # replace our "faked" exception with the original one
420 420 exc_info[0] = org_exc.__class__
421 421 exc_info[1] = org_exc
422 422
423 423 should_store_exc = True
424 424 if org_exc:
425 425 def get_exc_fqn(_exc_obj):
426 426 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
427 427 return module_name + '.' + org_exc_name
428 428
429 429 exc_fqn = get_exc_fqn(org_exc)
430 430
431 431 if exc_fqn in ['mercurial.error.RepoLookupError',
432 432 'vcsserver.exceptions.RefNotFoundException']:
433 433 should_store_exc = False
434 434
435 435 if should_store_exc:
436 436 store_exception(id(exc_info), exc_info, request_path=request.path)
437 437
438 438 tb_info = ''.join(
439 439 traceback.format_exception(exc_type, exc_value, exc_traceback))
440 440
441 441 type_ = e.__class__.__name__
442 442 if type_ not in self.ALLOWED_EXCEPTIONS:
443 443 type_ = None
444 444
445 445 resp = {
446 446 'id': payload_id,
447 447 'error': {
448 448 'message': e.message,
449 449 'traceback': tb_info,
450 450 'org_exc': org_exc_name,
451 451 'org_exc_tb': org_exc_tb,
452 452 'type': type_
453 453 }
454 454 }
455 455
456 456 try:
457 457 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
458 458 except AttributeError:
459 459 pass
460 460 else:
461 461 resp = {
462 462 'id': payload_id,
463 463 'result': resp
464 464 }
465 465
466 466 return resp
467 467
468 468 def vcs_stream_view(self, request):
469 469 payload, remote, method, args, kwargs = self._vcs_view_params(request)
470 470 # this method name carries a 'stream:' marker; we strip it here
471 471 method = method.split('stream:')[-1]
472 472 chunk_size = safe_int(payload.get('chunk_size')) or 4096
473 473
474 474 try:
475 475 resp = getattr(remote, method)(*args, **kwargs)
476 476 except Exception as e:
477 477 raise
478 478
479 479 def get_chunked_data(method_resp):
480 480 stream = StringIO(method_resp)
481 481 while 1:
482 482 chunk = stream.read(chunk_size)
483 483 if not chunk:
484 484 break
485 485 yield chunk
486 486
487 487 response = Response(app_iter=get_chunked_data(resp))
488 488 response.content_type = 'application/octet-stream'
489 489
490 490 return response
491 491
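On the client side the chunked response is consumed incrementally; a sketch using `requests` (the URL, port, and the `stream:`-prefixed method name are assumptions, not taken from this file):

    import msgpack
    import requests

    payload = msgpack.packb({
        'id': 'req-2',
        'method': 'stream:raw_diff',          # hypothetical streaming method
        'params': {'wire': None, 'args': [], 'kwargs': {}},
        'chunk_size': 4096,
    })
    resp = requests.post('http://127.0.0.1:9900/hg/stream',  # assumed endpoint
                         data=payload, stream=True)
    with open('out.bin', 'wb') as out:
        for chunk in resp.iter_content(chunk_size=4096):
            out.write(chunk)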
492 492 def status_view(self, request):
493 493 import vcsserver
494 494 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
495 495 'pid': os.getpid()}
496 496
497 497 def service_view(self, request):
498 498 import vcsserver
499 499
500 500 payload = msgpack.unpackb(request.body, use_list=True)
501 501 server_config, app_config = {}, {}
502 502
503 503 try:
504 504 path = self.global_config['__file__']
505 505 config = configparser.RawConfigParser()
506 506
507 507 config.read(path)
508 508
509 509 if config.has_section('server:main'):
510 510 server_config = dict(config.items('server:main'))
511 511 if config.has_section('app:main'):
512 512 app_config = dict(config.items('app:main'))
513 513
514 514 except Exception:
515 515 log.exception('Failed to read .ini file for display')
516 516
517 517 environ = os.environ.items()
518 518
519 519 resp = {
520 520 'id': payload.get('id'),
521 521 'result': dict(
522 522 version=vcsserver.__version__,
523 523 config=server_config,
524 524 app_config=app_config,
525 525 environ=environ,
526 526 payload=payload,
527 527 )
528 528 }
529 529 return resp
530 530
531 531 def _msgpack_renderer_factory(self, info):
532 532 def _render(value, system):
533 533 request = system.get('request')
534 534 if request is not None:
535 535 response = request.response
536 536 ct = response.content_type
537 537 if ct == response.default_content_type:
538 538 response.content_type = 'application/x-msgpack'
539 539 return msgpack.packb(value)
540 540 return _render
541 541
542 542 def set_env_from_config(self, environ, config):
543 543 dict_conf = {}
544 544 try:
545 545 for elem in config:
546 546 if elem[0] == 'rhodecode':
547 547 dict_conf = json.loads(elem[2])
548 548 break
549 549 except Exception:
550 550 log.exception('Failed to fetch SCM CONFIG')
551 551 return
552 552
553 553 username = dict_conf.get('username')
554 554 if username:
555 555 environ['REMOTE_USER'] = username
556 556 # Mercurial-specific; some extension APIs rely on this
557 557 environ['HGUSER'] = username
558 558
559 559 ip = dict_conf.get('ip')
560 560 if ip:
561 561 environ['REMOTE_HOST'] = ip
562 562
563 563 if _is_request_chunked(environ):
564 564 # set the compatibility flag for webob
565 565 environ['wsgi.input_terminated'] = True
566 566
567 567 def hg_proxy(self):
568 568 @wsgiapp
569 569 def _hg_proxy(environ, start_response):
570 570 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
571 571 return app(environ, start_response)
572 572 return _hg_proxy
573 573
574 574 def git_proxy(self):
575 575 @wsgiapp
576 576 def _git_proxy(environ, start_response):
577 577 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
578 578 return app(environ, start_response)
579 579 return _git_proxy
580 580
581 581 def hg_stream(self):
582 582 if self._use_echo_app:
583 583 @wsgiapp
584 584 def _hg_stream(environ, start_response):
585 585 app = EchoApp('fake_path', 'fake_name', None)
586 586 return app(environ, start_response)
587 587 return _hg_stream
588 588 else:
589 589 @wsgiapp
590 590 def _hg_stream(environ, start_response):
591 591 log.debug('http-app: handling hg stream')
592 592 repo_path = environ['HTTP_X_RC_REPO_PATH']
593 593 repo_name = environ['HTTP_X_RC_REPO_NAME']
594 594 packed_config = base64.b64decode(
595 595 environ['HTTP_X_RC_REPO_CONFIG'])
596 596 config = msgpack.unpackb(packed_config)
597 597 app = scm_app.create_hg_wsgi_app(
598 598 repo_path, repo_name, config)
599 599
600 600 # Consistent path information for hgweb
601 601 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
602 602 environ['REPO_NAME'] = repo_name
603 603 self.set_env_from_config(environ, config)
604 604
605 605 log.debug('http-app: starting app handler '
606 606 'with %s and process request', app)
607 607 return app(environ, ResponseFilter(start_response))
608 608 return _hg_stream
609 609
610 610 def git_stream(self):
611 611 if self._use_echo_app:
612 612 @wsgiapp
613 613 def _git_stream(environ, start_response):
614 614 app = EchoApp('fake_path', 'fake_name', None)
615 615 return app(environ, start_response)
616 616 return _git_stream
617 617 else:
618 618 @wsgiapp
619 619 def _git_stream(environ, start_response):
620 620 log.debug('http-app: handling git stream')
621 621 repo_path = environ['HTTP_X_RC_REPO_PATH']
622 622 repo_name = environ['HTTP_X_RC_REPO_NAME']
623 623 packed_config = base64.b64decode(
624 624 environ['HTTP_X_RC_REPO_CONFIG'])
625 625 config = msgpack.unpackb(packed_config)
626 626
627 627 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
628 628 self.set_env_from_config(environ, config)
629 629
630 630 content_type = environ.get('CONTENT_TYPE', '')
631 631
632 632 path = environ['PATH_INFO']
633 633 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
634 634 log.debug(
635 635 'LFS: Detecting if request `%s` is LFS server path based '
636 636 'on content type:`%s`, is_lfs:%s',
637 637 path, content_type, is_lfs_request)
638 638
639 639 if not is_lfs_request:
640 640 # fallback detection by path
641 641 if GIT_LFS_PROTO_PAT.match(path):
642 642 is_lfs_request = True
643 643 log.debug(
644 644 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
645 645 path, is_lfs_request)
646 646
647 647 if is_lfs_request:
648 648 app = scm_app.create_git_lfs_wsgi_app(
649 649 repo_path, repo_name, config)
650 650 else:
651 651 app = scm_app.create_git_wsgi_app(
652 652 repo_path, repo_name, config)
653 653
654 654 log.debug('http-app: starting app handler '
655 655 'with %s and process request', app)
656 656
657 657 return app(environ, start_response)
658 658
659 659 return _git_stream
660 660
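LFS detection keys off the request content type first and falls back to the URL path. The constant and pattern below are assumptions about `GIT_LFS_CONTENT_TYPE`/`GIT_LFS_PROTO_PAT` (both defined elsewhere in this module), shown only to make the two checks concrete:

    import re

    GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs'          # assumed value
    GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/info/lfs/(.+)$')  # assumed pattern

    # primary: content-type check
    assert GIT_LFS_CONTENT_TYPE in 'application/vnd.git-lfs+json'
    # fallback: path check
    assert GIT_LFS_PROTO_PAT.match('/repo/info/lfs/objects/batch')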
661 661 def handle_vcs_exception(self, exception, request):
662 662 _vcs_kind = getattr(exception, '_vcs_kind', '')
663 663 if _vcs_kind == 'repo_locked':
664 664 # Get custom repo-locked status code if present.
665 665 status_code = request.headers.get('X-RC-Locked-Status-Code')
666 666 return HTTPRepoLocked(
667 667 title=exception.message, status_code=status_code)
668 668
669 669 elif _vcs_kind == 'repo_branch_protected':
670 670 # Get custom repo-branch-protected status code if present.
671 671 return HTTPRepoBranchProtected(title=exception.message)
672 672
673 673 exc_info = request.exc_info
674 674 store_exception(id(exc_info), exc_info)
675 675
676 676 traceback_info = 'unavailable'
677 677 if request.exc_info:
678 678 exc_type, exc_value, exc_tb = request.exc_info
679 679 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
680 680
681 681 log.error(
682 682 'Error occurred handling this request for path: %s,\ntb: %s',
683 683 request.path, traceback_info)
684 684 raise exception
685 685
686 686
687 687 class ResponseFilter(object):
688 688
689 689 def __init__(self, start_response):
690 690 self._start_response = start_response
691 691
692 692 def __call__(self, status, response_headers, exc_info=None):
693 693 headers = tuple(
694 694 (h, v) for h, v in response_headers
695 695 if not wsgiref.util.is_hop_by_hop(h))
696 696 return self._start_response(status, headers, exc_info)
697 697
698 698
699 699 def main(global_config, **settings):
700 700 if MercurialFactory:
701 701 hgpatches.patch_largefiles_capabilities()
702 702 hgpatches.patch_subrepo_type_mapping()
703 703
704 704 app = HTTPApplication(settings=settings, global_config=global_config)
705 705 return app.wsgi_app()
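`main` is the Pyramid/PasteDeploy entry point; a minimal sketch of serving it standalone (the module path, port, and settings are illustrative):

    from wsgiref.simple_server import make_server
    from vcsserver.http_main import main   # assumed module path for this file

    app = main({'__file__': 'vcsserver.ini'}, locale='en_US.UTF-8')
    make_server('127.0.0.1', 9900, app).serve_forever()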
@@ -1,77 +1,79 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 from dogpile.cache import register_backend
20 20
21 21 register_backend(
22 22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 23 "LRUMemoryBackend")
24 24
25 25 register_backend(
26 26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
27 27 "FileNamespaceBackend")
28 28
29 29 register_backend(
30 30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
31 31 "RedisPickleBackend")
32 32
33 33 register_backend(
34 34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
35 35 "RedisMsgPackBackend")
36 36
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40 from . import region_meta
41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
41 from .utils import (
42 get_default_cache_settings, backend_key_generator, get_or_create_region,
43 clear_cache_namespace, make_region)
42 44
43 45
44 46 def configure_dogpile_cache(settings):
45 47 cache_dir = settings.get('cache_dir')
46 48 if cache_dir:
47 49 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
48 50
49 51 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
50 52
51 53 # inspect available namespaces
52 54 avail_regions = set()
53 55 for key in rc_cache_data.keys():
54 56 namespace_name = key.split('.', 1)[0]
55 57 if namespace_name in avail_regions:
56 58 continue
57 59
58 60 avail_regions.add(namespace_name)
59 61 log.debug('dogpile: found cache region: %s', namespace_name)
60 62
61 63 new_region = make_region(
62 64 name=namespace_name,
63 65 function_key_generator=None
64 66 )
65 67
66 68 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(namespace_name))
67 69 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
68 70 if log.isEnabledFor(logging.DEBUG):
69 71 region_args = dict(backend=new_region.actual_backend.__class__,
70 72 region_invalidator=new_region.region_invalidator.__class__)
71 73 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
72 74
73 75 region_meta.dogpile_cache_regions[namespace_name] = new_region
74 76
75 77
76 78 def includeme(config):
77 79 configure_dogpile_cache(config.registry.settings)
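A sketch of the settings that drive `configure_dogpile_cache`, matching the `rc_cache.repo_object.*` defaults applied in http_main (values illustrative):

    settings = {
        'cache_dir': '/tmp/rc_cache',
        'rc_cache.repo_object.backend': 'dogpile.cache.rc.file_namespace',
        'rc_cache.repo_object.expiration_time': 2592000,  # 30 days
        'rc_cache.repo_object.arguments.filename':
            '/tmp/rc_cache/vcsserver_cache_1',
    }
    configure_dogpile_cache(settings)
    # registers a 'repo_object' region in region_meta.dogpile_cache_regions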
@@ -1,328 +1,329 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import errno
20 20 import logging
21 21
22 22 import msgpack
23 23 import redis
24 24
25 25 from dogpile.cache.api import CachedValue
26 26 from dogpile.cache.backends import memory as memory_backend
27 27 from dogpile.cache.backends import file as file_backend
28 28 from dogpile.cache.backends import redis as redis_backend
29 29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 30 from dogpile.cache.util import memoized_property
31 31
32 32 from pyramid.settings import asbool
33 33
34 34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
35 from vcsserver.utils import safe_str
35 36
36 37
37 38 _default_max_size = 1024
38 39
39 40 log = logging.getLogger(__name__)
40 41
41 42
42 43 class LRUMemoryBackend(memory_backend.MemoryBackend):
43 44 key_prefix = 'lru_mem_backend'
44 45 pickle_values = False
45 46
46 47 def __init__(self, arguments):
47 48 max_size = arguments.pop('max_size', _default_max_size)
48 49
49 50 LRUDictClass = LRUDict
50 51 if arguments.pop('log_key_count', None):
51 52 LRUDictClass = LRUDictDebug
52 53
53 54 arguments['cache_dict'] = LRUDictClass(max_size)
54 55 super(LRUMemoryBackend, self).__init__(arguments)
55 56
56 57 def delete(self, key):
57 58 try:
58 59 del self._cache[key]
59 60 except KeyError:
60 61 # we don't care if the key isn't there at deletion
61 62 pass
62 63
63 64 def delete_multi(self, keys):
64 65 for key in keys:
65 66 self.delete(key)
66 67
67 68
68 69 class PickleSerializer(object):
69 70
70 71 def _dumps(self, value, safe=False):
71 72 try:
72 73 return compat.pickle.dumps(value)
73 74 except Exception:
74 75 if safe:
75 76 return NO_VALUE
76 77 else:
77 78 raise
78 79
79 80 def _loads(self, value, safe=True):
80 81 try:
81 82 return compat.pickle.loads(value)
82 83 except Exception:
83 84 if safe:
84 85 return NO_VALUE
85 86 else:
86 87 raise
87 88
88 89
89 90 class MsgPackSerializer(object):
90 91
91 92 def _dumps(self, value, safe=False):
92 93 try:
93 94 return msgpack.packb(value)
94 95 except Exception:
95 96 if safe:
96 97 return NO_VALUE
97 98 else:
98 99 raise
99 100
100 101 def _loads(self, value, safe=True):
101 102 """
102 103 pickle maintains the `CachedValue` wrapper around the tuple;
103 104 msgpack does not, so it must be added back in.
104 105 """
105 106 try:
106 107 value = msgpack.unpackb(value, use_list=False)
107 108 return CachedValue(*value)
108 109 except Exception:
109 110 if safe:
110 111 return NO_VALUE
111 112 else:
112 113 raise
113 114
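A quick sketch of the asymmetry the docstring mentions: msgpack flattens the `CachedValue` named tuple, so `_loads` rebuilds it (the metadata keys here are illustrative dogpile internals):

    ser = MsgPackSerializer()
    value = CachedValue('payload', {'ct': 1234567890.0, 'v': 1})
    raw = ser._dumps(value)           # packed as a plain tuple
    assert ser._loads(raw) == value   # wrapper restored on load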
114 115
115 116 import fcntl
116 117 flock_org = fcntl.flock
117 118
118 119
119 120 class CustomLockFactory(FileLock):
120 121
121 122 pass
122 123
123 124
124 125 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
125 126 key_prefix = 'file_backend'
126 127
127 128 def __init__(self, arguments):
128 129 arguments['lock_factory'] = CustomLockFactory
129 130 db_file = arguments.get('filename')
130 131
131 132 log.debug('initializing %s DB in %s', self.__class__.__name__, db_file)
132 133 try:
133 134 super(FileNamespaceBackend, self).__init__(arguments)
134 135 except Exception:
135 136 log.error('Failed to initialize db at: %s', db_file)
136 137 raise
137 138
138 139 def __repr__(self):
139 140 return '{} `{}`'.format(self.__class__, self.filename)
140 141
141 142 def list_keys(self, prefix=''):
142 143 prefix = '{}:{}'.format(self.key_prefix, prefix)
143 144
144 145 def cond(v):
145 146 if not prefix:
146 147 return True
147 148
148 149 if v.startswith(prefix):
149 150 return True
150 151 return False
151 152
152 153 with self._dbm_file(True) as dbm:
153 154 try:
154 155 return filter(cond, dbm.keys())
155 156 except Exception:
156 157 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
157 158 raise
158 159
159 160 def get_store(self):
160 161 return self.filename
161 162
162 163 def _dbm_get(self, key):
163 164 with self._dbm_file(False) as dbm:
164 165 if hasattr(dbm, 'get'):
165 166 value = dbm.get(key, NO_VALUE)
166 167 else:
167 168 # gdbm objects lack a .get method
168 169 try:
169 170 value = dbm[key]
170 171 except KeyError:
171 172 value = NO_VALUE
172 173 if value is not NO_VALUE:
173 174 value = self._loads(value)
174 175 return value
175 176
176 177 def get(self, key):
177 178 try:
178 179 return self._dbm_get(key)
179 180 except Exception:
180 181 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
181 182 raise
182 183
183 184 def set(self, key, value):
184 185 with self._dbm_file(True) as dbm:
185 186 dbm[key] = self._dumps(value)
186 187
187 188 def set_multi(self, mapping):
188 189 with self._dbm_file(True) as dbm:
189 190 for key, value in mapping.items():
190 191 dbm[key] = self._dumps(value)
191 192
192 193
193 194 class BaseRedisBackend(redis_backend.RedisBackend):
194 195 key_prefix = ''
195 196
196 197 def __init__(self, arguments):
197 198 super(BaseRedisBackend, self).__init__(arguments)
198 199 self._lock_timeout = self.lock_timeout
199 200 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
200 201
201 202 if self._lock_auto_renewal and not self._lock_timeout:
202 203 # set default timeout for auto_renewal
203 204 self._lock_timeout = 30
204 205
205 206 def _create_client(self):
206 207 args = {}
207 208
208 209 if self.url is not None:
209 210 args.update(url=self.url)
210 211
211 212 else:
212 213 args.update(
213 214 host=self.host, password=self.password,
214 215 port=self.port, db=self.db
215 216 )
216 217
217 218 connection_pool = redis.ConnectionPool(**args)
218 219
219 220 return redis.StrictRedis(connection_pool=connection_pool)
220 221
221 222 def list_keys(self, prefix=''):
222 223 prefix = '{}:{}*'.format(self.key_prefix, prefix)
223 224 return self.client.keys(prefix)
224 225
225 226 def get_store(self):
226 227 return self.client.connection_pool
227 228
228 229 def get(self, key):
229 230 value = self.client.get(key)
230 231 if value is None:
231 232 return NO_VALUE
232 233 return self._loads(value)
233 234
234 235 def get_multi(self, keys):
235 236 if not keys:
236 237 return []
237 238 values = self.client.mget(keys)
238 239 loads = self._loads
239 240 return [
240 241 loads(v) if v is not None else NO_VALUE
241 242 for v in values]
242 243
243 244 def set(self, key, value):
244 245 if self.redis_expiration_time:
245 246 self.client.setex(key, self.redis_expiration_time,
246 247 self._dumps(value))
247 248 else:
248 249 self.client.set(key, self._dumps(value))
249 250
250 251 def set_multi(self, mapping):
251 252 dumps = self._dumps
252 253 mapping = dict(
253 254 (k, dumps(v))
254 255 for k, v in mapping.items()
255 256 )
256 257
257 258 if not self.redis_expiration_time:
258 259 self.client.mset(mapping)
259 260 else:
260 261 pipe = self.client.pipeline()
261 262 for key, value in mapping.items():
262 263 pipe.setex(key, self.redis_expiration_time, value)
263 264 pipe.execute()
264 265
265 266 def get_mutex(self, key):
266 267 if self.distributed_lock:
267 lock_key = redis_backend.u('_lock_{0}').format(key)
268 lock_key = redis_backend.u('_lock_{0}').format(safe_str(key))
268 269 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
269 270 auto_renewal=self._lock_auto_renewal)
270 271 else:
271 272 return None
272 273
273 274
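Wiring one of these backends into a region looks like this; a sketch assuming a reachable Redis and the backend names registered in rc_cache/__init__ above:

    from vcsserver.lib.rc_cache import make_region  # re-exported in __init__

    region = make_region(name='repo_object', function_key_generator=None)
    region.configure(
        backend='dogpile.cache.rc.redis_msgpack',
        expiration_time=3600,
        arguments={
            'host': 'localhost', 'port': 6379, 'db': 0,
            'distributed_lock': True,     # use the redis_lock mutex below
            'lock_auto_renewal': True,    # defaults the lock timeout to 30s
        },
    )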
274 275 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
275 276 key_prefix = 'redis_pickle_backend'
276 277 pass
277 278
278 279
279 280 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
280 281 key_prefix = 'redis_msgpack_backend'
281 282 pass
282 283
283 284
284 285 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
285 286 import redis_lock
286 287
287 288 class _RedisLockWrapper(object):
288 289 """LockWrapper for redis_lock"""
289 290
290 291 @classmethod
291 292 def get_lock(cls):
292 293 return redis_lock.Lock(
293 294 redis_client=client,
294 295 name=lock_key,
295 296 expire=lock_timeout,
296 297 auto_renewal=auto_renewal,
297 298 strict=True,
298 299 )
299 300
300 301 def __repr__(self):
301 302 return "{}:{}".format(self.__class__.__name__, lock_key)
302 303
303 304 def __str__(self):
304 305 return "{}:{}".format(self.__class__.__name__, lock_key)
305 306
306 307 def __init__(self):
307 308 self.lock = self.get_lock()
308 309 self.lock_key = lock_key
309 310
310 311 def acquire(self, wait=True):
311 312 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
312 313 try:
313 314 acquired = self.lock.acquire(wait)
314 315 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
315 316 return acquired
316 317 except redis_lock.AlreadyAcquired:
317 318 return False
318 319 except redis_lock.AlreadyStarted:
319 320 # refresh thread exists, but it also means we acquired the lock
320 321 return True
321 322
322 323 def release(self):
323 324 try:
324 325 self.lock.release()
325 326 except redis_lock.NotAcquired:
326 327 pass
327 328
328 329 return _RedisLockWrapper()
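A minimal sketch of the wrapper's acquire/release contract (assumes a running Redis and the python-redis-lock package; key name illustrative):

    import redis

    client = redis.StrictRedis()
    lock = get_mutex_lock(client, '_lock_repo_object:repo-1',
                          lock_timeout=30, auto_renewal=True)
    if lock.acquire(wait=True):
        try:
            pass  # critical section guarded across processes
        finally:
            lock.release()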
@@ -1,158 +1,263 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 import time
19 20 import logging
20 21 import functools
21 import time
22
23 from decorator import decorate
24 22
25 23 from dogpile.cache import CacheRegion
26 24 from dogpile.cache.util import compat
27 25
28 26 from vcsserver.utils import safe_str, sha1
29 27
28 from vcsserver.lib.rc_cache import region_meta
30 29
31 30 log = logging.getLogger(__name__)
32 31
33 32
34 33 class RhodeCodeCacheRegion(CacheRegion):
35 34
36 35 def conditional_cache_on_arguments(
37 36 self, namespace=None,
38 37 expiration_time=None,
39 38 should_cache_fn=None,
40 39 to_str=compat.string_type,
41 40 function_key_generator=None,
42 41 condition=True):
43 42 """
44 43 Custom conditional decorator that will not touch any dogpile internals if
45 44 the condition isn't met. This works a bit differently from should_cache_fn,
46 45 and it's faster in cases where we never want to compute cached values.
47 46 """
48 47 expiration_time_is_callable = compat.callable(expiration_time)
49 48
50 49 if function_key_generator is None:
51 50 function_key_generator = self.function_key_generator
52 51
52 # workaround for py2 and cython problems, this block should be removed
53 # once we've migrated to py3
54 if 'cython' == 'cython':
55 def decorator(fn):
56 if to_str is compat.string_type:
57 # backwards compatible
58 key_generator = function_key_generator(namespace, fn)
59 else:
60 key_generator = function_key_generator(namespace, fn, to_str=to_str)
61
62 @functools.wraps(fn)
63 def decorate(*arg, **kw):
64 key = key_generator(*arg, **kw)
65
66 @functools.wraps(fn)
67 def creator():
68 return fn(*arg, **kw)
69
70 if not condition:
71 return creator()
72
73 timeout = expiration_time() if expiration_time_is_callable \
74 else expiration_time
75
76 return self.get_or_create(key, creator, timeout, should_cache_fn)
77
78 def invalidate(*arg, **kw):
79 key = key_generator(*arg, **kw)
80 self.delete(key)
81
82 def set_(value, *arg, **kw):
83 key = key_generator(*arg, **kw)
84 self.set(key, value)
85
86 def get(*arg, **kw):
87 key = key_generator(*arg, **kw)
88 return self.get(key)
89
90 def refresh(*arg, **kw):
91 key = key_generator(*arg, **kw)
92 value = fn(*arg, **kw)
93 self.set(key, value)
94 return value
95
96 decorate.set = set_
97 decorate.invalidate = invalidate
98 decorate.refresh = refresh
99 decorate.get = get
100 decorate.original = fn
101 decorate.key_generator = key_generator
102 decorate.__wrapped__ = fn
103
104 return decorate
105 return decorator
106
53 107 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
54 108
55 109 if not condition:
56 log.debug('Calling un-cached func:%s', user_func.func_name)
110 log.debug('Calling un-cached method:%s', user_func.func_name)
57 111 start = time.time()
58 112 result = user_func(*arg, **kw)
59 113 total = time.time() - start
60 log.debug('un-cached func:%s took %.4fs', user_func.func_name, total)
114 log.debug('un-cached method:%s took %.4fs', user_func.func_name, total)
61 115 return result
62 116
63 117 key = key_generator(*arg, **kw)
64 118
65 119 timeout = expiration_time() if expiration_time_is_callable \
66 120 else expiration_time
67 121
68 log.debug('Calling cached fn:%s', user_func.func_name)
122 log.debug('Calling cached method:`%s`', user_func.func_name)
69 123 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
70 124
71 125 def cache_decorator(user_func):
72 126 if to_str is compat.string_type:
73 127 # backwards compatible
74 128 key_generator = function_key_generator(namespace, user_func)
75 129 else:
76 130 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
77 131
78 132 def refresh(*arg, **kw):
79 133 """
80 134 Like invalidate, but regenerates the value instead
81 135 """
82 136 key = key_generator(*arg, **kw)
83 137 value = user_func(*arg, **kw)
84 138 self.set(key, value)
85 139 return value
86 140
87 141 def invalidate(*arg, **kw):
88 142 key = key_generator(*arg, **kw)
89 143 self.delete(key)
90 144
91 145 def set_(value, *arg, **kw):
92 146 key = key_generator(*arg, **kw)
93 147 self.set(key, value)
94 148
95 149 def get(*arg, **kw):
96 150 key = key_generator(*arg, **kw)
97 151 return self.get(key)
98 152
99 153 user_func.set = set_
100 154 user_func.invalidate = invalidate
101 155 user_func.get = get
102 156 user_func.refresh = refresh
103 157 user_func.key_generator = key_generator
104 158 user_func.original = user_func
105 159
106 160 # Use `decorate` to preserve the signature of :param:`user_func`.
107 return decorate(user_func, functools.partial(
161 return decorator.decorate(user_func, functools.partial(
108 162 get_or_create_for_user_func, key_generator))
109 163
110 164 return cache_decorator
111 165
112 166
113 167 def make_region(*arg, **kw):
114 168 return RhodeCodeCacheRegion(*arg, **kw)
115 169
116 170
117 171 def get_default_cache_settings(settings, prefixes=None):
118 172 prefixes = prefixes or []
119 173 cache_settings = {}
120 174 for key in settings.keys():
121 175 for prefix in prefixes:
122 176 if key.startswith(prefix):
123 177 name = key.split(prefix)[1].strip()
124 178 val = settings[key]
125 179 if isinstance(val, compat.string_types):
126 180 val = val.strip()
127 181 cache_settings[name] = val
128 182 return cache_settings
129 183
130 184
131 185 def compute_key_from_params(*args):
132 186 """
133 187 Helper to compute a key from the given params, for use in the cache manager
134 188 """
135 189 return sha1("_".join(map(safe_str, args)))
136 190
137 191
138 192 def backend_key_generator(backend):
139 193 """
140 194 Special wrapper that also sends over the backend to the key generator
141 195 """
142 196 def wrapper(namespace, fn):
143 197 return key_generator(backend, namespace, fn)
144 198 return wrapper
145 199
146 200
147 201 def key_generator(backend, namespace, fn):
148 202 fname = fn.__name__
149 203
150 204 def generate_key(*args):
151 205 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
152 206 namespace_pref = namespace or 'default_namespace'
153 207 arg_key = compute_key_from_params(*args)
154 208 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
155 209
156 210 return final_key
157 211
158 212 return generate_key
213
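The generated keys follow `backend_prefix:namespace:fname_sha1(args)`; an illustrative trace (backend left as None, so the fallback prefix is used):

    def my_func(repo_id, rev):
        pass

    gen = key_generator(None, 'repo_object', my_func)
    key = gen('repo-1', 42)
    # -> 'backend_prefix:repo_object:my_func_' + sha1('repo-1_42')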
214
215 def get_or_create_region(region_name, region_namespace=None):
216 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
217 region_obj = region_meta.dogpile_cache_regions.get(region_name)
218 if not region_obj:
219 raise EnvironmentError(
220 'Region `{}` not found in configured regions: {}.'.format(
221 region_name, region_meta.dogpile_cache_regions.keys()))
222
223 region_uid_name = '{}:{}'.format(region_name, region_namespace)
224 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
225 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
226 if region_exist:
227 log.debug('Using already configured region: %s', region_namespace)
228 return region_exist
229 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
230 expiration_time = region_obj.expiration_time
231
232 if not os.path.isdir(cache_dir):
233 os.makedirs(cache_dir)
234 new_region = make_region(
235 name=region_uid_name,
236 function_key_generator=backend_key_generator(region_obj.actual_backend)
237 )
238 namespace_filename = os.path.join(
239 cache_dir, "{}.cache.dbm".format(region_namespace))
240 # special backend type that allows one DB file per namespace
241 new_region.configure(
242 backend='dogpile.cache.rc.file_namespace',
243 expiration_time=expiration_time,
244 arguments={"filename": namespace_filename}
245 )
246
247 # create and save in region caches
248 log.debug('configuring new region: %s', region_uid_name)
249 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
250
251 return region_obj
252
253
254 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
255 region = get_or_create_region(cache_region, cache_namespace_uid)
256 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
257 num_delete_keys = len(cache_keys)
258 if invalidate:
259 region.invalidate(hard=False)
260 else:
261 if num_delete_keys:
262 region.delete_multi(cache_keys)
263 return num_delete_keys
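Typical call pattern for the two helpers added here (region and namespace names illustrative):

    # resolve (or lazily create) a per-namespace file region
    region = get_or_create_region('repo_object', 'repo_object:repo-1')

    # drop all cached keys under that namespace; returns the delete count
    removed = clear_cache_namespace('repo_object', 'repo_object:repo-1')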
@@ -1,855 +1,866 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 22 import time
23 23 from urllib2 import URLError
24 24 import urlparse
25 25 import logging
26 26 import posixpath as vcspath
27 27 import StringIO
28 28 import urllib
29 29 import traceback
30 30
31 31 import svn.client
32 32 import svn.core
33 33 import svn.delta
34 34 import svn.diff
35 35 import svn.fs
36 36 import svn.repos
37 37
38 38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 40 from vcsserver.exceptions import NoContentException
41 41 from vcsserver.utils import safe_str
42 42 from vcsserver.vcs_base import RemoteBase
43 43
44 44 log = logging.getLogger(__name__)
45 45
46 46
47 47 svn_compatible_versions_map = {
48 48 'pre-1.4-compatible': '1.3',
49 49 'pre-1.5-compatible': '1.4',
50 50 'pre-1.6-compatible': '1.5',
51 51 'pre-1.8-compatible': '1.7',
52 52 'pre-1.9-compatible': '1.8',
53 53 }
54 54
55 55 current_compatible_version = '1.12'
56 56
57 57
58 58 def reraise_safe_exceptions(func):
59 59 """Decorator for converting svn exceptions to something neutral."""
60 60 def wrapper(*args, **kwargs):
61 61 try:
62 62 return func(*args, **kwargs)
63 63 except Exception as e:
64 64 if not hasattr(e, '_vcs_kind'):
65 65 log.exception("Unhandled exception in svn remote call")
66 66 raise_from_original(exceptions.UnhandledException(e))
67 67 raise
68 68 return wrapper
69 69
70 70
71 71 class SubversionFactory(RepoFactory):
72 72 repo_type = 'svn'
73 73
74 74 def _create_repo(self, wire, create, compatible_version):
75 75 path = svn.core.svn_path_canonicalize(wire['path'])
76 76 if create:
77 77 fs_config = {'compatible-version': current_compatible_version}
78 78 if compatible_version:
79 79
80 80 compatible_version_string = \
81 81 svn_compatible_versions_map.get(compatible_version) \
82 82 or compatible_version
83 83 fs_config['compatible-version'] = compatible_version_string
84 84
85 85 log.debug('Create SVN repo with config "%s"', fs_config)
86 86 repo = svn.repos.create(path, "", "", None, fs_config)
87 87 else:
88 88 repo = svn.repos.open(path)
89 89
90 90 log.debug('Got SVN object: %s', repo)
91 91 return repo
92 92
93 93 def repo(self, wire, create=False, compatible_version=None):
94 94 """
95 95 Get a repository instance for the given path.
96 96 """
97 97 return self._create_repo(wire, create, compatible_version)
98 98
99 99
100 100 NODE_TYPE_MAPPING = {
101 101 svn.core.svn_node_file: 'file',
102 102 svn.core.svn_node_dir: 'dir',
103 103 }
104 104
105 105
106 106 class SvnRemote(RemoteBase):
107 107
108 108 def __init__(self, factory, hg_factory=None):
109 109 self._factory = factory
110 110 # TODO: Remove once we do not use internal Mercurial objects anymore
111 111 # for subversion
112 112 self._hg_factory = hg_factory
113 113
114 114 @reraise_safe_exceptions
115 115 def discover_svn_version(self):
116 116 try:
117 117 import svn.core
118 118 svn_ver = svn.core.SVN_VERSION
119 119 except ImportError:
120 120 svn_ver = None
121 121 return svn_ver
122 122
123 123 @reraise_safe_exceptions
124 124 def is_empty(self, wire):
125 125
126 126 try:
127 127 return self.lookup(wire, -1) == 0
128 128 except Exception:
129 129 log.exception("failed to read object_store")
130 130 return False
131 131
132 132 def check_url(self, url, config_items):
133 133 # this can throw exception if not installed, but we detect this
134 134 from hgsubversion import svnrepo
135 135
136 136 baseui = self._hg_factory._create_config(config_items)
137 137 # the uuid function gets a valid UUID only from a proper repo, else
138 138 # it throws an exception
139 139 try:
140 140 svnrepo.svnremoterepo(baseui, url).svn.uuid
141 141 except Exception:
142 142 tb = traceback.format_exc()
143 143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
144 144 raise URLError(
145 145 '"%s" is not a valid Subversion source url.' % (url, ))
146 146 return True
147 147
148 148 def is_path_valid_repository(self, wire, path):
149 149
150 150 # NOTE(marcink): short-circuit the check for an SVN repo;
151 151 # repos.open might be expensive to run, but we have one cheap
152 152 # precondition we can use: checking for the 'format' file
153 153
154 154 if not os.path.isfile(os.path.join(path, 'format')):
155 155 return False
156 156
157 157 try:
158 158 svn.repos.open(path)
159 159 except svn.core.SubversionException:
160 160 tb = traceback.format_exc()
161 161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
162 162 return False
163 163 return True
164 164
165 165 @reraise_safe_exceptions
166 166 def verify(self, wire,):
167 167 repo_path = wire['path']
168 168 if not self.is_path_valid_repository(wire, repo_path):
169 169 raise Exception(
170 170 "Path %s is not a valid Subversion repository." % repo_path)
171 171
172 172 cmd = ['svnadmin', 'info', repo_path]
173 173 stdout, stderr = subprocessio.run_command(cmd)
174 174 return stdout
175 175
176 176 def lookup(self, wire, revision):
177 177 if revision not in [-1, None, 'HEAD']:
178 178 raise NotImplementedError
179 179 repo = self._factory.repo(wire)
180 180 fs_ptr = svn.repos.fs(repo)
181 181 head = svn.fs.youngest_rev(fs_ptr)
182 182 return head
183 183
184 184 def lookup_interval(self, wire, start_ts, end_ts):
185 185 repo = self._factory.repo(wire)
186 186 fsobj = svn.repos.fs(repo)
187 187 start_rev = None
188 188 end_rev = None
189 189 if start_ts:
190 190 start_ts_svn = apr_time_t(start_ts)
191 191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
192 192 else:
193 193 start_rev = 1
194 194 if end_ts:
195 195 end_ts_svn = apr_time_t(end_ts)
196 196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
197 197 else:
198 198 end_rev = svn.fs.youngest_rev(fsobj)
199 199 return start_rev, end_rev
200 200
201 201 def revision_properties(self, wire, revision):
202 202
203 203 cache_on, context_uid, repo_id = self._cache_on(wire)
204 @self.region.conditional_cache_on_arguments(condition=cache_on)
204 region = self._region(wire)
205 @region.conditional_cache_on_arguments(condition=cache_on)
205 206 def _revision_properties(_repo_id, _revision):
206 207 repo = self._factory.repo(wire)
207 208 fs_ptr = svn.repos.fs(repo)
208 209 return svn.fs.revision_proplist(fs_ptr, revision)
209 210 return _revision_properties(repo_id, revision)
210 211
211 212 def revision_changes(self, wire, revision):
212 213
213 214 repo = self._factory.repo(wire)
214 215 fsobj = svn.repos.fs(repo)
215 216 rev_root = svn.fs.revision_root(fsobj, revision)
216 217
217 218 editor = svn.repos.ChangeCollector(fsobj, rev_root)
218 219 editor_ptr, editor_baton = svn.delta.make_editor(editor)
219 220 base_dir = ""
220 221 send_deltas = False
221 222 svn.repos.replay2(
222 223 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
223 224 editor_ptr, editor_baton, None)
224 225
225 226 added = []
226 227 changed = []
227 228 removed = []
228 229
229 230 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
230 231 for path, change in editor.changes.iteritems():
231 232 # TODO: Decide what to do with directory nodes. Subversion can add
232 233 # empty directories.
233 234
234 235 if change.item_kind == svn.core.svn_node_dir:
235 236 continue
236 237 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
237 238 added.append(path)
238 239 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
239 240 svn.repos.CHANGE_ACTION_REPLACE]:
240 241 changed.append(path)
241 242 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
242 243 removed.append(path)
243 244 else:
244 245 raise NotImplementedError(
245 246 "Action %s not supported on path %s" % (
246 247 change.action, path))
247 248
248 249 changes = {
249 250 'added': added,
250 251 'changed': changed,
251 252 'removed': removed,
252 253 }
253 254 return changes
254 255
255 256 @reraise_safe_exceptions
256 257 def node_history(self, wire, path, revision, limit):
257 258 cache_on, context_uid, repo_id = self._cache_on(wire)
258 @self.region.conditional_cache_on_arguments(condition=cache_on)
259 region = self._region(wire)
260 @region.conditional_cache_on_arguments(condition=cache_on)
259 261 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
260 262 cross_copies = False
261 263 repo = self._factory.repo(wire)
262 264 fsobj = svn.repos.fs(repo)
263 265 rev_root = svn.fs.revision_root(fsobj, revision)
264 266
265 267 history_revisions = []
266 268 history = svn.fs.node_history(rev_root, path)
267 269 history = svn.fs.history_prev(history, cross_copies)
268 270 while history:
269 271 __, node_revision = svn.fs.history_location(history)
270 272 history_revisions.append(node_revision)
271 273 if limit and len(history_revisions) >= limit:
272 274 break
273 275 history = svn.fs.history_prev(history, cross_copies)
274 276 return history_revisions
275 277 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
276 278
277 279 def node_properties(self, wire, path, revision):
278 280 cache_on, context_uid, repo_id = self._cache_on(wire)
279 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 region = self._region(wire)
282 @region.conditional_cache_on_arguments(condition=cache_on)
280 283 def _node_properties(_repo_id, _path, _revision):
281 284 repo = self._factory.repo(wire)
282 285 fsobj = svn.repos.fs(repo)
283 286 rev_root = svn.fs.revision_root(fsobj, revision)
284 287 return svn.fs.node_proplist(rev_root, path)
285 288 return _node_properties(repo_id, path, revision)
286 289
287 290 def file_annotate(self, wire, path, revision):
288 291 abs_path = 'file://' + urllib.pathname2url(
289 292 vcspath.join(wire['path'], path))
290 293 file_uri = svn.core.svn_path_canonicalize(abs_path)
291 294
292 295 start_rev = svn_opt_revision_value_t(0)
293 296 peg_rev = svn_opt_revision_value_t(revision)
294 297 end_rev = peg_rev
295 298
296 299 annotations = []
297 300
298 301 def receiver(line_no, revision, author, date, line, pool):
299 302 annotations.append((line_no, revision, line))
300 303
301 304 # TODO: Cannot use blame5, missing typemap function in the swig code
302 305 try:
303 306 svn.client.blame2(
304 307 file_uri, peg_rev, start_rev, end_rev,
305 308 receiver, svn.client.create_context())
306 309 except svn.core.SubversionException as exc:
307 310 log.exception("Error during blame operation.")
308 311 raise Exception(
309 312 "Blame not supported or file does not exist at path %s. "
310 313 "Error %s." % (path, exc))
311 314
312 315 return annotations
313 316
314 317 def get_node_type(self, wire, path, revision=None):
315 318
316 319 cache_on, context_uid, repo_id = self._cache_on(wire)
317 @self.region.conditional_cache_on_arguments(condition=cache_on)
320 region = self._region(wire)
321 @region.conditional_cache_on_arguments(condition=cache_on)
318 322 def _get_node_type(_repo_id, _path, _revision):
319 323 repo = self._factory.repo(wire)
320 324 fs_ptr = svn.repos.fs(repo)
321 325 if _revision is None:
322 326 _revision = svn.fs.youngest_rev(fs_ptr)
323 327 root = svn.fs.revision_root(fs_ptr, _revision)
324 328 node = svn.fs.check_path(root, path)
325 329 return NODE_TYPE_MAPPING.get(node, None)
326 330 return _get_node_type(repo_id, path, revision)
327 331
328 332 def get_nodes(self, wire, path, revision=None):
329 333
330 334 cache_on, context_uid, repo_id = self._cache_on(wire)
331 @self.region.conditional_cache_on_arguments(condition=cache_on)
335 region = self._region(wire)
336 @region.conditional_cache_on_arguments(condition=cache_on)
332 337 def _get_nodes(_repo_id, _path, _revision):
333 338 repo = self._factory.repo(wire)
334 339 fsobj = svn.repos.fs(repo)
335 340 if _revision is None:
336 341 _revision = svn.fs.youngest_rev(fsobj)
337 342 root = svn.fs.revision_root(fsobj, _revision)
338 343 entries = svn.fs.dir_entries(root, path)
339 344 result = []
340 345 for entry_path, entry_info in entries.iteritems():
341 346 result.append(
342 347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 348 return result
344 349 return _get_nodes(repo_id, path, revision)
345 350
346 351 def get_file_content(self, wire, path, rev=None):
347 352 repo = self._factory.repo(wire)
348 353 fsobj = svn.repos.fs(repo)
349 354 if rev is None:
350 355 rev = svn.fs.youngest_revision(fsobj)
351 356 root = svn.fs.revision_root(fsobj, rev)
352 357 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 358 return content.read()
354 359
355 360 def get_file_size(self, wire, path, revision=None):
356 361
357 362 cache_on, context_uid, repo_id = self._cache_on(wire)
358 @self.region.conditional_cache_on_arguments(condition=cache_on)
363 region = self._region(wire)
364 @region.conditional_cache_on_arguments(condition=cache_on)
359 365 def _get_file_size(_repo_id, _path, _revision):
360 366 repo = self._factory.repo(wire)
361 367 fsobj = svn.repos.fs(repo)
362 368 if _revision is None:
363 369 _revision = svn.fs.youngest_revision(fsobj)
364 370 root = svn.fs.revision_root(fsobj, _revision)
365 371 size = svn.fs.file_length(root, path)
366 372 return size
367 373 return _get_file_size(repo_id, path, revision)
368 374
369 375 def create_repository(self, wire, compatible_version=None):
370 376 log.info('Creating Subversion repository in path "%s"', wire['path'])
371 377 self._factory.repo(wire, create=True,
372 378 compatible_version=compatible_version)
373 379
374 380 def get_url_and_credentials(self, src_url):
375 381 obj = urlparse.urlparse(src_url)
376 382 username = obj.username or None
377 383 password = obj.password or None
378 384 return username, password, src_url
379 385
380 386 def import_remote_repository(self, wire, src_url):
381 387 repo_path = wire['path']
382 388 if not self.is_path_valid_repository(wire, repo_path):
383 389 raise Exception(
384 390 "Path %s is not a valid Subversion repository." % repo_path)
385 391
386 392 username, password, src_url = self.get_url_and_credentials(src_url)
387 393 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
388 394 '--trust-server-cert-failures=unknown-ca']
389 395 if username and password:
390 396 rdump_cmd += ['--username', username, '--password', password]
391 397 rdump_cmd += [src_url]
392 398
393 399 rdump = subprocess.Popen(
394 400 rdump_cmd,
395 401 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
396 402 load = subprocess.Popen(
397 403 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
398 404
399 405 # TODO: johbo: This can be a very long operation, might be better
400 406 # to track some kind of status and provide an api to check if the
401 407 # import is done.
402 408 rdump.wait()
403 409 load.wait()
404 410
405 411 log.debug('Return process ended with code: %s', rdump.returncode)
406 412 if rdump.returncode != 0:
407 413 errors = rdump.stderr.read()
408 414 log.error('svnrdump dump failed: statuscode %s: message: %s',
409 415 rdump.returncode, errors)
410 416 reason = 'UNKNOWN'
411 417 if 'svnrdump: E230001:' in errors:
412 418 reason = 'INVALID_CERTIFICATE'
413 419
414 420 if reason == 'UNKNOWN':
415 421 reason = 'UNKNOWN:{}'.format(errors)
416 422 raise Exception(
417 423 'Failed to dump the remote repository from %s. Reason:%s' % (
418 424 src_url, reason))
419 425 if load.returncode != 0:
420 426 raise Exception(
421 427 'Failed to load the dump of remote repository from %s.' %
422 428 (src_url, ))
423 429
424 430 def commit(self, wire, message, author, timestamp, updated, removed):
425 431 assert isinstance(message, str)
426 432 assert isinstance(author, str)
427 433
428 434 repo = self._factory.repo(wire)
429 435 fsobj = svn.repos.fs(repo)
430 436
431 437 rev = svn.fs.youngest_rev(fsobj)
432 438 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
433 439 txn_root = svn.fs.txn_root(txn)
434 440
435 441 for node in updated:
436 442 TxnNodeProcessor(node, txn_root).update()
437 443 for node in removed:
438 444 TxnNodeProcessor(node, txn_root).remove()
439 445
440 446 commit_id = svn.repos.fs_commit_txn(repo, txn)
441 447
442 448 if timestamp:
443 449 apr_time = apr_time_t(timestamp)
444 450 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
445 451 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
446 452
447 453 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
448 454 return commit_id
449 455
450 456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
451 457 ignore_whitespace=False, context=3):
452 458
453 459 wire.update(cache=False)
454 460 repo = self._factory.repo(wire)
455 461 diff_creator = SvnDiffer(
456 462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
457 463 try:
458 464 return diff_creator.generate_diff()
459 465 except svn.core.SubversionException as e:
460 466 log.exception(
461 467 "Error during diff operation operation. "
462 468 "Path might not exist %s, %s" % (path1, path2))
463 469 return ""
464 470
465 471 @reraise_safe_exceptions
466 472 def is_large_file(self, wire, path):
467 473 return False
468 474
469 475 @reraise_safe_exceptions
470 476 def is_binary(self, wire, rev, path):
471 477 cache_on, context_uid, repo_id = self._cache_on(wire)
472 478
473 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 region = self._region(wire)
480 @region.conditional_cache_on_arguments(condition=cache_on)
474 481 def _is_binary(_repo_id, _rev, _path):
475 482 raw_bytes = self.get_file_content(wire, path, rev)
476 483 return raw_bytes and '\0' in raw_bytes
477 484
478 485 return _is_binary(repo_id, rev, path)
479 486
480 487 @reraise_safe_exceptions
481 488 def run_svn_command(self, wire, cmd, **opts):
482 489 path = wire.get('path', None)
483 490
484 491 if path and os.path.isdir(path):
485 492 opts['cwd'] = path
486 493
487 494 safe_call = opts.pop('_safe', False)
488 495
489 496 svnenv = os.environ.copy()
490 497 svnenv.update(opts.pop('extra_env', {}))
491 498
492 499 _opts = {'env': svnenv, 'shell': False}
493 500
494 501 try:
495 502 _opts.update(opts)
496 503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
497 504
498 505 return ''.join(p), ''.join(p.error)
499 506 except (EnvironmentError, OSError) as err:
500 507 if safe_call:
501 508 return '', safe_str(err).strip()
502 509 else:
503 510 cmd = ' '.join(cmd) # human friendly CMD
504 511 tb_err = ("Couldn't run svn command (%s).\n"
505 512 "Original error was:%s\n"
506 513 "Call options:%s\n"
507 514 % (cmd, err, _opts))
508 515 log.exception(tb_err)
509 516 raise exceptions.VcsException()(tb_err)
510 517
511 518 @reraise_safe_exceptions
512 519 def install_hooks(self, wire, force=False):
513 520 from vcsserver.hook_utils import install_svn_hooks
514 521 repo_path = wire['path']
515 522 binary_dir = settings.BINARY_DIR
516 523 executable = None
517 524 if binary_dir:
518 525 executable = os.path.join(binary_dir, 'python')
519 526 return install_svn_hooks(
520 527 repo_path, executable=executable, force_create=force)
521 528
522 529 @reraise_safe_exceptions
523 530 def get_hooks_info(self, wire):
524 531 from vcsserver.hook_utils import (
525 532 get_svn_pre_hook_version, get_svn_post_hook_version)
526 533 repo_path = wire['path']
527 534 return {
528 535 'pre_version': get_svn_pre_hook_version(repo_path),
529 536 'post_version': get_svn_post_hook_version(repo_path),
530 537 }
531 538
532 539 @reraise_safe_exceptions
540 def set_head_ref(self, wire, head_name):
541 pass
542
543 @reraise_safe_exceptions
533 544 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
534 545 archive_dir_name, commit_id):
535 546
536 547 def walk_tree(root, root_dir, _commit_id):
537 548 """
538 549 Special recursive svn repo walker
539 550 """
540 551
541 552 filemode_default = 0o100644
542 553 filemode_executable = 0o100755
543 554
544 555 file_iter = svn.fs.dir_entries(root, root_dir)
545 556 for f_name in file_iter:
546 557 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
547 558
548 559 if f_type == 'dir':
549 560 # return only DIR, and then all entries in that dir
550 561 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
551 562 new_root = os.path.join(root_dir, f_name)
552 563 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
553 564 yield _f_name, _f_data, _f_type
554 565 else:
555 566 f_path = os.path.join(root_dir, f_name).rstrip('/')
556 567 prop_list = svn.fs.node_proplist(root, f_path)
557 568
558 569 f_mode = filemode_default
559 570 if prop_list.get('svn:executable'):
560 571 f_mode = filemode_executable
561 572
562 573 f_is_link = False
563 574 if prop_list.get('svn:special'):
564 575 f_is_link = True
565 576
566 577 data = {
567 578 'is_link': f_is_link,
568 579 'mode': f_mode,
569 580 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
570 581 }
571 582
572 583 yield f_path, data, f_type
573 584
574 585 def file_walker(_commit_id, path):
575 586 repo = self._factory.repo(wire)
576 587 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
577 588
578 589 def no_content():
579 590 raise NoContentException()
580 591
581 592 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
582 593 file_path = f_name
583 594
584 595 if f_type == 'dir':
585 596 mode = f_data['mode']
586 597 yield ArchiveNode(file_path, mode, False, no_content)
587 598 else:
588 599 mode = f_data['mode']
589 600 is_link = f_data['is_link']
590 601 data_stream = f_data['content_stream']
591 602 yield ArchiveNode(file_path, mode, is_link, data_stream)
592 603
593 604 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
594 605 archive_dir_name, commit_id)
595 606
596 607
597 608 class SvnDiffer(object):
598 609 """
599 610     Utility to create diffs based on difflib and the Subversion API
600 611 """
601 612
602 613 binary_content = False
603 614
604 615 def __init__(
605 616 self, repo, src_rev, src_path, tgt_rev, tgt_path,
606 617 ignore_whitespace, context):
607 618 self.repo = repo
608 619 self.ignore_whitespace = ignore_whitespace
609 620 self.context = context
610 621
611 622 fsobj = svn.repos.fs(repo)
612 623
613 624 self.tgt_rev = tgt_rev
614 625 self.tgt_path = tgt_path or ''
615 626 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
616 627 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
617 628
618 629 self.src_rev = src_rev
619 630 self.src_path = src_path or self.tgt_path
620 631 self.src_root = svn.fs.revision_root(fsobj, src_rev)
621 632 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
622 633
623 634 self._validate()
624 635
625 636 def _validate(self):
626 637 if (self.tgt_kind != svn.core.svn_node_none and
627 638 self.src_kind != svn.core.svn_node_none and
628 639 self.src_kind != self.tgt_kind):
629 640 # TODO: johbo: proper error handling
630 641 raise Exception(
631 642 "Source and target are not compatible for diff generation. "
632 643 "Source type: %s, target type: %s" %
633 644 (self.src_kind, self.tgt_kind))
634 645
635 646 def generate_diff(self):
636 647 buf = StringIO.StringIO()
637 648 if self.tgt_kind == svn.core.svn_node_dir:
638 649 self._generate_dir_diff(buf)
639 650 else:
640 651 self._generate_file_diff(buf)
641 652 return buf.getvalue()
642 653
643 654 def _generate_dir_diff(self, buf):
644 655 editor = DiffChangeEditor()
645 656 editor_ptr, editor_baton = svn.delta.make_editor(editor)
646 657 svn.repos.dir_delta2(
647 658 self.src_root,
648 659 self.src_path,
649 660 '', # src_entry
650 661 self.tgt_root,
651 662 self.tgt_path,
652 663 editor_ptr, editor_baton,
653 664 authorization_callback_allow_all,
654 665 False, # text_deltas
655 666 svn.core.svn_depth_infinity, # depth
656 667 False, # entry_props
657 668 False, # ignore_ancestry
658 669 )
659 670
660 671 for path, __, change in sorted(editor.changes):
661 672 self._generate_node_diff(
662 673 buf, change, path, self.tgt_path, path, self.src_path)
663 674
664 675 def _generate_file_diff(self, buf):
665 676 change = None
666 677 if self.src_kind == svn.core.svn_node_none:
667 678 change = "add"
668 679 elif self.tgt_kind == svn.core.svn_node_none:
669 680 change = "delete"
670 681 tgt_base, tgt_path = vcspath.split(self.tgt_path)
671 682 src_base, src_path = vcspath.split(self.src_path)
672 683 self._generate_node_diff(
673 684 buf, change, tgt_path, tgt_base, src_path, src_base)
674 685
675 686 def _generate_node_diff(
676 687 self, buf, change, tgt_path, tgt_base, src_path, src_base):
677 688
678 689 if self.src_rev == self.tgt_rev and tgt_base == src_base:
679 690             # return an empty diff when comparing the same revision,
680 691             # consistent with git/hg behaviour
681 692 return
682 693
683 694 tgt_full_path = vcspath.join(tgt_base, tgt_path)
684 695 src_full_path = vcspath.join(src_base, src_path)
685 696
686 697 self.binary_content = False
687 698 mime_type = self._get_mime_type(tgt_full_path)
688 699
689 700 if mime_type and not mime_type.startswith('text'):
690 701 self.binary_content = True
691 702 buf.write("=" * 67 + '\n')
692 703 buf.write("Cannot display: file marked as a binary type.\n")
693 704 buf.write("svn:mime-type = %s\n" % mime_type)
694 705 buf.write("Index: %s\n" % (tgt_path, ))
695 706 buf.write("=" * 67 + '\n')
696 707 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
697 708 'tgt_path': tgt_path})
698 709
699 710 if change == 'add':
700 711 # TODO: johbo: SVN is missing a zero here compared to git
701 712 buf.write("new file mode 10644\n")
702 713
703 714                 # TODO(marcink): introduce binary detection for svn patches
704 715 # if self.binary_content:
705 716 # buf.write('GIT binary patch\n')
706 717
707 718 buf.write("--- /dev/null\t(revision 0)\n")
708 719 src_lines = []
709 720 else:
710 721 if change == 'delete':
711 722 buf.write("deleted file mode 10644\n")
712 723
713 724                     # TODO(marcink): introduce binary detection for svn patches
714 725 # if self.binary_content:
715 726 # buf.write('GIT binary patch\n')
716 727
717 728 buf.write("--- a/%s\t(revision %s)\n" % (
718 729 src_path, self.src_rev))
719 730 src_lines = self._svn_readlines(self.src_root, src_full_path)
720 731
721 732 if change == 'delete':
722 733 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
723 734 tgt_lines = []
724 735 else:
725 736 buf.write("+++ b/%s\t(revision %s)\n" % (
726 737 tgt_path, self.tgt_rev))
727 738 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
728 739
729 740 if not self.binary_content:
730 741 udiff = svn_diff.unified_diff(
731 742 src_lines, tgt_lines, context=self.context,
732 743 ignore_blank_lines=self.ignore_whitespace,
733 744 ignore_case=False,
734 745 ignore_space_changes=self.ignore_whitespace)
735 746 buf.writelines(udiff)
736 747
737 748 def _get_mime_type(self, path):
738 749 try:
739 750 mime_type = svn.fs.node_prop(
740 751 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
741 752 except svn.core.SubversionException:
742 753 mime_type = svn.fs.node_prop(
743 754 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
744 755 return mime_type
745 756
746 757 def _svn_readlines(self, fs_root, node_path):
747 758 if self.binary_content:
748 759 return []
749 760 node_kind = svn.fs.check_path(fs_root, node_path)
750 761 if node_kind not in (
751 762 svn.core.svn_node_file, svn.core.svn_node_symlink):
752 763 return []
753 764 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
754 765 return content.splitlines(True)
755 766
756 767
757 768 class DiffChangeEditor(svn.delta.Editor):
758 769 """
759 770 Records changes between two given revisions
760 771 """
761 772
762 773 def __init__(self):
763 774 self.changes = []
764 775
765 776 def delete_entry(self, path, revision, parent_baton, pool=None):
766 777 self.changes.append((path, None, 'delete'))
767 778
768 779 def add_file(
769 780 self, path, parent_baton, copyfrom_path, copyfrom_revision,
770 781 file_pool=None):
771 782 self.changes.append((path, 'file', 'add'))
772 783
773 784 def open_file(self, path, parent_baton, base_revision, file_pool=None):
774 785 self.changes.append((path, 'file', 'change'))
775 786
776 787
777 788 def authorization_callback_allow_all(root, path, pool):
778 789 return True
779 790
780 791
781 792 class TxnNodeProcessor(object):
782 793 """
783 794 Utility to process the change of one node within a transaction root.
784 795
785 796 It encapsulates the knowledge of how to add, update or remove
786 797 a node for a given transaction root. The purpose is to support the method
787 798 `SvnRemote.commit`.
788 799 """
789 800
790 801 def __init__(self, node, txn_root):
791 802 assert isinstance(node['path'], str)
792 803
793 804 self.node = node
794 805 self.txn_root = txn_root
795 806
796 807 def update(self):
797 808 self._ensure_parent_dirs()
798 809 self._add_file_if_node_does_not_exist()
799 810 self._update_file_content()
800 811 self._update_file_properties()
801 812
802 813 def remove(self):
803 814 svn.fs.delete(self.txn_root, self.node['path'])
804 815 # TODO: Clean up directory if empty
805 816
806 817 def _ensure_parent_dirs(self):
807 818 curdir = vcspath.dirname(self.node['path'])
808 819 dirs_to_create = []
809 820 while not self._svn_path_exists(curdir):
810 821 dirs_to_create.append(curdir)
811 822 curdir = vcspath.dirname(curdir)
812 823
813 824 for curdir in reversed(dirs_to_create):
814 825 log.debug('Creating missing directory "%s"', curdir)
815 826 svn.fs.make_dir(self.txn_root, curdir)
816 827
817 828 def _svn_path_exists(self, path):
818 829 path_status = svn.fs.check_path(self.txn_root, path)
819 830 return path_status != svn.core.svn_node_none
820 831
821 832 def _add_file_if_node_does_not_exist(self):
822 833 kind = svn.fs.check_path(self.txn_root, self.node['path'])
823 834 if kind == svn.core.svn_node_none:
824 835 svn.fs.make_file(self.txn_root, self.node['path'])
825 836
826 837 def _update_file_content(self):
827 838 assert isinstance(self.node['content'], str)
828 839 handler, baton = svn.fs.apply_textdelta(
829 840 self.txn_root, self.node['path'], None, None)
830 841 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
831 842
832 843 def _update_file_properties(self):
833 844 properties = self.node.get('properties', {})
834 845 for key, value in properties.iteritems():
835 846 svn.fs.change_node_prop(
836 847 self.txn_root, self.node['path'], key, value)
837 848
838 849
839 850 def apr_time_t(timestamp):
840 851 """
841 852 Convert a Python timestamp into APR timestamp type apr_time_t
842 853 """
843 854 return timestamp * 1E6
844 855
845 856
846 857 def svn_opt_revision_value_t(num):
847 858 """
848 859     Wrap `num` in an `svn_opt_revision_value_t` and return it inside an `svn_opt_revision_t`.
849 860 """
850 861 value = svn.core.svn_opt_revision_value_t()
851 862 value.number = num
852 863 revision = svn.core.svn_opt_revision_t()
853 864 revision.kind = svn.core.svn_opt_revision_number
854 865 revision.value = value
855 866 return revision
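
The reworked `archive_repo` above drives archiving through a generator contract: `file_walker` yields one node per tree entry, directories before their children, and file content is exposed as a zero-argument callable rather than raw bytes. A minimal sketch of the same contract over a local directory tree, assuming `ArchiveNode` is a simple 4-tuple shaped the way its call sites above suggest — every name here is illustrative, not the vcsserver API:

import os
from collections import namedtuple

# Illustrative stand-in for vcsserver's ArchiveNode, inferred from the call
# sites above: (path, mode, is_link, callable returning the raw bytes).
ArchiveNode = namedtuple('ArchiveNode', ['path', 'mode', 'is_link', 'raw_bytes'])

FILEMODE_DEFAULT = 0o100644
FILEMODE_EXECUTABLE = 0o100755


def no_content():
    # directories carry no payload; consumers must not call this
    raise ValueError('directory nodes have no content')


def make_reader(full_path):
    # defer reading until the archive writer actually asks for the bytes
    def read():
        with open(full_path, 'rb') as fh:
            return fh.read()
    return read


def local_file_walker(root_path):
    """Yield ArchiveNode entries for a local tree, each dir before its files."""
    for dirpath, _dirnames, filenames in os.walk(root_path):
        rel_dir = os.path.relpath(dirpath, root_path)
        if rel_dir != '.':
            yield ArchiveNode(rel_dir, FILEMODE_DEFAULT, False, no_content)
        for f_name in filenames:
            full = os.path.join(dirpath, f_name)
            mode = (FILEMODE_EXECUTABLE if os.access(full, os.X_OK)
                    else FILEMODE_DEFAULT)
            rel_file = os.path.normpath(os.path.join(rel_dir, f_name))
            yield ArchiveNode(rel_file, mode, os.path.islink(full),
                              make_reader(full))

Keeping content behind a callable lets the archive writer stream large files lazily instead of holding every blob in memory, which matches how the svn hunk passes `svn.core.Stream(...).read` rather than the bytes themselves.
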
@@ -1,160 +1,160 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.pull(
82 82 wire={}, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
102 102
103 103 class TestReraiseSafeExceptions(object):
104 104
105 105 def test_method_decorated_with_reraise_safe_exceptions(self):
106 106 factory = Mock()
107 107 git_remote = git.GitRemote(factory)
108 108
109 109 def fake_function():
110 110 return None
111 111
112 112 decorator = git.reraise_safe_exceptions(fake_function)
113 113
114 114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
115 115 for method_name, method in methods:
116 if not method_name.startswith('_'):
116 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
117 117 assert method.im_func.__code__ == decorator.__code__
118 118
119 119 @pytest.mark.parametrize('side_effect, expected_type', [
120 120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
121 121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
122 122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
123 123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
124 124 (dulwich.errors.HangupException(), 'error'),
125 125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
126 126 ])
127 127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
128 128 @git.reraise_safe_exceptions
129 129 def fake_method():
130 130 raise side_effect
131 131
132 132 with pytest.raises(Exception) as exc_info:
133 133 fake_method()
134 134 assert type(exc_info.value) == Exception
135 135 assert exc_info.value._vcs_kind == expected_type
136 136
137 137
138 138 class TestDulwichRepoWrapper(object):
139 139 def test_calls_close_on_delete(self):
140 140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
141 141 with isdir_patcher:
142 142 repo = git.Repo('/tmp/abcde')
143 143 with patch.object(git.DulwichRepo, 'close') as close_mock:
144 144 del repo
145 145 close_mock.assert_called_once_with()
146 146
147 147
148 148 class TestGitFactory(object):
149 149 def test_create_repo_returns_dulwich_wrapper(self):
150 150
151 151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
152 152 mock.side_effect = {'repo_objects': ''}
153 153 factory = git.GitFactory()
154 154 wire = {
155 155 'path': '/tmp/abcde'
156 156 }
157 157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
158 158 with isdir_patcher:
159 159 result = factory._create_repo(wire, True)
160 160 assert isinstance(result, git.Repo)
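
Both remote test modules assert the same contract for `reraise_safe_exceptions`: every public remote method except `vcsserver_invalidate_cache` must share the decorator's code object, and known backend errors must surface as a plain `Exception` tagged with `_vcs_kind`. A minimal sketch of a decorator meeting that contract — the kind table is illustrative and smaller than vcsserver's real mapping:

import functools

import dulwich.errors

# illustrative kind table; vcsserver's real mapping covers more types
_SAFE_EXCEPTIONS = {
    dulwich.errors.ChecksumMismatch: 'lookup',
    dulwich.errors.NotCommitError: 'lookup',
    dulwich.errors.MissingCommitError: 'lookup',
    dulwich.errors.ObjectMissing: 'lookup',
    dulwich.errors.HangupException: 'error',
    dulwich.errors.UnexpectedCommandError: 'error',
}


def reraise_safe_exceptions(func):
    """Translate backend errors into plain, kind-tagged exceptions."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exc:
            if hasattr(exc, '_vcs_kind'):
                raise  # already wire-safe, pass through untouched
            kind = _SAFE_EXCEPTIONS.get(type(exc), 'unhandled')
            safe = Exception('%s: %s' % (type(exc).__name__, exc))
            safe._vcs_kind = kind  # the tests above inspect this attribute
            raise safe
    return wrapper

Because `wrapper` is compiled once, every decorated method shares its code object, which is exactly what the `method.im_func.__code__ == decorator.__code__` assertion verifies.
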
@@ -1,108 +1,108 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
29 29 class TestDiff(object):
30 30 def test_raising_safe_exception_when_lookup_failed(self):
31 31
32 32 factory = Mock()
33 33 hg_remote = hg.HgRemote(factory)
34 34 with patch('mercurial.patch.diff') as diff_mock:
35 35 diff_mock.side_effect = LookupError(
36 36 'deadbeef', 'index', 'message')
37 37 with pytest.raises(Exception) as exc_info:
38 38 hg_remote.diff(
39 39 wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
40 40 file_filter=None, opt_git=True, opt_ignorews=True,
41 41 context=3)
42 42 assert type(exc_info.value) == Exception
43 43 assert exc_info.value._vcs_kind == 'lookup'
44 44
45 45
46 46 class TestReraiseSafeExceptions(object):
47 47 def test_method_decorated_with_reraise_safe_exceptions(self):
48 48 factory = Mock()
49 49 hg_remote = hg.HgRemote(factory)
50 50 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
51 51 decorator = hg.reraise_safe_exceptions(None)
52 52 for method_name, method in methods:
53 if not method_name.startswith('_'):
53 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
54 54 assert method.im_func.__code__ == decorator.__code__
55 55
56 56 @pytest.mark.parametrize('side_effect, expected_type', [
57 57 (hgcompat.Abort(), 'abort'),
58 58 (hgcompat.InterventionRequired(), 'abort'),
59 59 (hgcompat.RepoLookupError(), 'lookup'),
60 60 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
61 61 (hgcompat.RepoError(), 'error'),
62 62 (hgcompat.RequirementError(), 'requirement'),
63 63 ])
64 64 def test_safe_exceptions_reraised(self, side_effect, expected_type):
65 65 @hg.reraise_safe_exceptions
66 66 def fake_method():
67 67 raise side_effect
68 68
69 69 with pytest.raises(Exception) as exc_info:
70 70 fake_method()
71 71 assert type(exc_info.value) == Exception
72 72 assert exc_info.value._vcs_kind == expected_type
73 73
74 74 def test_keeps_original_traceback(self):
75 75 @hg.reraise_safe_exceptions
76 76 def fake_method():
77 77 try:
78 78 raise hgcompat.Abort()
79 79 except:
80 80 self.original_traceback = traceback.format_tb(
81 81 sys.exc_info()[2])
82 82 raise
83 83
84 84 try:
85 85 fake_method()
86 86 except Exception:
87 87 new_traceback = traceback.format_tb(sys.exc_info()[2])
88 88
89 89 new_traceback_tail = new_traceback[-len(self.original_traceback):]
90 90 assert new_traceback_tail == self.original_traceback
91 91
92 92     def test_maps_unknown_exceptions_to_unhandled(self):

93 93 @hg.reraise_safe_exceptions
94 94 def stub_method():
95 95 raise ValueError('stub')
96 96
97 97 with pytest.raises(Exception) as exc_info:
98 98 stub_method()
99 99 assert exc_info.value._vcs_kind == 'unhandled'
100 100
101 101 def test_does_not_map_known_exceptions(self):
102 102 @hg.reraise_safe_exceptions
103 103 def stub_method():
104 104 raise exceptions.LookupException()('stub')
105 105
106 106 with pytest.raises(Exception) as exc_info:
107 107 stub_method()
108 108 assert exc_info.value._vcs_kind == 'lookup'
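
`test_keeps_original_traceback` pins down a subtle requirement: wrapping an exception must not discard the frames raised below the decorator. In Python 2, which this code targets (note `im_func` and `iteritems`), that takes the three-argument raise form. A hedged sketch of just that except block, with the kind value chosen arbitrarily:

import sys


def reraise_preserving_traceback(func):  # sketch, Python 2 syntax
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exc:
            org_tb = sys.exc_info()[2]   # frames below this decorator
            safe = Exception(str(exc))
            safe._vcs_kind = 'abort'     # illustrative kind
            # the three-argument raise reuses org_tb, so the tail of the new
            # traceback equals the original one, as the test asserts;
            # the Python 3 equivalent is: raise safe.with_traceback(org_tb)
            raise safe, None, org_tb
    return wrapper
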
@@ -1,32 +1,45 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 from vcsserver.lib import rc_cache
18 19
19 20 class RemoteBase(object):
20 21 EMPTY_COMMIT = '0' * 40
21 22
22 @property
23 def region(self):
24 return self._factory._cache_region
23 def _region(self, wire):
24 repo_id = wire.get('repo_id', '')
25 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
26 return rc_cache.get_or_create_region('repo_object', cache_namespace_uid)
25 27
26 28 def _cache_on(self, wire):
27 29 context = wire.get('context', '')
28 30 context_uid = '{}'.format(context)
29 31 repo_id = wire.get('repo_id', '')
30 32 cache = wire.get('cache', True)
31 33 cache_on = context and cache
32 34 return cache_on, context_uid, repo_id
35
36 def vcsserver_invalidate_cache(self, wire, delete):
37 from vcsserver.lib import rc_cache
38 repo_id = wire.get('repo_id', '')
39
40 if delete:
41 cache_namespace_uid = 'cache_repo.{}'.format(repo_id)
42 rc_cache.clear_cache_namespace(
43 'repo_object', cache_namespace_uid, invalidate=True)
44
45 return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
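
The base.py hunk replaces the factory-wide `region` property with per-repository regions keyed by a `cache_repo.<repo_id>` namespace, which is what lets the new `vcsserver_invalidate_cache` drop one repository's entries without touching others. A standalone sketch of that pattern using dogpile.cache directly — the region setup and the stub lookup are assumptions for illustration, not `rc_cache` internals:

from dogpile.cache import make_region

# one in-memory region; per-repo isolation comes from the key namespace,
# mirroring base.py's 'cache_repo.{repo_id}' convention
region = make_region().configure('dogpile.cache.memory')


def _expensive_ref_lookup(repo_id):
    # stand-in for a real backend call, e.g. reading refs from disk
    return {'HEAD': 'deadbeef'}


def get_refs(repo_id):
    namespace = 'cache_repo.{}'.format(repo_id)  # same layout as _region()

    @region.cache_on_arguments(namespace=namespace)
    def _get_refs(_repo_id):
        return _expensive_ref_lookup(_repo_id)

    return _get_refs(repo_id)


def vcsserver_invalidate_cache(repo_id, delete=False):
    # rc_cache.clear_cache_namespace can clear one namespace; plain dogpile
    # only offers region-wide invalidation, so this sketch falls back to that
    region.invalidate(hard=delete)
    return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
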