release: Merge default into stable for release preparation
milka -
r915:ed04aac5 merge stable
@@ -1,6 +1,5 @@
1 1 [bumpversion]
2 current_version = 4.23.2
2 current_version = 4.24.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6
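The [bumpversion] stanza above moves the pinned release from 4.23.2 to 4.24.0, and the [bumpversion:file:vcsserver/VERSION] section tells the tool which files to rewrite using the message template shown. As a rough illustration of that substitution (a sketch of the behaviour only, not the bumpversion implementation itself):

    # Sketch only: bumpversion itself performs this rewrite and commits with
    # the message template from the config above.
    import io

    def bump_file(path, current_version, new_version):
        with io.open(path, encoding='utf-8') as fh:
            text = fh.read()
        with io.open(path, 'w', encoding='utf-8') as fh:
            fh.write(text.replace(current_version, new_version))

    if __name__ == '__main__':
        current, new = '4.23.2', '4.24.0'
        bump_file('vcsserver/VERSION', current, new)
        print('release: Bump version {current_version} to {new_version}'.format(
            current_version=current, new_version=new))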
@@ -1,16 +1,14 @@
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.23.2
12 state = in_progress
13 version = 4.24.0
16 14
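This file tracks per-task completion for the release: [DEFAULT] makes every task default to done = false, and the merge flips [release] to state = in_progress for 4.24.0. The tooling that consumes it is not part of this diff; the following is only a minimal reading sketch with a hypothetical path, written against Python 3's configparser (the project itself runs Python 2, where the module is ConfigParser):

    # Hypothetical reader for the release-tasks file shown above.
    import configparser

    def release_status(path='release.ini'):  # the real filename is not shown here
        cfg = configparser.ConfigParser()
        cfg.read(path)
        # [DEFAULT] done = false means any task without an explicit flag is pending
        pending = [section for section in cfg.sections()
                   if section.startswith('task:')
                   and not cfg.getboolean(section, 'done')]
        release = dict(cfg.items('release')) if cfg.has_section('release') else {}
        return release.get('state'), release.get('version'), pending

    if __name__ == '__main__':
        state, version, pending = release_status()
        print('release %s is %s; pending: %s' % (version, state, pending))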
@@ -1,28 +1,45 @@
1
2 .PHONY: clean test test-clean test-only generate-pkgs pip-packages
1 .DEFAULT_GOAL := help
3 2
4 3 # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
5 4 OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
6 5
7 clean:
6 .PHONY: clean
7 clean: ## full clean
8 8 make test-clean
9 9 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
10 10
11 test:
11
12 .PHONY: test
13 test: ## run test-clean and tests
12 14 make test-clean
13 15 make test-only
14 16
15 test-clean:
17
18 .PHONY: test-clean
19 test-clean: ## clean test artifacts and caches
16 20 rm -rf coverage.xml htmlcov junit.xml pylint.log result
17 21 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
22 find . -type f \( -iname '.coverage.*' \) -exec rm '{}' ';'
18 23
19 test-only:
24
25 .PHONY: test-only
26 test-only: ## run tests
20 27 PYTHONHASHSEED=random \
21 28 py.test -x -vv -r xw -p no:sugar \
22 --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
29 --cov=vcsserver --cov-report=term-missing --cov-report=html \
30 vcsserver
23 31
24 generate-pkgs:
32
33 .PHONY: generate-pkgs
34 generate-pkgs: ## generate new python packages
25 35 nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
26 36
27 pip-packages:
37
38 .PHONY: pip-packages
39 pip-packages: ## show outdated packages
28 40 python ${OUTDATED_PACKAGES}
41
42
43 .PHONY: help
44 help:
45 @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-24s\033[0m %s\n", $$1, $$2}'
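The new help target (now the default goal) turns the `target: ## description` annotations added throughout this Makefile into a sorted, aligned listing. For reference, the grep/awk pipeline is roughly equivalent to this Python, shown purely as an illustration; `make help` itself is the real interface:

    # Collect `target: ## description` annotations, mirroring the awk script:
    # sorted output, description aligned to a 24-character target column.
    import re
    import sys

    PATTERN = re.compile(r'^([a-zA-Z_-]+):.*?## (.*)$')

    def collect_help(makefile='Makefile'):
        entries = []
        with open(makefile) as fh:
            for line in fh:
                match = PATTERN.match(line)
                if match:
                    entries.append(match.groups())
        return sorted(entries)

    if __name__ == '__main__':
        makefile = sys.argv[1] if len(sys.argv) > 1 else 'Makefile'
        for target, description in collect_help(makefile):
            print('%-24s %s' % (target, description))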
@@ -1,1090 +1,1090 @@
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.3.0";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-19.3.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "cffi" = super.buildPythonPackage {
52 52 name = "cffi-1.12.3";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."pycparser"
56 56 ];
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "configobj" = super.buildPythonPackage {
66 66 name = "configobj-5.0.6";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."six"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
73 73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.bsdOriginal ];
77 77 };
78 78 };
79 79 "configparser" = super.buildPythonPackage {
80 80 name = "configparser-4.0.2";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.mit ];
88 88 };
89 89 };
90 90 "contextlib2" = super.buildPythonPackage {
91 91 name = "contextlib2-0.6.0.post1";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.psfl ];
99 99 };
100 100 };
101 101 "cov-core" = super.buildPythonPackage {
102 102 name = "cov-core-1.15.0";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."coverage"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
109 109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.mit ];
113 113 };
114 114 };
115 115 "coverage" = super.buildPythonPackage {
116 116 name = "coverage-4.5.4";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.asl20 ];
124 124 };
125 125 };
126 126 "decorator" = super.buildPythonPackage {
127 127 name = "decorator-4.1.2";
128 128 doCheck = false;
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
131 131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
135 135 };
136 136 };
137 137 "dogpile.cache" = super.buildPythonPackage {
138 138 name = "dogpile.cache-0.9.0";
139 139 doCheck = false;
140 140 propagatedBuildInputs = [
141 141 self."decorator"
142 142 ];
143 143 src = fetchurl {
144 144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
146 146 };
147 147 meta = {
148 148 license = [ pkgs.lib.licenses.bsdOriginal ];
149 149 };
150 150 };
151 151 "dogpile.core" = super.buildPythonPackage {
152 152 name = "dogpile.core-0.4.1";
153 153 doCheck = false;
154 154 src = fetchurl {
155 155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
156 156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
157 157 };
158 158 meta = {
159 159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 160 };
161 161 };
162 162 "dulwich" = super.buildPythonPackage {
163 163 name = "dulwich-0.13.0";
164 164 doCheck = false;
165 165 src = fetchurl {
166 166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
168 168 };
169 169 meta = {
170 170 license = [ pkgs.lib.licenses.gpl2Plus ];
171 171 };
172 172 };
173 173 "enum34" = super.buildPythonPackage {
174 174 name = "enum34-1.1.10";
175 175 doCheck = false;
176 176 src = fetchurl {
177 177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
178 178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 "funcsigs" = super.buildPythonPackage {
185 185 name = "funcsigs-1.0.2";
186 186 doCheck = false;
187 187 src = fetchurl {
188 188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
189 189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
190 190 };
191 191 meta = {
192 192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
193 193 };
194 194 };
195 195 "gevent" = super.buildPythonPackage {
196 196 name = "gevent-1.5.0";
197 197 doCheck = false;
198 198 propagatedBuildInputs = [
199 199 self."greenlet"
200 200 ];
201 201 src = fetchurl {
202 202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
203 203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
204 204 };
205 205 meta = {
206 206 license = [ pkgs.lib.licenses.mit ];
207 207 };
208 208 };
209 209 "gprof2dot" = super.buildPythonPackage {
210 210 name = "gprof2dot-2017.9.19";
211 211 doCheck = false;
212 212 src = fetchurl {
213 213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
214 214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
215 215 };
216 216 meta = {
217 217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
218 218 };
219 219 };
220 220 "greenlet" = super.buildPythonPackage {
221 221 name = "greenlet-0.4.15";
222 222 doCheck = false;
223 223 src = fetchurl {
224 224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
225 225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
226 226 };
227 227 meta = {
228 228 license = [ pkgs.lib.licenses.mit ];
229 229 };
230 230 };
231 231 "gunicorn" = super.buildPythonPackage {
232 232 name = "gunicorn-19.9.0";
233 233 doCheck = false;
234 234 src = fetchurl {
235 235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
236 236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
237 237 };
238 238 meta = {
239 239 license = [ pkgs.lib.licenses.mit ];
240 240 };
241 241 };
242 242 "hg-evolve" = super.buildPythonPackage {
243 243 name = "hg-evolve-9.1.0";
244 244 doCheck = false;
245 245 src = fetchurl {
246 246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
248 248 };
249 249 meta = {
250 250 license = [ { fullName = "GPLv2+"; } ];
251 251 };
252 252 };
253 253 "hgsubversion" = super.buildPythonPackage {
254 254 name = "hgsubversion-1.9.3";
255 255 doCheck = false;
256 256 propagatedBuildInputs = [
257 257 self."mercurial"
258 258 self."subvertpy"
259 259 ];
260 260 src = fetchurl {
261 261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
262 262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
263 263 };
264 264 meta = {
265 265 license = [ pkgs.lib.licenses.gpl1 ];
266 266 };
267 267 };
268 268 "hupper" = super.buildPythonPackage {
269 269 name = "hupper-1.10.2";
270 270 doCheck = false;
271 271 src = fetchurl {
272 272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
273 273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
274 274 };
275 275 meta = {
276 276 license = [ pkgs.lib.licenses.mit ];
277 277 };
278 278 };
279 279 "importlib-metadata" = super.buildPythonPackage {
280 280 name = "importlib-metadata-1.6.0";
281 281 doCheck = false;
282 282 propagatedBuildInputs = [
283 283 self."zipp"
284 284 self."pathlib2"
285 285 self."contextlib2"
286 286 self."configparser"
287 287 ];
288 288 src = fetchurl {
289 289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
290 290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
291 291 };
292 292 meta = {
293 293 license = [ pkgs.lib.licenses.asl20 ];
294 294 };
295 295 };
296 296 "ipdb" = super.buildPythonPackage {
297 297 name = "ipdb-0.13.2";
298 298 doCheck = false;
299 299 propagatedBuildInputs = [
300 300 self."setuptools"
301 301 self."ipython"
302 302 ];
303 303 src = fetchurl {
304 304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
305 305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
306 306 };
307 307 meta = {
308 308 license = [ pkgs.lib.licenses.bsdOriginal ];
309 309 };
310 310 };
311 311 "ipython" = super.buildPythonPackage {
312 312 name = "ipython-5.1.0";
313 313 doCheck = false;
314 314 propagatedBuildInputs = [
315 315 self."setuptools"
316 316 self."decorator"
317 317 self."pickleshare"
318 318 self."simplegeneric"
319 319 self."traitlets"
320 320 self."prompt-toolkit"
321 321 self."pygments"
322 322 self."pexpect"
323 323 self."backports.shutil-get-terminal-size"
324 324 self."pathlib2"
325 325 self."pexpect"
326 326 ];
327 327 src = fetchurl {
328 328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
329 329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
330 330 };
331 331 meta = {
332 332 license = [ pkgs.lib.licenses.bsdOriginal ];
333 333 };
334 334 };
335 335 "ipython-genutils" = super.buildPythonPackage {
336 336 name = "ipython-genutils-0.2.0";
337 337 doCheck = false;
338 338 src = fetchurl {
339 339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
340 340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
341 341 };
342 342 meta = {
343 343 license = [ pkgs.lib.licenses.bsdOriginal ];
344 344 };
345 345 };
346 346 "mako" = super.buildPythonPackage {
347 347 name = "mako-1.1.0";
348 348 doCheck = false;
349 349 propagatedBuildInputs = [
350 350 self."markupsafe"
351 351 ];
352 352 src = fetchurl {
353 353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
355 355 };
356 356 meta = {
357 357 license = [ pkgs.lib.licenses.mit ];
358 358 };
359 359 };
360 360 "markupsafe" = super.buildPythonPackage {
361 361 name = "markupsafe-1.1.1";
362 362 doCheck = false;
363 363 src = fetchurl {
364 364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
366 366 };
367 367 meta = {
368 368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
369 369 };
370 370 };
371 371 "mercurial" = super.buildPythonPackage {
372 372 name = "mercurial-5.1.1";
373 373 doCheck = false;
374 374 src = fetchurl {
375 375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
380 380 };
381 381 };
382 382 "mock" = super.buildPythonPackage {
383 383 name = "mock-3.0.5";
384 384 doCheck = false;
385 385 propagatedBuildInputs = [
386 386 self."six"
387 387 self."funcsigs"
388 388 ];
389 389 src = fetchurl {
390 390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
392 392 };
393 393 meta = {
394 394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
395 395 };
396 396 };
397 397 "more-itertools" = super.buildPythonPackage {
398 398 name = "more-itertools-5.0.0";
399 399 doCheck = false;
400 400 propagatedBuildInputs = [
401 401 self."six"
402 402 ];
403 403 src = fetchurl {
404 404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
405 405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
406 406 };
407 407 meta = {
408 408 license = [ pkgs.lib.licenses.mit ];
409 409 };
410 410 };
411 411 "msgpack-python" = super.buildPythonPackage {
412 412 name = "msgpack-python-0.5.6";
413 413 doCheck = false;
414 414 src = fetchurl {
415 415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
416 416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
417 417 };
418 418 meta = {
419 419 license = [ pkgs.lib.licenses.asl20 ];
420 420 };
421 421 };
422 422 "packaging" = super.buildPythonPackage {
423 423 name = "packaging-20.3";
424 424 doCheck = false;
425 425 propagatedBuildInputs = [
426 426 self."pyparsing"
427 427 self."six"
428 428 ];
429 429 src = fetchurl {
430 430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
431 431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
432 432 };
433 433 meta = {
434 434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 435 };
436 436 };
437 437 "pastedeploy" = super.buildPythonPackage {
438 438 name = "pastedeploy-2.1.0";
439 439 doCheck = false;
440 440 src = fetchurl {
441 441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
442 442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
443 443 };
444 444 meta = {
445 445 license = [ pkgs.lib.licenses.mit ];
446 446 };
447 447 };
448 448 "pathlib2" = super.buildPythonPackage {
449 449 name = "pathlib2-2.3.5";
450 450 doCheck = false;
451 451 propagatedBuildInputs = [
452 452 self."six"
453 453 self."scandir"
454 454 ];
455 455 src = fetchurl {
456 456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
458 458 };
459 459 meta = {
460 460 license = [ pkgs.lib.licenses.mit ];
461 461 };
462 462 };
463 463 "pexpect" = super.buildPythonPackage {
464 464 name = "pexpect-4.8.0";
465 465 doCheck = false;
466 466 propagatedBuildInputs = [
467 467 self."ptyprocess"
468 468 ];
469 469 src = fetchurl {
470 470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
471 471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
472 472 };
473 473 meta = {
474 474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
475 475 };
476 476 };
477 477 "pickleshare" = super.buildPythonPackage {
478 478 name = "pickleshare-0.7.5";
479 479 doCheck = false;
480 480 propagatedBuildInputs = [
481 481 self."pathlib2"
482 482 ];
483 483 src = fetchurl {
484 484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
485 485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
486 486 };
487 487 meta = {
488 488 license = [ pkgs.lib.licenses.mit ];
489 489 };
490 490 };
491 491 "plaster" = super.buildPythonPackage {
492 492 name = "plaster-1.0";
493 493 doCheck = false;
494 494 propagatedBuildInputs = [
495 495 self."setuptools"
496 496 ];
497 497 src = fetchurl {
498 498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
499 499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
500 500 };
501 501 meta = {
502 502 license = [ pkgs.lib.licenses.mit ];
503 503 };
504 504 };
505 505 "plaster-pastedeploy" = super.buildPythonPackage {
506 506 name = "plaster-pastedeploy-0.7";
507 507 doCheck = false;
508 508 propagatedBuildInputs = [
509 509 self."pastedeploy"
510 510 self."plaster"
511 511 ];
512 512 src = fetchurl {
513 513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
514 514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
515 515 };
516 516 meta = {
517 517 license = [ pkgs.lib.licenses.mit ];
518 518 };
519 519 };
520 520 "pluggy" = super.buildPythonPackage {
521 521 name = "pluggy-0.13.1";
522 522 doCheck = false;
523 523 propagatedBuildInputs = [
524 524 self."importlib-metadata"
525 525 ];
526 526 src = fetchurl {
527 527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
529 529 };
530 530 meta = {
531 531 license = [ pkgs.lib.licenses.mit ];
532 532 };
533 533 };
534 534 "prompt-toolkit" = super.buildPythonPackage {
535 535 name = "prompt-toolkit-1.0.18";
536 536 doCheck = false;
537 537 propagatedBuildInputs = [
538 538 self."six"
539 539 self."wcwidth"
540 540 ];
541 541 src = fetchurl {
542 542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
544 544 };
545 545 meta = {
546 546 license = [ pkgs.lib.licenses.bsdOriginal ];
547 547 };
548 548 };
549 549 "psutil" = super.buildPythonPackage {
550 550 name = "psutil-5.7.0";
551 551 doCheck = false;
552 552 src = fetchurl {
553 553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
554 554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
555 555 };
556 556 meta = {
557 557 license = [ pkgs.lib.licenses.bsdOriginal ];
558 558 };
559 559 };
560 560 "ptyprocess" = super.buildPythonPackage {
561 561 name = "ptyprocess-0.6.0";
562 562 doCheck = false;
563 563 src = fetchurl {
564 564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
565 565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
566 566 };
567 567 meta = {
568 568 license = [ ];
569 569 };
570 570 };
571 571 "py" = super.buildPythonPackage {
572 572 name = "py-1.8.0";
573 573 doCheck = false;
574 574 src = fetchurl {
575 575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
577 577 };
578 578 meta = {
579 579 license = [ pkgs.lib.licenses.mit ];
580 580 };
581 581 };
582 582 "pycparser" = super.buildPythonPackage {
583 583 name = "pycparser-2.20";
584 584 doCheck = false;
585 585 src = fetchurl {
586 586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
587 587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
588 588 };
589 589 meta = {
590 590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 591 };
592 592 };
593 593 "pygit2" = super.buildPythonPackage {
594 594 name = "pygit2-0.28.2";
595 595 doCheck = false;
596 596 propagatedBuildInputs = [
597 597 self."cffi"
598 598 self."six"
599 599 ];
600 600 src = fetchurl {
601 601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 603 };
604 604 meta = {
605 605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 606 };
607 607 };
608 608 "pygments" = super.buildPythonPackage {
609 609 name = "pygments-2.4.2";
610 610 doCheck = false;
611 611 src = fetchurl {
612 612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
613 613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
614 614 };
615 615 meta = {
616 616 license = [ pkgs.lib.licenses.bsdOriginal ];
617 617 };
618 618 };
619 619 "pyparsing" = super.buildPythonPackage {
620 620 name = "pyparsing-2.4.7";
621 621 doCheck = false;
622 622 src = fetchurl {
623 623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
624 624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
625 625 };
626 626 meta = {
627 627 license = [ pkgs.lib.licenses.mit ];
628 628 };
629 629 };
630 630 "pyramid" = super.buildPythonPackage {
631 631 name = "pyramid-1.10.4";
632 632 doCheck = false;
633 633 propagatedBuildInputs = [
634 634 self."hupper"
635 635 self."plaster"
636 636 self."plaster-pastedeploy"
637 637 self."setuptools"
638 638 self."translationstring"
639 639 self."venusian"
640 640 self."webob"
641 641 self."zope.deprecation"
642 642 self."zope.interface"
643 643 self."repoze.lru"
644 644 ];
645 645 src = fetchurl {
646 646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
647 647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
648 648 };
649 649 meta = {
650 650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
651 651 };
652 652 };
653 653 "pyramid-mako" = super.buildPythonPackage {
654 654 name = "pyramid-mako-1.1.0";
655 655 doCheck = false;
656 656 propagatedBuildInputs = [
657 657 self."pyramid"
658 658 self."mako"
659 659 ];
660 660 src = fetchurl {
661 661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
663 663 };
664 664 meta = {
665 665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
666 666 };
667 667 };
668 668 "pytest" = super.buildPythonPackage {
669 669 name = "pytest-4.6.5";
670 670 doCheck = false;
671 671 propagatedBuildInputs = [
672 672 self."py"
673 673 self."six"
674 674 self."packaging"
675 675 self."attrs"
676 676 self."atomicwrites"
677 677 self."pluggy"
678 678 self."importlib-metadata"
679 679 self."wcwidth"
680 680 self."funcsigs"
681 681 self."pathlib2"
682 682 self."more-itertools"
683 683 ];
684 684 src = fetchurl {
685 685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
687 687 };
688 688 meta = {
689 689 license = [ pkgs.lib.licenses.mit ];
690 690 };
691 691 };
692 692 "pytest-cov" = super.buildPythonPackage {
693 693 name = "pytest-cov-2.7.1";
694 694 doCheck = false;
695 695 propagatedBuildInputs = [
696 696 self."pytest"
697 697 self."coverage"
698 698 ];
699 699 src = fetchurl {
700 700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
702 702 };
703 703 meta = {
704 704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
705 705 };
706 706 };
707 707 "pytest-profiling" = super.buildPythonPackage {
708 708 name = "pytest-profiling-1.7.0";
709 709 doCheck = false;
710 710 propagatedBuildInputs = [
711 711 self."six"
712 712 self."pytest"
713 713 self."gprof2dot"
714 714 ];
715 715 src = fetchurl {
716 716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
718 718 };
719 719 meta = {
720 720 license = [ pkgs.lib.licenses.mit ];
721 721 };
722 722 };
723 723 "pytest-runner" = super.buildPythonPackage {
724 724 name = "pytest-runner-5.1";
725 725 doCheck = false;
726 726 src = fetchurl {
727 727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
729 729 };
730 730 meta = {
731 731 license = [ pkgs.lib.licenses.mit ];
732 732 };
733 733 };
734 734 "pytest-sugar" = super.buildPythonPackage {
735 735 name = "pytest-sugar-0.9.2";
736 736 doCheck = false;
737 737 propagatedBuildInputs = [
738 738 self."pytest"
739 739 self."termcolor"
740 740 self."packaging"
741 741 ];
742 742 src = fetchurl {
743 743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
745 745 };
746 746 meta = {
747 747 license = [ pkgs.lib.licenses.bsdOriginal ];
748 748 };
749 749 };
750 750 "pytest-timeout" = super.buildPythonPackage {
751 751 name = "pytest-timeout-1.3.3";
752 752 doCheck = false;
753 753 propagatedBuildInputs = [
754 754 self."pytest"
755 755 ];
756 756 src = fetchurl {
757 757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
759 759 };
760 760 meta = {
761 761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
762 762 };
763 763 };
764 764 "redis" = super.buildPythonPackage {
765 name = "redis-3.4.1";
765 name = "redis-3.5.3";
766 766 doCheck = false;
767 767 src = fetchurl {
768 url = "https://files.pythonhosted.org/packages/ef/2e/2c0f59891db7db087a7eeaa79bc7c7f2c039e71a2b5b0a41391e9d462926/redis-3.4.1.tar.gz";
769 sha256 = "07yaj0j9fs7xdkg5bg926fa990khyigjbp31si8ai20vj8sv7kqd";
768 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
769 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
770 770 };
771 771 meta = {
772 772 license = [ pkgs.lib.licenses.mit ];
773 773 };
774 774 };
775 775 "repoze.lru" = super.buildPythonPackage {
776 776 name = "repoze.lru-0.7";
777 777 doCheck = false;
778 778 src = fetchurl {
779 779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
780 780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
781 781 };
782 782 meta = {
783 783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
784 784 };
785 785 };
786 786 "rhodecode-vcsserver" = super.buildPythonPackage {
787 name = "rhodecode-vcsserver-4.23.2";
787 name = "rhodecode-vcsserver-4.24.0";
788 788 buildInputs = [
789 789 self."pytest"
790 790 self."py"
791 791 self."pytest-cov"
792 792 self."pytest-sugar"
793 793 self."pytest-runner"
794 794 self."pytest-profiling"
795 795 self."pytest-timeout"
796 796 self."gprof2dot"
797 797 self."mock"
798 798 self."cov-core"
799 799 self."coverage"
800 800 self."webtest"
801 801 self."beautifulsoup4"
802 802 self."configobj"
803 803 ];
804 804 doCheck = true;
805 805 propagatedBuildInputs = [
806 806 self."configobj"
807 807 self."dogpile.cache"
808 808 self."dogpile.core"
809 809 self."decorator"
810 810 self."dulwich"
811 811 self."hgsubversion"
812 812 self."hg-evolve"
813 813 self."mako"
814 814 self."markupsafe"
815 815 self."mercurial"
816 816 self."msgpack-python"
817 817 self."pastedeploy"
818 818 self."pyramid"
819 819 self."pyramid-mako"
820 820 self."pygit2"
821 821 self."repoze.lru"
822 822 self."redis"
823 823 self."simplejson"
824 824 self."subprocess32"
825 825 self."subvertpy"
826 826 self."six"
827 827 self."translationstring"
828 828 self."webob"
829 829 self."zope.deprecation"
830 830 self."zope.interface"
831 831 self."gevent"
832 832 self."greenlet"
833 833 self."gunicorn"
834 834 self."waitress"
835 835 self."ipdb"
836 836 self."ipython"
837 837 self."pytest"
838 838 self."py"
839 839 self."pytest-cov"
840 840 self."pytest-sugar"
841 841 self."pytest-runner"
842 842 self."pytest-profiling"
843 843 self."pytest-timeout"
844 844 self."gprof2dot"
845 845 self."mock"
846 846 self."cov-core"
847 847 self."coverage"
848 848 self."webtest"
849 849 self."beautifulsoup4"
850 850 ];
851 851 src = ./.;
852 852 meta = {
853 853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
854 854 };
855 855 };
856 856 "scandir" = super.buildPythonPackage {
857 857 name = "scandir-1.10.0";
858 858 doCheck = false;
859 859 src = fetchurl {
860 860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
861 861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
862 862 };
863 863 meta = {
864 864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
865 865 };
866 866 };
867 867 "setproctitle" = super.buildPythonPackage {
868 868 name = "setproctitle-1.1.10";
869 869 doCheck = false;
870 870 src = fetchurl {
871 871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
872 872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
873 873 };
874 874 meta = {
875 875 license = [ pkgs.lib.licenses.bsdOriginal ];
876 876 };
877 877 };
878 878 "setuptools" = super.buildPythonPackage {
879 879 name = "setuptools-44.1.0";
880 880 doCheck = false;
881 881 src = fetchurl {
882 882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
883 883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
884 884 };
885 885 meta = {
886 886 license = [ pkgs.lib.licenses.mit ];
887 887 };
888 888 };
889 889 "simplegeneric" = super.buildPythonPackage {
890 890 name = "simplegeneric-0.8.1";
891 891 doCheck = false;
892 892 src = fetchurl {
893 893 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
894 894 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
895 895 };
896 896 meta = {
897 897 license = [ pkgs.lib.licenses.zpl21 ];
898 898 };
899 899 };
900 900 "simplejson" = super.buildPythonPackage {
901 901 name = "simplejson-3.16.0";
902 902 doCheck = false;
903 903 src = fetchurl {
904 904 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
905 905 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
906 906 };
907 907 meta = {
908 908 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
909 909 };
910 910 };
911 911 "six" = super.buildPythonPackage {
912 912 name = "six-1.11.0";
913 913 doCheck = false;
914 914 src = fetchurl {
915 915 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
916 916 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
917 917 };
918 918 meta = {
919 919 license = [ pkgs.lib.licenses.mit ];
920 920 };
921 921 };
922 922 "subprocess32" = super.buildPythonPackage {
923 923 name = "subprocess32-3.5.4";
924 924 doCheck = false;
925 925 src = fetchurl {
926 926 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
927 927 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
928 928 };
929 929 meta = {
930 930 license = [ pkgs.lib.licenses.psfl ];
931 931 };
932 932 };
933 933 "subvertpy" = super.buildPythonPackage {
934 934 name = "subvertpy-0.10.1";
935 935 doCheck = false;
936 936 src = fetchurl {
937 937 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
938 938 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
939 939 };
940 940 meta = {
941 941 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
942 942 };
943 943 };
944 944 "termcolor" = super.buildPythonPackage {
945 945 name = "termcolor-1.1.0";
946 946 doCheck = false;
947 947 src = fetchurl {
948 948 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
949 949 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
950 950 };
951 951 meta = {
952 952 license = [ pkgs.lib.licenses.mit ];
953 953 };
954 954 };
955 955 "traitlets" = super.buildPythonPackage {
956 956 name = "traitlets-4.3.3";
957 957 doCheck = false;
958 958 propagatedBuildInputs = [
959 959 self."ipython-genutils"
960 960 self."six"
961 961 self."decorator"
962 962 self."enum34"
963 963 ];
964 964 src = fetchurl {
965 965 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
966 966 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
967 967 };
968 968 meta = {
969 969 license = [ pkgs.lib.licenses.bsdOriginal ];
970 970 };
971 971 };
972 972 "translationstring" = super.buildPythonPackage {
973 973 name = "translationstring-1.3";
974 974 doCheck = false;
975 975 src = fetchurl {
976 976 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
977 977 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
978 978 };
979 979 meta = {
980 980 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
981 981 };
982 982 };
983 983 "venusian" = super.buildPythonPackage {
984 984 name = "venusian-1.2.0";
985 985 doCheck = false;
986 986 src = fetchurl {
987 987 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
988 988 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
989 989 };
990 990 meta = {
991 991 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
992 992 };
993 993 };
994 994 "waitress" = super.buildPythonPackage {
995 995 name = "waitress-1.3.1";
996 996 doCheck = false;
997 997 src = fetchurl {
998 998 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
999 999 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1000 1000 };
1001 1001 meta = {
1002 1002 license = [ pkgs.lib.licenses.zpl21 ];
1003 1003 };
1004 1004 };
1005 1005 "wcwidth" = super.buildPythonPackage {
1006 1006 name = "wcwidth-0.1.9";
1007 1007 doCheck = false;
1008 1008 src = fetchurl {
1009 1009 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1010 1010 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1011 1011 };
1012 1012 meta = {
1013 1013 license = [ pkgs.lib.licenses.mit ];
1014 1014 };
1015 1015 };
1016 1016 "webob" = super.buildPythonPackage {
1017 1017 name = "webob-1.8.5";
1018 1018 doCheck = false;
1019 1019 src = fetchurl {
1020 1020 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1021 1021 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1022 1022 };
1023 1023 meta = {
1024 1024 license = [ pkgs.lib.licenses.mit ];
1025 1025 };
1026 1026 };
1027 1027 "webtest" = super.buildPythonPackage {
1028 1028 name = "webtest-2.0.34";
1029 1029 doCheck = false;
1030 1030 propagatedBuildInputs = [
1031 1031 self."six"
1032 1032 self."webob"
1033 1033 self."waitress"
1034 1034 self."beautifulsoup4"
1035 1035 ];
1036 1036 src = fetchurl {
1037 1037 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1038 1038 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1039 1039 };
1040 1040 meta = {
1041 1041 license = [ pkgs.lib.licenses.mit ];
1042 1042 };
1043 1043 };
1044 1044 "zipp" = super.buildPythonPackage {
1045 1045 name = "zipp-1.2.0";
1046 1046 doCheck = false;
1047 1047 propagatedBuildInputs = [
1048 1048 self."contextlib2"
1049 1049 ];
1050 1050 src = fetchurl {
1051 1051 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1052 1052 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1053 1053 };
1054 1054 meta = {
1055 1055 license = [ pkgs.lib.licenses.mit ];
1056 1056 };
1057 1057 };
1058 1058 "zope.deprecation" = super.buildPythonPackage {
1059 1059 name = "zope.deprecation-4.4.0";
1060 1060 doCheck = false;
1061 1061 propagatedBuildInputs = [
1062 1062 self."setuptools"
1063 1063 ];
1064 1064 src = fetchurl {
1065 1065 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1066 1066 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1067 1067 };
1068 1068 meta = {
1069 1069 license = [ pkgs.lib.licenses.zpl21 ];
1070 1070 };
1071 1071 };
1072 1072 "zope.interface" = super.buildPythonPackage {
1073 1073 name = "zope.interface-4.6.0";
1074 1074 doCheck = false;
1075 1075 propagatedBuildInputs = [
1076 1076 self."setuptools"
1077 1077 ];
1078 1078 src = fetchurl {
1079 1079 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1080 1080 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1081 1081 };
1082 1082 meta = {
1083 1083 license = [ pkgs.lib.licenses.zpl21 ];
1084 1084 };
1085 1085 };
1086 1086
1087 1087 ### Test requirements
1088 1088
1089 1089
1090 1090 }
@@ -1,48 +1,48 @@
1 1 ## dependencies
2 2
3 3 # our custom configobj
4 4 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
5 5
6 6 dogpile.cache==0.9.0
7 7 dogpile.core==0.4.1
8 8 decorator==4.1.2
9 9 dulwich==0.13.0
10 10 hgsubversion==1.9.3
11 11 hg-evolve==9.1.0
12 12 mako==1.1.0
13 13 markupsafe==1.1.1
14 14 mercurial==5.1.1
15 15 msgpack-python==0.5.6
16 16
17 17 pastedeploy==2.1.0
18 18 pyramid==1.10.4
19 19 pyramid-mako==1.1.0
20 20 pygit2==0.28.2
21 21
22 22 repoze.lru==0.7
23 redis==3.4.1
23 redis==3.5.3
24 24 simplejson==3.16.0
25 25 subprocess32==3.5.4
26 26 subvertpy==0.10.1
27 27
28 28 six==1.11.0
29 29 translationstring==1.3
30 30 webob==1.8.5
31 31 zope.deprecation==4.4.0
32 32 zope.interface==4.6.0
33 33
34 34 ## http servers
35 35 gevent==1.5.0
36 36 greenlet==0.4.15
37 37 gunicorn==19.9.0
38 38 waitress==1.3.1
39 39
40 40 ## debug
41 41 ipdb==0.13.2
42 42 ipython==5.1.0
43 43
44 44 ## test related requirements
45 45 -r requirements_test.txt
46 46
47 47 ## uncomment to add the debug libraries
48 48 #-r requirements_debug.txt
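Note that the redis pin is bumped from 3.4.1 to 3.5.3 here in lockstep with the pip2nix-generated overlay above; the two files have to agree or the nix build will not match the declared requirements. Below is a hedged sketch of a consistency check one could run; both file names are assumptions based on convention, not paths confirmed by this diff:

    # Hypothetical cross-check that every pin present in both files agrees.
    import re

    def requirement_pins(path='requirements.txt'):
        pins = {}
        for line in open(path):
            match = re.match(r'^([A-Za-z0-9_.-]+)==([^\s#]+)', line.strip())
            if match:
                pins[match.group(1).lower()] = match.group(2)
        return pins

    def nix_pins(path='pkgs/python-packages.nix'):
        text = open(path).read()
        return {name.lower(): version for name, version in
                re.findall(r'name = "([A-Za-z0-9_.-]+)-([0-9][^"]*)";', text)}

    def mismatches():
        reqs, nix = requirement_pins(), nix_pins()
        return {name: (version, nix[name]) for name, version in reqs.items()
                if name in nix and nix[name] != version}

    if __name__ == '__main__':
        for name, (req_version, nix_version) in sorted(mismatches().items()):
            print('%s: requirements=%s nix=%s' % (name, req_version, nix_version))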
@@ -1,1 +1,1 @@
1 4.23.2 No newline at end of file
1 4.24.0 No newline at end of file
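The main change visible in the hunk below is that the max-file-descriptor logging in VCS.__init__ moves into a new log_max_fd() helper: it uses psutil (imported near the top of the file) and swallows any failure, so startup no longer depends on the rlimit lookup succeeding. For comparison, a standard-library-only sketch of the same information, not the project's code:

    # Standard-library sketch of what log_max_fd() reports; the project's
    # helper uses psutil.Process().rlimit(psutil.RLIMIT_NOFILE) instead,
    # wrapped in try/except so a missing psutil or an unsupported platform
    # does not break startup.
    import logging
    import resource

    log = logging.getLogger(__name__)

    def log_max_fd_stdlib():
        try:
            _soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
            log.info('Max file descriptors value: %s', hard)
        except Exception:
            pass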
@@ -1,692 +1,700 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 import resource
28 import psutil
28 29 from itertools import chain
29 30 from cStringIO import StringIO
30 31
31 32 import simplejson as json
32 33 import msgpack
33 34 from pyramid.config import Configurator
34 35 from pyramid.settings import asbool, aslist
35 36 from pyramid.wsgi import wsgiapp
36 37 from pyramid.compat import configparser
37 38 from pyramid.response import Response
38 39
39 40 from vcsserver.utils import safe_int
40 41
41 42 log = logging.getLogger(__name__)
42 43
43 44 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
44 45 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
45 46
46 47 try:
47 48 locale.setlocale(locale.LC_ALL, '')
48 49 except locale.Error as e:
49 50 log.error(
50 51 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
51 52 os.environ['LC_ALL'] = 'C'
52 53
53 54 import vcsserver
54 55 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
55 56 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
56 57 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
57 58 from vcsserver.echo_stub.echo_app import EchoApp
58 59 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
59 60 from vcsserver.lib.exc_tracking import store_exception
60 61 from vcsserver.server import VcsServer
61 62
62 63 try:
63 64 from vcsserver.git import GitFactory, GitRemote
64 65 except ImportError:
65 66 GitFactory = None
66 67 GitRemote = None
67 68
68 69 try:
69 70 from vcsserver.hg import MercurialFactory, HgRemote
70 71 except ImportError:
71 72 MercurialFactory = None
72 73 HgRemote = None
73 74
74 75 try:
75 76 from vcsserver.svn import SubversionFactory, SvnRemote
76 77 except ImportError:
77 78 SubversionFactory = None
78 79 SvnRemote = None
79 80
80 81
81 82 def _is_request_chunked(environ):
82 83 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
83 84 return stream
84 85
85 86
86 87 def _int_setting(settings, name, default):
87 88 settings[name] = int(settings.get(name, default))
88 89 return settings[name]
89 90
90 91
91 92 def _bool_setting(settings, name, default):
92 93 input_val = settings.get(name, default)
93 94 if isinstance(input_val, unicode):
94 95 input_val = input_val.encode('utf8')
95 96 settings[name] = asbool(input_val)
96 97 return settings[name]
97 98
98 99
99 100 def _list_setting(settings, name, default):
100 101 raw_value = settings.get(name, default)
101 102
102 103 # Otherwise we assume it uses pyramid's space/newline separation.
103 104 settings[name] = aslist(raw_value)
104 105 return settings[name]
105 106
106 107
107 108 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
108 109 value = settings.get(name, default)
109 110
110 111 if default_when_empty and not value:
111 112 # use default value when value is empty
112 113 value = default
113 114
114 115 if lower:
115 116 value = value.lower()
116 117 settings[name] = value
117 118 return settings[name]
118 119
119 120
121 def log_max_fd():
122 try:
123 maxfd = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)[1]
124 log.info('Max file descriptors value: %s', maxfd)
125 except Exception:
126 pass
127
128
120 129 class VCS(object):
121 130 def __init__(self, locale_conf=None, cache_config=None):
122 131 self.locale = locale_conf
123 132 self.cache_config = cache_config
124 133 self._configure_locale()
125 134
126 maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
127 log.info('Max file descriptors value: %s', maxfd)
135 log_max_fd()
128 136
129 137 if GitFactory and GitRemote:
130 138 git_factory = GitFactory()
131 139 self._git_remote = GitRemote(git_factory)
132 140 else:
133 141 log.info("Git client import failed")
134 142
135 143 if MercurialFactory and HgRemote:
136 144 hg_factory = MercurialFactory()
137 145 self._hg_remote = HgRemote(hg_factory)
138 146 else:
139 147 log.info("Mercurial client import failed")
140 148
141 149 if SubversionFactory and SvnRemote:
142 150 svn_factory = SubversionFactory()
143 151
144 152 # hg factory is used for svn url validation
145 153 hg_factory = MercurialFactory()
146 154 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
147 155 else:
148 156 log.info("Subversion client import failed")
149 157
150 158 self._vcsserver = VcsServer()
151 159
152 160 def _configure_locale(self):
153 161 if self.locale:
154 162 log.info('Settings locale: `LC_ALL` to %s', self.locale)
155 163 else:
156 164 log.info(
157 165 'Configuring locale subsystem based on environment variables')
158 166 try:
159 167 # If self.locale is the empty string, then the locale
160 168 # module will use the environment variables. See the
161 169 # documentation of the package `locale`.
162 170 locale.setlocale(locale.LC_ALL, self.locale)
163 171
164 172 language_code, encoding = locale.getlocale()
165 173 log.info(
166 174 'Locale set to language code "%s" with encoding "%s".',
167 175 language_code, encoding)
168 176 except locale.Error:
169 177 log.exception(
170 178 'Cannot set locale, not configuring the locale system')
171 179
172 180
173 181 class WsgiProxy(object):
174 182 def __init__(self, wsgi):
175 183 self.wsgi = wsgi
176 184
177 185 def __call__(self, environ, start_response):
178 186 input_data = environ['wsgi.input'].read()
179 187 input_data = msgpack.unpackb(input_data)
180 188
181 189 error = None
182 190 try:
183 191 data, status, headers = self.wsgi.handle(
184 192 input_data['environment'], input_data['input_data'],
185 193 *input_data['args'], **input_data['kwargs'])
186 194 except Exception as e:
187 195 data, status, headers = [], None, None
188 196 error = {
189 197 'message': str(e),
190 198 '_vcs_kind': getattr(e, '_vcs_kind', None)
191 199 }
192 200
193 201 start_response(200, {})
194 202 return self._iterator(error, status, headers, data)
195 203
196 204 def _iterator(self, error, status, headers, data):
197 205 initial_data = [
198 206 error,
199 207 status,
200 208 headers,
201 209 ]
202 210
203 211 for d in chain(initial_data, data):
204 212 yield msgpack.packb(d)
205 213
206 214
207 215 def not_found(request):
208 216 return {'status': '404 NOT FOUND'}
209 217
210 218
211 219 class VCSViewPredicate(object):
212 220 def __init__(self, val, config):
213 221 self.remotes = val
214 222
215 223 def text(self):
216 224 return 'vcs view method = %s' % (self.remotes.keys(),)
217 225
218 226 phash = text
219 227
220 228 def __call__(self, context, request):
221 229 """
222 230 View predicate that returns true if given backend is supported by
223 231 defined remotes.
224 232 """
225 233 backend = request.matchdict.get('backend')
226 234 return backend in self.remotes
227 235
228 236
229 237 class HTTPApplication(object):
230 238 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
231 239
232 240 remote_wsgi = remote_wsgi
233 241 _use_echo_app = False
234 242
235 243 def __init__(self, settings=None, global_config=None):
236 244 self._sanitize_settings_and_apply_defaults(settings)
237 245
238 246 self.config = Configurator(settings=settings)
239 247 self.global_config = global_config
240 248 self.config.include('vcsserver.lib.rc_cache')
241 249
242 250 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
243 251 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
244 252 self._remotes = {
245 253 'hg': vcs._hg_remote,
246 254 'git': vcs._git_remote,
247 255 'svn': vcs._svn_remote,
248 256 'server': vcs._vcsserver,
249 257 }
250 258 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
251 259 self._use_echo_app = True
252 260 log.warning("Using EchoApp for VCS operations.")
253 261 self.remote_wsgi = remote_wsgi_stub
254 262
255 263 self._configure_settings(global_config, settings)
256 264 self._configure()
257 265
258 266 def _configure_settings(self, global_config, app_settings):
259 267 """
260 268 Configure the settings module.
261 269 """
262 270 settings_merged = global_config.copy()
263 271 settings_merged.update(app_settings)
264 272
265 273 git_path = app_settings.get('git_path', None)
266 274 if git_path:
267 275 settings.GIT_EXECUTABLE = git_path
268 276 binary_dir = app_settings.get('core.binary_dir', None)
269 277 if binary_dir:
270 278 settings.BINARY_DIR = binary_dir
271 279
272 280 # Store the settings to make them available to other modules.
273 281 vcsserver.PYRAMID_SETTINGS = settings_merged
274 282 vcsserver.CONFIG = settings_merged
275 283
276 284 def _sanitize_settings_and_apply_defaults(self, settings):
277 285 temp_store = tempfile.gettempdir()
278 286 default_cache_dir = os.path.join(temp_store, 'rc_cache')
279 287
280 288 # save default, cache dir, and use it for all backends later.
281 289 default_cache_dir = _string_setting(
282 290 settings,
283 291 'cache_dir',
284 292 default_cache_dir, lower=False, default_when_empty=True)
285 293
286 294 # ensure we have our dir created
287 295 if not os.path.isdir(default_cache_dir):
288 296 os.makedirs(default_cache_dir, mode=0o755)
289 297
290 298 # exception store cache
291 299 _string_setting(
292 300 settings,
293 301 'exception_tracker.store_path',
294 302 temp_store, lower=False, default_when_empty=True)
295 303
296 304 # repo_object cache
297 305 _string_setting(
298 306 settings,
299 307 'rc_cache.repo_object.backend',
300 308 'dogpile.cache.rc.file_namespace', lower=False)
301 309 _int_setting(
302 310 settings,
303 311 'rc_cache.repo_object.expiration_time',
304 312 30 * 24 * 60 * 60)
305 313 _string_setting(
306 314 settings,
307 315 'rc_cache.repo_object.arguments.filename',
308 316 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
309 317
310 318 def _configure(self):
311 319 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
312 320
313 321 self.config.add_route('service', '/_service')
314 322 self.config.add_route('status', '/status')
315 323 self.config.add_route('hg_proxy', '/proxy/hg')
316 324 self.config.add_route('git_proxy', '/proxy/git')
317 325
318 326 # rpc methods
319 327 self.config.add_route('vcs', '/{backend}')
320 328
321 329 # streaming rpc remote methods
322 330 self.config.add_route('vcs_stream', '/{backend}/stream')
323 331
324 332 # vcs operations clone/push as streaming
325 333 self.config.add_route('stream_git', '/stream/git/*repo_name')
326 334 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
327 335
328 336 self.config.add_view(self.status_view, route_name='status', renderer='json')
329 337 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
330 338
331 339 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
332 340 self.config.add_view(self.git_proxy(), route_name='git_proxy')
333 341 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
334 342 vcs_view=self._remotes)
335 343 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
336 344 vcs_view=self._remotes)
337 345
338 346 self.config.add_view(self.hg_stream(), route_name='stream_hg')
339 347 self.config.add_view(self.git_stream(), route_name='stream_git')
340 348
341 349 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
342 350
343 351 self.config.add_notfound_view(not_found, renderer='json')
344 352
345 353 self.config.add_view(self.handle_vcs_exception, context=Exception)
346 354
347 355 self.config.add_tween(
348 356 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
349 357 )
350 358 self.config.add_request_method(
351 359 'vcsserver.lib.request_counter.get_request_counter',
352 360 'request_count')
353 361
354 362 def wsgi_app(self):
355 363 return self.config.make_wsgi_app()
356 364
357 365 def _vcs_view_params(self, request):
358 366 remote = self._remotes[request.matchdict['backend']]
359 367 payload = msgpack.unpackb(request.body, use_list=True)
360 368 method = payload.get('method')
361 369 params = payload['params']
362 370 wire = params.get('wire')
363 371 args = params.get('args')
364 372 kwargs = params.get('kwargs')
365 373 context_uid = None
366 374
367 375 if wire:
368 376 try:
369 377 wire['context'] = context_uid = uuid.UUID(wire['context'])
370 378 except KeyError:
371 379 pass
372 380 args.insert(0, wire)
373 381 repo_state_uid = wire.get('repo_state_uid') if wire else None
374 382
375 383 # NOTE(marcink): trading complexity for slight performance
376 384 if log.isEnabledFor(logging.DEBUG):
377 385 no_args_methods = [
378 386
379 387 ]
380 388 if method in no_args_methods:
381 389 call_args = ''
382 390 else:
383 391 call_args = args[1:]
384 392
385 393 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
386 394 method, call_args, kwargs, context_uid, repo_state_uid)
387 395
388 396 return payload, remote, method, args, kwargs
389 397
390 398 def vcs_view(self, request):
391 399
392 400 payload, remote, method, args, kwargs = self._vcs_view_params(request)
393 401 payload_id = payload.get('id')
394 402
395 403 try:
396 404 resp = getattr(remote, method)(*args, **kwargs)
397 405 except Exception as e:
398 406 exc_info = list(sys.exc_info())
399 407 exc_type, exc_value, exc_traceback = exc_info
400 408
401 409 org_exc = getattr(e, '_org_exc', None)
402 410 org_exc_name = None
403 411 org_exc_tb = ''
404 412 if org_exc:
405 413 org_exc_name = org_exc.__class__.__name__
406 414 org_exc_tb = getattr(e, '_org_exc_tb', '')
407 415 # replace our "faked" exception with the original one
408 416 exc_info[0] = org_exc.__class__
409 417 exc_info[1] = org_exc
410 418
411 419 should_store_exc = True
412 420 if org_exc:
413 421 def get_exc_fqn(_exc_obj):
414 422 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
415 423 return module_name + '.' + org_exc_name
416 424
417 425 exc_fqn = get_exc_fqn(org_exc)
418 426
419 427 if exc_fqn in ['mercurial.error.RepoLookupError',
420 428 'vcsserver.exceptions.RefNotFoundException']:
421 429 should_store_exc = False
422 430
423 431 if should_store_exc:
424 432 store_exception(id(exc_info), exc_info)
425 433
426 434 tb_info = ''.join(
427 435 traceback.format_exception(exc_type, exc_value, exc_traceback))
428 436
429 437 type_ = e.__class__.__name__
430 438 if type_ not in self.ALLOWED_EXCEPTIONS:
431 439 type_ = None
432 440
433 441 resp = {
434 442 'id': payload_id,
435 443 'error': {
436 444 'message': e.message,
437 445 'traceback': tb_info,
438 446 'org_exc': org_exc_name,
439 447 'org_exc_tb': org_exc_tb,
440 448 'type': type_
441 449 }
442 450 }
443 451 try:
444 452 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
445 453 except AttributeError:
446 454 pass
447 455 else:
448 456 resp = {
449 457 'id': payload_id,
450 458 'result': resp
451 459 }
452 460
453 461 return resp
454 462
455 463 def vcs_stream_view(self, request):
456 464 payload, remote, method, args, kwargs = self._vcs_view_params(request)
457 465 # this method has a 'stream:' marker which we remove here
458 466 method = method.split('stream:')[-1]
459 467 chunk_size = safe_int(payload.get('chunk_size')) or 4096
460 468
461 469 try:
462 470 resp = getattr(remote, method)(*args, **kwargs)
463 471 except Exception as e:
464 472 raise
465 473
466 474 def get_chunked_data(method_resp):
467 475 stream = StringIO(method_resp)
468 476 while 1:
469 477 chunk = stream.read(chunk_size)
470 478 if not chunk:
471 479 break
472 480 yield chunk
473 481
474 482 response = Response(app_iter=get_chunked_data(resp))
475 483 response.content_type = 'application/octet-stream'
476 484
477 485 return response
478 486
479 487 def status_view(self, request):
480 488 import vcsserver
481 489 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
482 490 'pid': os.getpid()}
483 491
484 492 def service_view(self, request):
485 493 import vcsserver
486 494
487 495 payload = msgpack.unpackb(request.body, use_list=True)
488 496 server_config, app_config = {}, {}
489 497
490 498 try:
491 499 path = self.global_config['__file__']
492 500 config = configparser.RawConfigParser()
493 501
494 502 config.read(path)
495 503
496 504 if config.has_section('server:main'):
497 505 server_config = dict(config.items('server:main'))
498 506 if config.has_section('app:main'):
499 507 app_config = dict(config.items('app:main'))
500 508
501 509 except Exception:
502 510 log.exception('Failed to read .ini file for display')
503 511
504 512 environ = os.environ.items()
505 513
506 514 resp = {
507 515 'id': payload.get('id'),
508 516 'result': dict(
509 517 version=vcsserver.__version__,
510 518 config=server_config,
511 519 app_config=app_config,
512 520 environ=environ,
513 521 payload=payload,
514 522 )
515 523 }
516 524 return resp
517 525
518 526 def _msgpack_renderer_factory(self, info):
519 527 def _render(value, system):
520 528 request = system.get('request')
521 529 if request is not None:
522 530 response = request.response
523 531 ct = response.content_type
524 532 if ct == response.default_content_type:
525 533 response.content_type = 'application/x-msgpack'
526 534 return msgpack.packb(value)
527 535 return _render
528 536
529 537 def set_env_from_config(self, environ, config):
530 538 dict_conf = {}
531 539 try:
532 540 for elem in config:
533 541 if elem[0] == 'rhodecode':
534 542 dict_conf = json.loads(elem[2])
535 543 break
536 544 except Exception:
537 545 log.exception('Failed to fetch SCM CONFIG')
538 546 return
539 547
540 548 username = dict_conf.get('username')
541 549 if username:
542 550 environ['REMOTE_USER'] = username
543 551 # mercurial specific, some extension APIs rely on this
544 552 environ['HGUSER'] = username
545 553
546 554 ip = dict_conf.get('ip')
547 555 if ip:
548 556 environ['REMOTE_HOST'] = ip
549 557
550 558 if _is_request_chunked(environ):
551 559 # set the compatibility flag for webob
552 560 environ['wsgi.input_terminated'] = True
553 561
554 562 def hg_proxy(self):
555 563 @wsgiapp
556 564 def _hg_proxy(environ, start_response):
557 565 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
558 566 return app(environ, start_response)
559 567 return _hg_proxy
560 568
561 569 def git_proxy(self):
562 570 @wsgiapp
563 571 def _git_proxy(environ, start_response):
564 572 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
565 573 return app(environ, start_response)
566 574 return _git_proxy
567 575
568 576 def hg_stream(self):
569 577 if self._use_echo_app:
570 578 @wsgiapp
571 579 def _hg_stream(environ, start_response):
572 580 app = EchoApp('fake_path', 'fake_name', None)
573 581 return app(environ, start_response)
574 582 return _hg_stream
575 583 else:
576 584 @wsgiapp
577 585 def _hg_stream(environ, start_response):
578 586 log.debug('http-app: handling hg stream')
579 587 repo_path = environ['HTTP_X_RC_REPO_PATH']
580 588 repo_name = environ['HTTP_X_RC_REPO_NAME']
581 589 packed_config = base64.b64decode(
582 590 environ['HTTP_X_RC_REPO_CONFIG'])
583 591 config = msgpack.unpackb(packed_config)
584 592 app = scm_app.create_hg_wsgi_app(
585 593 repo_path, repo_name, config)
586 594
587 595 # Consistent path information for hgweb
588 596 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
589 597 environ['REPO_NAME'] = repo_name
590 598 self.set_env_from_config(environ, config)
591 599
592 600 log.debug('http-app: starting app handler '
593 601 'with %s and process request', app)
594 602 return app(environ, ResponseFilter(start_response))
595 603 return _hg_stream
596 604
597 605 def git_stream(self):
598 606 if self._use_echo_app:
599 607 @wsgiapp
600 608 def _git_stream(environ, start_response):
601 609 app = EchoApp('fake_path', 'fake_name', None)
602 610 return app(environ, start_response)
603 611 return _git_stream
604 612 else:
605 613 @wsgiapp
606 614 def _git_stream(environ, start_response):
607 615 log.debug('http-app: handling git stream')
608 616 repo_path = environ['HTTP_X_RC_REPO_PATH']
609 617 repo_name = environ['HTTP_X_RC_REPO_NAME']
610 618 packed_config = base64.b64decode(
611 619 environ['HTTP_X_RC_REPO_CONFIG'])
612 620 config = msgpack.unpackb(packed_config)
613 621
614 622 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
615 623 self.set_env_from_config(environ, config)
616 624
617 625 content_type = environ.get('CONTENT_TYPE', '')
618 626
619 627 path = environ['PATH_INFO']
620 628 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
621 629 log.debug(
622 630 'LFS: Detecting if request `%s` is LFS server path based '
623 631 'on content type:`%s`, is_lfs:%s',
624 632 path, content_type, is_lfs_request)
625 633
626 634 if not is_lfs_request:
627 635 # fallback detection by path
628 636 if GIT_LFS_PROTO_PAT.match(path):
629 637 is_lfs_request = True
630 638 log.debug(
631 639 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
632 640 path, is_lfs_request)
633 641
634 642 if is_lfs_request:
635 643 app = scm_app.create_git_lfs_wsgi_app(
636 644 repo_path, repo_name, config)
637 645 else:
638 646 app = scm_app.create_git_wsgi_app(
639 647 repo_path, repo_name, config)
640 648
641 649 log.debug('http-app: starting app handler '
642 650 'with %s and process request', app)
643 651
644 652 return app(environ, start_response)
645 653
646 654 return _git_stream
647 655
648 656 def handle_vcs_exception(self, exception, request):
649 657 _vcs_kind = getattr(exception, '_vcs_kind', '')
650 658 if _vcs_kind == 'repo_locked':
651 659 # Get custom repo-locked status code if present.
652 660 status_code = request.headers.get('X-RC-Locked-Status-Code')
653 661 return HTTPRepoLocked(
654 662 title=exception.message, status_code=status_code)
655 663
656 664 elif _vcs_kind == 'repo_branch_protected':
657 665 # Get custom repo-branch-protected status code if present.
658 666 return HTTPRepoBranchProtected(title=exception.message)
659 667
660 668 exc_info = request.exc_info
661 669 store_exception(id(exc_info), exc_info)
662 670
663 671 traceback_info = 'unavailable'
664 672 if request.exc_info:
665 673 exc_type, exc_value, exc_tb = request.exc_info
666 674 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
667 675
668 676 log.error(
669 677 'error occurred handling this request for path: %s, \n tb: %s',
670 678 request.path, traceback_info)
671 679 raise exception
672 680
673 681
674 682 class ResponseFilter(object):
675 683
676 684 def __init__(self, start_response):
677 685 self._start_response = start_response
678 686
679 687 def __call__(self, status, response_headers, exc_info=None):
680 688 headers = tuple(
681 689 (h, v) for h, v in response_headers
682 690 if not wsgiref.util.is_hop_by_hop(h))
683 691 return self._start_response(status, headers, exc_info)
684 692
685 693
686 694 def main(global_config, **settings):
687 695 if MercurialFactory:
688 696 hgpatches.patch_largefiles_capabilities()
689 697 hgpatches.patch_subrepo_type_mapping()
690 698
691 699 app = HTTPApplication(settings=settings, global_config=global_config)
692 700 return app.wsgi_app()
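
The routes and msgpack payload contract registered above can be exercised with a short client sketch. This is illustrative only and not part of the commit: the host/port and repository path are hypothetical, and any backend method that accepts a wire argument (is_empty is used here) would work the same way.

# Minimal sketch of a client call against the /{backend} msgpack RPC view.
# Assumptions: vcsserver listens on http://localhost:9900 (hypothetical)
# and /srv/repos/example is a hypothetical repository path.
import uuid
import msgpack
import urllib2

payload = {
    'id': str(uuid.uuid4()),
    'method': 'is_empty',
    'params': {
        # wire is prepended to args by _vcs_view_params(); 'context' must be
        # a valid UUID string because it is parsed with uuid.UUID()
        'wire': {'path': '/srv/repos/example', 'context': str(uuid.uuid4())},
        'args': [],
        'kwargs': {},
    },
}

req = urllib2.Request(
    'http://localhost:9900/svn',  # route 'vcs': /{backend}
    data=msgpack.packb(payload),
    headers={'Content-Type': 'application/x-msgpack'})
response = msgpack.unpackb(urllib2.urlopen(req).read())

# vcs_view() answers with {'id': ..., 'result': ...} on success or
# {'id': ..., 'error': {...}} when the remote call raised.
print(response)

The streaming variant posts the same payload shape to /{backend}/stream with the method name prefixed by 'stream:'; vcs_stream_view() strips that prefix and chunks the raw result by the optional chunk_size field (default 4096).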
@@ -1,856 +1,856 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 22 import time
23 23 from urllib2 import URLError
24 24 import urlparse
25 25 import logging
26 26 import posixpath as vcspath
27 27 import StringIO
28 28 import urllib
29 29 import traceback
30 30
31 31 import svn.client
32 32 import svn.core
33 33 import svn.delta
34 34 import svn.diff
35 35 import svn.fs
36 36 import svn.repos
37 37
38 38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 40 from vcsserver.exceptions import NoContentException
41 41 from vcsserver.vcs_base import RemoteBase
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 svn_compatible_versions_map = {
47 47 'pre-1.4-compatible': '1.3',
48 48 'pre-1.5-compatible': '1.4',
49 49 'pre-1.6-compatible': '1.5',
50 50 'pre-1.8-compatible': '1.7',
51 51 'pre-1.9-compatible': '1.8',
52 52 }
53 53
54 54 current_compatible_version = '1.12'
55 55
56 56
57 57 def reraise_safe_exceptions(func):
58 58 """Decorator for converting svn exceptions to something neutral."""
59 59 def wrapper(*args, **kwargs):
60 60 try:
61 61 return func(*args, **kwargs)
62 62 except Exception as e:
63 63 if not hasattr(e, '_vcs_kind'):
64 64 log.exception("Unhandled exception in svn remote call")
65 65 raise_from_original(exceptions.UnhandledException(e))
66 66 raise
67 67 return wrapper
68 68
69 69
70 70 class SubversionFactory(RepoFactory):
71 71 repo_type = 'svn'
72 72
73 73 def _create_repo(self, wire, create, compatible_version):
74 74 path = svn.core.svn_path_canonicalize(wire['path'])
75 75 if create:
76 76 fs_config = {'compatible-version': current_compatible_version}
77 77 if compatible_version:
78 78
79 79 compatible_version_string = \
80 80 svn_compatible_versions_map.get(compatible_version) \
81 81 or compatible_version
82 82 fs_config['compatible-version'] = compatible_version_string
83 83
84 84 log.debug('Create SVN repo with config "%s"', fs_config)
85 85 repo = svn.repos.create(path, "", "", None, fs_config)
86 86 else:
87 87 repo = svn.repos.open(path)
88 88
89 89 log.debug('Got SVN object: %s', repo)
90 90 return repo
91 91
92 92 def repo(self, wire, create=False, compatible_version=None):
93 93 """
94 94 Get a repository instance for the given path.
95 95 """
96 96 return self._create_repo(wire, create, compatible_version)
97 97
98 98
99 99 NODE_TYPE_MAPPING = {
100 100 svn.core.svn_node_file: 'file',
101 101 svn.core.svn_node_dir: 'dir',
102 102 }
103 103
104 104
105 105 class SvnRemote(RemoteBase):
106 106
107 107 def __init__(self, factory, hg_factory=None):
108 108 self._factory = factory
109 109 # TODO: Remove once we do not use internal Mercurial objects anymore
110 110 # for subversion
111 111 self._hg_factory = hg_factory
112 112
113 113 @reraise_safe_exceptions
114 114 def discover_svn_version(self):
115 115 try:
116 116 import svn.core
117 117 svn_ver = svn.core.SVN_VERSION
118 118 except ImportError:
119 119 svn_ver = None
120 120 return svn_ver
121 121
122 122 @reraise_safe_exceptions
123 123 def is_empty(self, wire):
124 124
125 125 try:
126 126 return self.lookup(wire, -1) == 0
127 127 except Exception:
128 128 log.exception("failed to read object_store")
129 129 return False
130 130
131 131 def check_url(self, url, config_items):
132 132 # this can throw exception if not installed, but we detect this
133 133 from hgsubversion import svnrepo
134 134
135 135 baseui = self._hg_factory._create_config(config_items)
136 136 # the uuid function gets a valid UUID only from a proper repo,
137 137 # otherwise it throws an exception
138 138 try:
139 139 svnrepo.svnremoterepo(baseui, url).svn.uuid
140 140 except Exception:
141 141 tb = traceback.format_exc()
142 142 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
143 143 raise URLError(
144 144 '"%s" is not a valid Subversion source url.' % (url, ))
145 145 return True
146 146
147 147 def is_path_valid_repository(self, wire, path):
148 148
149 149 # NOTE(marcink): short circuit the check for SVN repo
150 150 # repos.open might be expensive to run, but there is one cheap
151 151 # precondition we can check first: the presence of the 'format' file
152 152
153 153 if not os.path.isfile(os.path.join(path, 'format')):
154 154 return False
155 155
156 156 try:
157 157 svn.repos.open(path)
158 158 except svn.core.SubversionException:
159 159 tb = traceback.format_exc()
160 160 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
161 161 return False
162 162 return True
163 163
164 164 @reraise_safe_exceptions
165 165 def verify(self, wire,):
166 166 repo_path = wire['path']
167 167 if not self.is_path_valid_repository(wire, repo_path):
168 168 raise Exception(
169 169 "Path %s is not a valid Subversion repository." % repo_path)
170 170
171 171 cmd = ['svnadmin', 'info', repo_path]
172 172 stdout, stderr = subprocessio.run_command(cmd)
173 173 return stdout
174 174
175 175 def lookup(self, wire, revision):
176 176 if revision not in [-1, None, 'HEAD']:
177 177 raise NotImplementedError
178 178 repo = self._factory.repo(wire)
179 179 fs_ptr = svn.repos.fs(repo)
180 180 head = svn.fs.youngest_rev(fs_ptr)
181 181 return head
182 182
183 183 def lookup_interval(self, wire, start_ts, end_ts):
184 184 repo = self._factory.repo(wire)
185 185 fsobj = svn.repos.fs(repo)
186 186 start_rev = None
187 187 end_rev = None
188 188 if start_ts:
189 189 start_ts_svn = apr_time_t(start_ts)
190 190 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
191 191 else:
192 192 start_rev = 1
193 193 if end_ts:
194 194 end_ts_svn = apr_time_t(end_ts)
195 195 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
196 196 else:
197 197 end_rev = svn.fs.youngest_rev(fsobj)
198 198 return start_rev, end_rev
199 199
200 200 def revision_properties(self, wire, revision):
201 201
202 202 cache_on, context_uid, repo_id = self._cache_on(wire)
203 203 @self.region.conditional_cache_on_arguments(condition=cache_on)
204 204 def _revision_properties(_repo_id, _revision):
205 205 repo = self._factory.repo(wire)
206 206 fs_ptr = svn.repos.fs(repo)
207 207 return svn.fs.revision_proplist(fs_ptr, revision)
208 208 return _revision_properties(repo_id, revision)
209 209
210 210 def revision_changes(self, wire, revision):
211 211
212 212 repo = self._factory.repo(wire)
213 213 fsobj = svn.repos.fs(repo)
214 214 rev_root = svn.fs.revision_root(fsobj, revision)
215 215
216 216 editor = svn.repos.ChangeCollector(fsobj, rev_root)
217 217 editor_ptr, editor_baton = svn.delta.make_editor(editor)
218 218 base_dir = ""
219 219 send_deltas = False
220 220 svn.repos.replay2(
221 221 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
222 222 editor_ptr, editor_baton, None)
223 223
224 224 added = []
225 225 changed = []
226 226 removed = []
227 227
228 228 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
229 229 for path, change in editor.changes.iteritems():
230 230 # TODO: Decide what to do with directory nodes. Subversion can add
231 231 # empty directories.
232 232
233 233 if change.item_kind == svn.core.svn_node_dir:
234 234 continue
235 235 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
236 236 added.append(path)
237 237 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
238 238 svn.repos.CHANGE_ACTION_REPLACE]:
239 239 changed.append(path)
240 240 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
241 241 removed.append(path)
242 242 else:
243 243 raise NotImplementedError(
244 244 "Action %s not supported on path %s" % (
245 245 change.action, path))
246 246
247 247 changes = {
248 248 'added': added,
249 249 'changed': changed,
250 250 'removed': removed,
251 251 }
252 252 return changes
253 253
254 254 @reraise_safe_exceptions
255 255 def node_history(self, wire, path, revision, limit):
256 256 cache_on, context_uid, repo_id = self._cache_on(wire)
257 257 @self.region.conditional_cache_on_arguments(condition=cache_on)
258 258 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
259 259 cross_copies = False
260 260 repo = self._factory.repo(wire)
261 261 fsobj = svn.repos.fs(repo)
262 262 rev_root = svn.fs.revision_root(fsobj, revision)
263 263
264 264 history_revisions = []
265 265 history = svn.fs.node_history(rev_root, path)
266 266 history = svn.fs.history_prev(history, cross_copies)
267 267 while history:
268 268 __, node_revision = svn.fs.history_location(history)
269 269 history_revisions.append(node_revision)
270 270 if limit and len(history_revisions) >= limit:
271 271 break
272 272 history = svn.fs.history_prev(history, cross_copies)
273 273 return history_revisions
274 274 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
275 275
276 276 def node_properties(self, wire, path, revision):
277 277 cache_on, context_uid, repo_id = self._cache_on(wire)
278 278 @self.region.conditional_cache_on_arguments(condition=cache_on)
279 279 def _node_properties(_repo_id, _path, _revision):
280 280 repo = self._factory.repo(wire)
281 281 fsobj = svn.repos.fs(repo)
282 282 rev_root = svn.fs.revision_root(fsobj, revision)
283 283 return svn.fs.node_proplist(rev_root, path)
284 284 return _node_properties(repo_id, path, revision)
285 285
286 286 def file_annotate(self, wire, path, revision):
287 287 abs_path = 'file://' + urllib.pathname2url(
288 288 vcspath.join(wire['path'], path))
289 289 file_uri = svn.core.svn_path_canonicalize(abs_path)
290 290
291 291 start_rev = svn_opt_revision_value_t(0)
292 292 peg_rev = svn_opt_revision_value_t(revision)
293 293 end_rev = peg_rev
294 294
295 295 annotations = []
296 296
297 297 def receiver(line_no, revision, author, date, line, pool):
298 298 annotations.append((line_no, revision, line))
299 299
300 300 # TODO: Cannot use blame5, missing typemap function in the swig code
301 301 try:
302 302 svn.client.blame2(
303 303 file_uri, peg_rev, start_rev, end_rev,
304 304 receiver, svn.client.create_context())
305 305 except svn.core.SubversionException as exc:
306 306 log.exception("Error during blame operation.")
307 307 raise Exception(
308 308 "Blame not supported or file does not exist at path %s. "
309 309 "Error %s." % (path, exc))
310 310
311 311 return annotations
312 312
313 313 def get_node_type(self, wire, path, revision=None):
314 314
315 315 cache_on, context_uid, repo_id = self._cache_on(wire)
316 316 @self.region.conditional_cache_on_arguments(condition=cache_on)
317 317 def _get_node_type(_repo_id, _path, _revision):
318 318 repo = self._factory.repo(wire)
319 319 fs_ptr = svn.repos.fs(repo)
320 320 if _revision is None:
321 321 _revision = svn.fs.youngest_rev(fs_ptr)
322 322 root = svn.fs.revision_root(fs_ptr, _revision)
323 323 node = svn.fs.check_path(root, path)
324 324 return NODE_TYPE_MAPPING.get(node, None)
325 325 return _get_node_type(repo_id, path, revision)
326 326
327 327 def get_nodes(self, wire, path, revision=None):
328 328
329 329 cache_on, context_uid, repo_id = self._cache_on(wire)
330 330 @self.region.conditional_cache_on_arguments(condition=cache_on)
331 331 def _get_nodes(_repo_id, _path, _revision):
332 332 repo = self._factory.repo(wire)
333 333 fsobj = svn.repos.fs(repo)
334 334 if _revision is None:
335 335 _revision = svn.fs.youngest_rev(fsobj)
336 336 root = svn.fs.revision_root(fsobj, _revision)
337 337 entries = svn.fs.dir_entries(root, path)
338 338 result = []
339 339 for entry_path, entry_info in entries.iteritems():
340 340 result.append(
341 341 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
342 342 return result
343 343 return _get_nodes(repo_id, path, revision)
344 344
345 345 def get_file_content(self, wire, path, rev=None):
346 346 repo = self._factory.repo(wire)
347 347 fsobj = svn.repos.fs(repo)
348 348 if rev is None:
349 349 rev = svn.fs.youngest_revision(fsobj)
350 350 root = svn.fs.revision_root(fsobj, rev)
351 351 content = svn.core.Stream(svn.fs.file_contents(root, path))
352 352 return content.read()
353 353
354 354 def get_file_size(self, wire, path, revision=None):
355 355
356 356 cache_on, context_uid, repo_id = self._cache_on(wire)
357 357 @self.region.conditional_cache_on_arguments(condition=cache_on)
358 358 def _get_file_size(_repo_id, _path, _revision):
359 359 repo = self._factory.repo(wire)
360 360 fsobj = svn.repos.fs(repo)
361 361 if _revision is None:
362 362 _revision = svn.fs.youngest_revision(fsobj)
363 363 root = svn.fs.revision_root(fsobj, _revision)
364 364 size = svn.fs.file_length(root, path)
365 365 return size
366 366 return _get_file_size(repo_id, path, revision)
367 367
368 368 def create_repository(self, wire, compatible_version=None):
369 369 log.info('Creating Subversion repository in path "%s"', wire['path'])
370 370 self._factory.repo(wire, create=True,
371 371 compatible_version=compatible_version)
372 372
373 373 def get_url_and_credentials(self, src_url):
374 374 obj = urlparse.urlparse(src_url)
375 375 username = obj.username or None
376 376 password = obj.password or None
377 377 return username, password, src_url
378 378
379 379 def import_remote_repository(self, wire, src_url):
380 380 repo_path = wire['path']
381 381 if not self.is_path_valid_repository(wire, repo_path):
382 382 raise Exception(
383 383 "Path %s is not a valid Subversion repository." % repo_path)
384 384
385 385 username, password, src_url = self.get_url_and_credentials(src_url)
386 386 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
387 387 '--trust-server-cert-failures=unknown-ca']
388 388 if username and password:
389 389 rdump_cmd += ['--username', username, '--password', password]
390 390 rdump_cmd += [src_url]
391 391
392 392 rdump = subprocess.Popen(
393 393 rdump_cmd,
394 394 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
395 395 load = subprocess.Popen(
396 396 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
397 397
398 398 # TODO: johbo: This can be a very long operation, might be better
399 399 # to track some kind of status and provide an api to check if the
400 400 # import is done.
401 401 rdump.wait()
402 402 load.wait()
403 403
404 404 log.debug('Return process ended with code: %s', rdump.returncode)
405 405 if rdump.returncode != 0:
406 406 errors = rdump.stderr.read()
407 407 log.error('svnrdump dump failed: statuscode %s: message: %s',
408 408 rdump.returncode, errors)
409 409 reason = 'UNKNOWN'
410 410 if 'svnrdump: E230001:' in errors:
411 411 reason = 'INVALID_CERTIFICATE'
412 412
413 413 if reason == 'UNKNOWN':
414 414 reason = 'UNKNOWN:{}'.format(errors)
415 415 raise Exception(
416 416 'Failed to dump the remote repository from %s. Reason:%s' % (
417 417 src_url, reason))
418 418 if load.returncode != 0:
419 419 raise Exception(
420 420 'Failed to load the dump of remote repository from %s.' %
421 421 (src_url, ))
422 422
423 423 def commit(self, wire, message, author, timestamp, updated, removed):
424 424 assert isinstance(message, str)
425 425 assert isinstance(author, str)
426 426
427 427 repo = self._factory.repo(wire)
428 428 fsobj = svn.repos.fs(repo)
429 429
430 430 rev = svn.fs.youngest_rev(fsobj)
431 431 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
432 432 txn_root = svn.fs.txn_root(txn)
433 433
434 434 for node in updated:
435 435 TxnNodeProcessor(node, txn_root).update()
436 436 for node in removed:
437 437 TxnNodeProcessor(node, txn_root).remove()
438 438
439 439 commit_id = svn.repos.fs_commit_txn(repo, txn)
440 440
441 441 if timestamp:
442 442 apr_time = apr_time_t(timestamp)
443 443 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
444 444 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
445 445
446 446 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
447 447 return commit_id
448 448
449 449 def diff(self, wire, rev1, rev2, path1=None, path2=None,
450 450 ignore_whitespace=False, context=3):
451 451
452 452 wire.update(cache=False)
453 453 repo = self._factory.repo(wire)
454 454 diff_creator = SvnDiffer(
455 455 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
456 456 try:
457 457 return diff_creator.generate_diff()
458 458 except svn.core.SubversionException as e:
459 459 log.exception(
460 460 "Error during diff operation operation. "
461 461 "Path might not exist %s, %s" % (path1, path2))
462 462 return ""
463 463
464 464 @reraise_safe_exceptions
465 465 def is_large_file(self, wire, path):
466 466 return False
467 467
468 468 @reraise_safe_exceptions
469 469 def is_binary(self, wire, rev, path):
470 470 cache_on, context_uid, repo_id = self._cache_on(wire)
471 471
472 472 @self.region.conditional_cache_on_arguments(condition=cache_on)
473 473 def _is_binary(_repo_id, _rev, _path):
474 474 raw_bytes = self.get_file_content(wire, path, rev)
475 475 return raw_bytes and '\0' in raw_bytes
476 476
477 477 return _is_binary(repo_id, rev, path)
478 478
479 479 @reraise_safe_exceptions
480 480 def run_svn_command(self, wire, cmd, **opts):
481 481 path = wire.get('path', None)
482 482
483 483 if path and os.path.isdir(path):
484 484 opts['cwd'] = path
485 485
486 486 safe_call = False
487 487 if '_safe' in opts:
488 488 safe_call = True
489 489
490 490 svnenv = os.environ.copy()
491 491 svnenv.update(opts.pop('extra_env', {}))
492 492
493 493 _opts = {'env': svnenv, 'shell': False}
494 494
495 495 try:
496 496 _opts.update(opts)
497 497 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
498 498
499 499 return ''.join(p), ''.join(p.error)
500 500 except (EnvironmentError, OSError) as err:
501 cmd = ' '.join(cmd) # human friendly CMD
502 tb_err = ("Couldn't run svn command (%s).\n"
503 "Original error was:%s\n"
504 "Call options:%s\n"
505 % (cmd, err, _opts))
506 log.exception(tb_err)
507 501 if safe_call:
508 502 return '', err
509 503 else:
504 cmd = ' '.join(cmd) # human friendly CMD
505 tb_err = ("Couldn't run svn command (%s).\n"
506 "Original error was:%s\n"
507 "Call options:%s\n"
508 % (cmd, err, _opts))
509 log.exception(tb_err)
510 510 raise exceptions.VcsException()(tb_err)
511 511
512 512 @reraise_safe_exceptions
513 513 def install_hooks(self, wire, force=False):
514 514 from vcsserver.hook_utils import install_svn_hooks
515 515 repo_path = wire['path']
516 516 binary_dir = settings.BINARY_DIR
517 517 executable = None
518 518 if binary_dir:
519 519 executable = os.path.join(binary_dir, 'python')
520 520 return install_svn_hooks(
521 521 repo_path, executable=executable, force_create=force)
522 522
523 523 @reraise_safe_exceptions
524 524 def get_hooks_info(self, wire):
525 525 from vcsserver.hook_utils import (
526 526 get_svn_pre_hook_version, get_svn_post_hook_version)
527 527 repo_path = wire['path']
528 528 return {
529 529 'pre_version': get_svn_pre_hook_version(repo_path),
530 530 'post_version': get_svn_post_hook_version(repo_path),
531 531 }
532 532
533 533 @reraise_safe_exceptions
534 534 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
535 535 archive_dir_name, commit_id):
536 536
537 537 def walk_tree(root, root_dir, _commit_id):
538 538 """
539 539 Special recursive svn repo walker
540 540 """
541 541
542 542 filemode_default = 0o100644
543 543 filemode_executable = 0o100755
544 544
545 545 file_iter = svn.fs.dir_entries(root, root_dir)
546 546 for f_name in file_iter:
547 547 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
548 548
549 549 if f_type == 'dir':
550 550 # return only DIR, and then all entries in that dir
551 551 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
552 552 new_root = os.path.join(root_dir, f_name)
553 553 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
554 554 yield _f_name, _f_data, _f_type
555 555 else:
556 556 f_path = os.path.join(root_dir, f_name).rstrip('/')
557 557 prop_list = svn.fs.node_proplist(root, f_path)
558 558
559 559 f_mode = filemode_default
560 560 if prop_list.get('svn:executable'):
561 561 f_mode = filemode_executable
562 562
563 563 f_is_link = False
564 564 if prop_list.get('svn:special'):
565 565 f_is_link = True
566 566
567 567 data = {
568 568 'is_link': f_is_link,
569 569 'mode': f_mode,
570 570 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
571 571 }
572 572
573 573 yield f_path, data, f_type
574 574
575 575 def file_walker(_commit_id, path):
576 576 repo = self._factory.repo(wire)
577 577 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
578 578
579 579 def no_content():
580 580 raise NoContentException()
581 581
582 582 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
583 583 file_path = f_name
584 584
585 585 if f_type == 'dir':
586 586 mode = f_data['mode']
587 587 yield ArchiveNode(file_path, mode, False, no_content)
588 588 else:
589 589 mode = f_data['mode']
590 590 is_link = f_data['is_link']
591 591 data_stream = f_data['content_stream']
592 592 yield ArchiveNode(file_path, mode, is_link, data_stream)
593 593
594 594 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
595 595 archive_dir_name, commit_id)
596 596
597 597
598 598 class SvnDiffer(object):
599 599 """
600 600 Utility to create diffs based on difflib and the Subversion api
601 601 """
602 602
603 603 binary_content = False
604 604
605 605 def __init__(
606 606 self, repo, src_rev, src_path, tgt_rev, tgt_path,
607 607 ignore_whitespace, context):
608 608 self.repo = repo
609 609 self.ignore_whitespace = ignore_whitespace
610 610 self.context = context
611 611
612 612 fsobj = svn.repos.fs(repo)
613 613
614 614 self.tgt_rev = tgt_rev
615 615 self.tgt_path = tgt_path or ''
616 616 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
617 617 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
618 618
619 619 self.src_rev = src_rev
620 620 self.src_path = src_path or self.tgt_path
621 621 self.src_root = svn.fs.revision_root(fsobj, src_rev)
622 622 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
623 623
624 624 self._validate()
625 625
626 626 def _validate(self):
627 627 if (self.tgt_kind != svn.core.svn_node_none and
628 628 self.src_kind != svn.core.svn_node_none and
629 629 self.src_kind != self.tgt_kind):
630 630 # TODO: johbo: proper error handling
631 631 raise Exception(
632 632 "Source and target are not compatible for diff generation. "
633 633 "Source type: %s, target type: %s" %
634 634 (self.src_kind, self.tgt_kind))
635 635
636 636 def generate_diff(self):
637 637 buf = StringIO.StringIO()
638 638 if self.tgt_kind == svn.core.svn_node_dir:
639 639 self._generate_dir_diff(buf)
640 640 else:
641 641 self._generate_file_diff(buf)
642 642 return buf.getvalue()
643 643
644 644 def _generate_dir_diff(self, buf):
645 645 editor = DiffChangeEditor()
646 646 editor_ptr, editor_baton = svn.delta.make_editor(editor)
647 647 svn.repos.dir_delta2(
648 648 self.src_root,
649 649 self.src_path,
650 650 '', # src_entry
651 651 self.tgt_root,
652 652 self.tgt_path,
653 653 editor_ptr, editor_baton,
654 654 authorization_callback_allow_all,
655 655 False, # text_deltas
656 656 svn.core.svn_depth_infinity, # depth
657 657 False, # entry_props
658 658 False, # ignore_ancestry
659 659 )
660 660
661 661 for path, __, change in sorted(editor.changes):
662 662 self._generate_node_diff(
663 663 buf, change, path, self.tgt_path, path, self.src_path)
664 664
665 665 def _generate_file_diff(self, buf):
666 666 change = None
667 667 if self.src_kind == svn.core.svn_node_none:
668 668 change = "add"
669 669 elif self.tgt_kind == svn.core.svn_node_none:
670 670 change = "delete"
671 671 tgt_base, tgt_path = vcspath.split(self.tgt_path)
672 672 src_base, src_path = vcspath.split(self.src_path)
673 673 self._generate_node_diff(
674 674 buf, change, tgt_path, tgt_base, src_path, src_base)
675 675
676 676 def _generate_node_diff(
677 677 self, buf, change, tgt_path, tgt_base, src_path, src_base):
678 678
679 679 if self.src_rev == self.tgt_rev and tgt_base == src_base:
680 680 # for consistent behaviour with git/hg, return an empty diff when
681 681 # comparing the same revisions
682 682 return
683 683
684 684 tgt_full_path = vcspath.join(tgt_base, tgt_path)
685 685 src_full_path = vcspath.join(src_base, src_path)
686 686
687 687 self.binary_content = False
688 688 mime_type = self._get_mime_type(tgt_full_path)
689 689
690 690 if mime_type and not mime_type.startswith('text'):
691 691 self.binary_content = True
692 692 buf.write("=" * 67 + '\n')
693 693 buf.write("Cannot display: file marked as a binary type.\n")
694 694 buf.write("svn:mime-type = %s\n" % mime_type)
695 695 buf.write("Index: %s\n" % (tgt_path, ))
696 696 buf.write("=" * 67 + '\n')
697 697 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
698 698 'tgt_path': tgt_path})
699 699
700 700 if change == 'add':
701 701 # TODO: johbo: SVN is missing a zero here compared to git
702 702 buf.write("new file mode 10644\n")
703 703
704 704 #TODO(marcink): intro to binary detection of svn patches
705 705 # if self.binary_content:
706 706 # buf.write('GIT binary patch\n')
707 707
708 708 buf.write("--- /dev/null\t(revision 0)\n")
709 709 src_lines = []
710 710 else:
711 711 if change == 'delete':
712 712 buf.write("deleted file mode 10644\n")
713 713
714 714 #TODO(marcink): intro to binary detection of svn patches
715 715 # if self.binary_content:
716 716 # buf.write('GIT binary patch\n')
717 717
718 718 buf.write("--- a/%s\t(revision %s)\n" % (
719 719 src_path, self.src_rev))
720 720 src_lines = self._svn_readlines(self.src_root, src_full_path)
721 721
722 722 if change == 'delete':
723 723 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
724 724 tgt_lines = []
725 725 else:
726 726 buf.write("+++ b/%s\t(revision %s)\n" % (
727 727 tgt_path, self.tgt_rev))
728 728 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
729 729
730 730 if not self.binary_content:
731 731 udiff = svn_diff.unified_diff(
732 732 src_lines, tgt_lines, context=self.context,
733 733 ignore_blank_lines=self.ignore_whitespace,
734 734 ignore_case=False,
735 735 ignore_space_changes=self.ignore_whitespace)
736 736 buf.writelines(udiff)
737 737
738 738 def _get_mime_type(self, path):
739 739 try:
740 740 mime_type = svn.fs.node_prop(
741 741 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
742 742 except svn.core.SubversionException:
743 743 mime_type = svn.fs.node_prop(
744 744 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
745 745 return mime_type
746 746
747 747 def _svn_readlines(self, fs_root, node_path):
748 748 if self.binary_content:
749 749 return []
750 750 node_kind = svn.fs.check_path(fs_root, node_path)
751 751 if node_kind not in (
752 752 svn.core.svn_node_file, svn.core.svn_node_symlink):
753 753 return []
754 754 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
755 755 return content.splitlines(True)
756 756
757 757
758 758 class DiffChangeEditor(svn.delta.Editor):
759 759 """
760 760 Records changes between two given revisions
761 761 """
762 762
763 763 def __init__(self):
764 764 self.changes = []
765 765
766 766 def delete_entry(self, path, revision, parent_baton, pool=None):
767 767 self.changes.append((path, None, 'delete'))
768 768
769 769 def add_file(
770 770 self, path, parent_baton, copyfrom_path, copyfrom_revision,
771 771 file_pool=None):
772 772 self.changes.append((path, 'file', 'add'))
773 773
774 774 def open_file(self, path, parent_baton, base_revision, file_pool=None):
775 775 self.changes.append((path, 'file', 'change'))
776 776
777 777
778 778 def authorization_callback_allow_all(root, path, pool):
779 779 return True
780 780
781 781
782 782 class TxnNodeProcessor(object):
783 783 """
784 784 Utility to process the change of one node within a transaction root.
785 785
786 786 It encapsulates the knowledge of how to add, update or remove
787 787 a node for a given transaction root. The purpose is to support the method
788 788 `SvnRemote.commit`.
789 789 """
790 790
791 791 def __init__(self, node, txn_root):
792 792 assert isinstance(node['path'], str)
793 793
794 794 self.node = node
795 795 self.txn_root = txn_root
796 796
797 797 def update(self):
798 798 self._ensure_parent_dirs()
799 799 self._add_file_if_node_does_not_exist()
800 800 self._update_file_content()
801 801 self._update_file_properties()
802 802
803 803 def remove(self):
804 804 svn.fs.delete(self.txn_root, self.node['path'])
805 805 # TODO: Clean up directory if empty
806 806
807 807 def _ensure_parent_dirs(self):
808 808 curdir = vcspath.dirname(self.node['path'])
809 809 dirs_to_create = []
810 810 while not self._svn_path_exists(curdir):
811 811 dirs_to_create.append(curdir)
812 812 curdir = vcspath.dirname(curdir)
813 813
814 814 for curdir in reversed(dirs_to_create):
815 815 log.debug('Creating missing directory "%s"', curdir)
816 816 svn.fs.make_dir(self.txn_root, curdir)
817 817
818 818 def _svn_path_exists(self, path):
819 819 path_status = svn.fs.check_path(self.txn_root, path)
820 820 return path_status != svn.core.svn_node_none
821 821
822 822 def _add_file_if_node_does_not_exist(self):
823 823 kind = svn.fs.check_path(self.txn_root, self.node['path'])
824 824 if kind == svn.core.svn_node_none:
825 825 svn.fs.make_file(self.txn_root, self.node['path'])
826 826
827 827 def _update_file_content(self):
828 828 assert isinstance(self.node['content'], str)
829 829 handler, baton = svn.fs.apply_textdelta(
830 830 self.txn_root, self.node['path'], None, None)
831 831 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
832 832
833 833 def _update_file_properties(self):
834 834 properties = self.node.get('properties', {})
835 835 for key, value in properties.iteritems():
836 836 svn.fs.change_node_prop(
837 837 self.txn_root, self.node['path'], key, value)
838 838
839 839
840 840 def apr_time_t(timestamp):
841 841 """
842 842 Convert a Python timestamp into APR timestamp type apr_time_t
843 843 """
844 844 return timestamp * 1E6
845 845
846 846
847 847 def svn_opt_revision_value_t(num):
848 848 """
849 849 Put `num` into a `svn_opt_revision_value_t` structure.
850 850 """
851 851 value = svn.core.svn_opt_revision_value_t()
852 852 value.number = num
853 853 revision = svn.core.svn_opt_revision_t()
854 854 revision.kind = svn.core.svn_opt_revision_number
855 855 revision.value = value
856 856 return revision
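
For reference, a minimal sketch of the node payloads consumed by SvnRemote.commit() and TxnNodeProcessor above. The repository path, file contents and author below are hypothetical; only the dict shapes (str 'path', str 'content', optional 'properties') come from the code.

# Hypothetical node payloads matching the asserts in TxnNodeProcessor.
updated_nodes = [
    {
        'path': 'docs/readme.txt',                  # must be str
        'content': 'hello subversion\n',            # must be str, applied via apply_textdelta
        'properties': {'svn:eol-style': 'native'},  # optional node properties
    },
]
removed_nodes = [{'path': 'old/obsolete.txt'}]

# The factory/remote wiring is configured by the application and not shown in
# this file, so it is only outlined here:
# remote = SvnRemote(factory)
# commit_id = remote.commit(
#     {'path': '/srv/repos/example'},               # wire
#     'Update readme', 'Jane Doe <jane@example.com>',
#     timestamp=None, updated=updated_nodes, removed=removed_nodes)

Passing timestamp=None skips the svn:date rewrite; a Unix timestamp would be converted through apr_time_t() and written back onto the new revision.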