##// END OF EJS Templates
release: Merge default into stable for release preparation
marcink -
r853:41b1b868 merge stable
parent child Browse files
Show More
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.18.3
2 current_version = 4.19.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.18.3
12 state = in_progress
13 version = 4.19.0
16 14
@@ -1,23 +1,28 b''
1 1
2 .PHONY: clean test test-clean test-only generate-pkgs
2 .PHONY: clean test test-clean test-only generate-pkgs pip-packages
3 3
4 # set by: PATH_TO_OUTDATED_PACKAGES=/some/path/outdated_packages.py
5 OUTDATED_PACKAGES = ${PATH_TO_OUTDATED_PACKAGES}
4 6
5 7 clean:
6 8 make test-clean
7 9 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' -o -iname '*.orig' \) -exec rm '{}' ';'
8 10
9 11 test:
10 12 make test-clean
11 13 make test-only
12 14
13 15 test-clean:
14 16 rm -rf coverage.xml htmlcov junit.xml pylint.log result
15 17 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
16 18
17 19 test-only:
18 20 PYTHONHASHSEED=random \
19 21 py.test -x -vv -r xw -p no:sugar \
20 22 --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
21 23
22 24 generate-pkgs:
23 25 nix-shell pkgs/shell-generate.nix --command "pip2nix generate --licenses"
26
27 pip-packages:
28 python ${OUTDATED_PACKAGES}
@@ -1,71 +1,71 b''
1 1 self: super: {
2 2
3 3 # bump GIT version
4 4 git = super.lib.overrideDerivation super.git (oldAttrs: {
5 5 name = "git-2.24.1";
6 6 src = self.fetchurl {
7 7 url = "https://www.kernel.org/pub/software/scm/git/git-2.24.1.tar.xz";
8 8 sha256 = "0ql5z31vgl7b785gwrf00m129mg7zi9pa65n12ij3mpxx3f28gvj";
9 9 };
10 10
11 11 # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
12 12 patches = [
13 13 ./patches/git/docbook2texi.patch
14 14 ./patches/git/git-sh-i18n.patch
15 15 ./patches/git/ssh-path.patch
16 16 ./patches/git/git-send-email-honor-PATH.patch
17 17 ./patches/git/installCheck-path.patch
18 18 ];
19 19
20 20 });
21 21
22 22 libgit2rc = super.lib.overrideDerivation super.libgit2 (oldAttrs: {
23 23 name = "libgit2-0.28.2";
24 24 version = "0.28.2";
25 25
26 26 src = self.fetchFromGitHub {
27 27 owner = "libgit2";
28 28 repo = "libgit2";
29 29 rev = "v0.28.2";
30 30 sha256 = "0cm8fvs05rj0baigs2133q5a0sm3pa234y8h6hmwhl2bz9xq3k4b";
31 31 };
32 32
33 33 cmakeFlags = [ "-DTHREADSAFE=ON" "-DUSE_HTTPS=no"];
34 34
35 35 buildInputs = [
36 36 super.zlib
37 37 super.libssh2
38 38 super.openssl
39 39 super.curl
40 40 ];
41 41
42 42
43 43 });
44 44
45 45 # Override subversion derivation to
46 46 # - activate python bindings
47 47 subversion =
48 48 let
49 49 subversionWithPython = super.subversion.override {
50 50 httpSupport = true;
51 51 pythonBindings = true;
52 52 python = self.python27Packages.python;
53 53 };
54 54 in
55 55 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
56 name = "subversion-1.12.2";
56 name = "subversion-1.13.0";
57 57 src = self.fetchurl {
58 url = "https://archive.apache.org/dist/subversion/subversion-1.12.2.tar.gz";
59 sha256 = "1wr1pklnq67xdzmf237zj6l1hg43yshfkbxvpvd5sv6r0dk7v4pl";
58 url = "https://archive.apache.org/dist/subversion/subversion-1.13.0.tar.gz";
59 sha256 = "0cb9p7f5hg0l4k32hz8vmvy2r45igchq5sh4m366za5q0c649bfs";
60 60 };
61 61
62 62 ## use internal lz4/utf8proc because it is stable and shipped with SVN
63 63 configureFlags = oldAttrs.configureFlags ++ [
64 64 " --with-lz4=internal"
65 65 " --with-utf8proc=internal"
66 66 ];
67 67
68 68 });
69 69
70 70
71 71 }
@@ -1,1090 +1,1090 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.3.0";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-19.3.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "cffi" = super.buildPythonPackage {
52 52 name = "cffi-1.12.3";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."pycparser"
56 56 ];
57 57 src = fetchurl {
58 58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.mit ];
63 63 };
64 64 };
65 65 "configobj" = super.buildPythonPackage {
66 66 name = "configobj-5.0.6";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."six"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
73 73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.bsdOriginal ];
77 77 };
78 78 };
79 79 "configparser" = super.buildPythonPackage {
80 80 name = "configparser-4.0.2";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.mit ];
88 88 };
89 89 };
90 90 "contextlib2" = super.buildPythonPackage {
91 91 name = "contextlib2-0.6.0.post1";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.psfl ];
99 99 };
100 100 };
101 101 "cov-core" = super.buildPythonPackage {
102 102 name = "cov-core-1.15.0";
103 103 doCheck = false;
104 104 propagatedBuildInputs = [
105 105 self."coverage"
106 106 ];
107 107 src = fetchurl {
108 108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
109 109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
110 110 };
111 111 meta = {
112 112 license = [ pkgs.lib.licenses.mit ];
113 113 };
114 114 };
115 115 "coverage" = super.buildPythonPackage {
116 116 name = "coverage-4.5.4";
117 117 doCheck = false;
118 118 src = fetchurl {
119 119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
121 121 };
122 122 meta = {
123 123 license = [ pkgs.lib.licenses.asl20 ];
124 124 };
125 125 };
126 126 "decorator" = super.buildPythonPackage {
127 127 name = "decorator-4.1.2";
128 128 doCheck = false;
129 129 src = fetchurl {
130 130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
131 131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
132 132 };
133 133 meta = {
134 134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
135 135 };
136 136 };
137 137 "dogpile.cache" = super.buildPythonPackage {
138 138 name = "dogpile.cache-0.9.0";
139 139 doCheck = false;
140 140 propagatedBuildInputs = [
141 141 self."decorator"
142 142 ];
143 143 src = fetchurl {
144 144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
146 146 };
147 147 meta = {
148 148 license = [ pkgs.lib.licenses.bsdOriginal ];
149 149 };
150 150 };
151 151 "dogpile.core" = super.buildPythonPackage {
152 152 name = "dogpile.core-0.4.1";
153 153 doCheck = false;
154 154 src = fetchurl {
155 155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
156 156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
157 157 };
158 158 meta = {
159 159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 160 };
161 161 };
162 162 "dulwich" = super.buildPythonPackage {
163 163 name = "dulwich-0.13.0";
164 164 doCheck = false;
165 165 src = fetchurl {
166 166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
168 168 };
169 169 meta = {
170 170 license = [ pkgs.lib.licenses.gpl2Plus ];
171 171 };
172 172 };
173 173 "enum34" = super.buildPythonPackage {
174 name = "enum34-1.1.6";
174 name = "enum34-1.1.10";
175 175 doCheck = false;
176 176 src = fetchurl {
177 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
178 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
179 179 };
180 180 meta = {
181 181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 182 };
183 183 };
184 184 "funcsigs" = super.buildPythonPackage {
185 185 name = "funcsigs-1.0.2";
186 186 doCheck = false;
187 187 src = fetchurl {
188 188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
189 189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
190 190 };
191 191 meta = {
192 192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
193 193 };
194 194 };
195 195 "gevent" = super.buildPythonPackage {
196 name = "gevent-1.4.0";
196 name = "gevent-1.5.0";
197 197 doCheck = false;
198 198 propagatedBuildInputs = [
199 199 self."greenlet"
200 200 ];
201 201 src = fetchurl {
202 url = "https://files.pythonhosted.org/packages/ed/27/6c49b70808f569b66ec7fac2e78f076e9b204db9cf5768740cff3d5a07ae/gevent-1.4.0.tar.gz";
203 sha256 = "1lchr4akw2jkm5v4kz7bdm4wv3knkfhbfn9vkkz4s5yrkcxzmdqy";
202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
204 204 };
205 205 meta = {
206 206 license = [ pkgs.lib.licenses.mit ];
207 207 };
208 208 };
209 209 "gprof2dot" = super.buildPythonPackage {
210 210 name = "gprof2dot-2017.9.19";
211 211 doCheck = false;
212 212 src = fetchurl {
213 213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
214 214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
215 215 };
216 216 meta = {
217 217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
218 218 };
219 219 };
220 220 "greenlet" = super.buildPythonPackage {
221 221 name = "greenlet-0.4.15";
222 222 doCheck = false;
223 223 src = fetchurl {
224 224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
225 225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
226 226 };
227 227 meta = {
228 228 license = [ pkgs.lib.licenses.mit ];
229 229 };
230 230 };
231 231 "gunicorn" = super.buildPythonPackage {
232 232 name = "gunicorn-19.9.0";
233 233 doCheck = false;
234 234 src = fetchurl {
235 235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
236 236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
237 237 };
238 238 meta = {
239 239 license = [ pkgs.lib.licenses.mit ];
240 240 };
241 241 };
242 242 "hg-evolve" = super.buildPythonPackage {
243 243 name = "hg-evolve-9.1.0";
244 244 doCheck = false;
245 245 src = fetchurl {
246 246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
248 248 };
249 249 meta = {
250 250 license = [ { fullName = "GPLv2+"; } ];
251 251 };
252 252 };
253 253 "hgsubversion" = super.buildPythonPackage {
254 254 name = "hgsubversion-1.9.3";
255 255 doCheck = false;
256 256 propagatedBuildInputs = [
257 257 self."mercurial"
258 258 self."subvertpy"
259 259 ];
260 260 src = fetchurl {
261 261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
262 262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
263 263 };
264 264 meta = {
265 265 license = [ pkgs.lib.licenses.gpl1 ];
266 266 };
267 267 };
268 268 "hupper" = super.buildPythonPackage {
269 name = "hupper-1.9.1";
269 name = "hupper-1.10.2";
270 270 doCheck = false;
271 271 src = fetchurl {
272 url = "https://files.pythonhosted.org/packages/09/3a/4f215659f31eeffe364a984dba486bfa3907bfcc54b7013bdfe825cebb5f/hupper-1.9.1.tar.gz";
273 sha256 = "0pyg879fv9mbwlnbzw2a3234qqycqs9l97h5mpkmk0bvxhi2471v";
272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
274 274 };
275 275 meta = {
276 276 license = [ pkgs.lib.licenses.mit ];
277 277 };
278 278 };
279 279 "importlib-metadata" = super.buildPythonPackage {
280 name = "importlib-metadata-0.23";
280 name = "importlib-metadata-1.6.0";
281 281 doCheck = false;
282 282 propagatedBuildInputs = [
283 283 self."zipp"
284 self."pathlib2"
284 285 self."contextlib2"
285 286 self."configparser"
286 self."pathlib2"
287 287 ];
288 288 src = fetchurl {
289 url = "https://files.pythonhosted.org/packages/5d/44/636bcd15697791943e2dedda0dbe098d8530a38d113b202817133e0b06c0/importlib_metadata-0.23.tar.gz";
290 sha256 = "09mdqdfv5rdrwz80jh9m379gxmvk2vhjfz0fg53hid00icvxf65a";
289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
291 291 };
292 292 meta = {
293 293 license = [ pkgs.lib.licenses.asl20 ];
294 294 };
295 295 };
296 296 "ipdb" = super.buildPythonPackage {
297 name = "ipdb-0.12";
297 name = "ipdb-0.13.2";
298 298 doCheck = false;
299 299 propagatedBuildInputs = [
300 300 self."setuptools"
301 301 self."ipython"
302 302 ];
303 303 src = fetchurl {
304 url = "https://files.pythonhosted.org/packages/6d/43/c3c2e866a8803e196d6209595020a4a6db1a3c5d07c01455669497ae23d0/ipdb-0.12.tar.gz";
305 sha256 = "1khr2n7xfy8hg65kj1bsrjq9g7656pp0ybfa8abpbzpdawji3qnw";
304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
306 306 };
307 307 meta = {
308 308 license = [ pkgs.lib.licenses.bsdOriginal ];
309 309 };
310 310 };
311 311 "ipython" = super.buildPythonPackage {
312 312 name = "ipython-5.1.0";
313 313 doCheck = false;
314 314 propagatedBuildInputs = [
315 315 self."setuptools"
316 316 self."decorator"
317 317 self."pickleshare"
318 318 self."simplegeneric"
319 319 self."traitlets"
320 320 self."prompt-toolkit"
321 321 self."pygments"
322 322 self."pexpect"
323 323 self."backports.shutil-get-terminal-size"
324 324 self."pathlib2"
325 325 self."pexpect"
326 326 ];
327 327 src = fetchurl {
328 328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
329 329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
330 330 };
331 331 meta = {
332 332 license = [ pkgs.lib.licenses.bsdOriginal ];
333 333 };
334 334 };
335 335 "ipython-genutils" = super.buildPythonPackage {
336 336 name = "ipython-genutils-0.2.0";
337 337 doCheck = false;
338 338 src = fetchurl {
339 339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
340 340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
341 341 };
342 342 meta = {
343 343 license = [ pkgs.lib.licenses.bsdOriginal ];
344 344 };
345 345 };
346 346 "mako" = super.buildPythonPackage {
347 347 name = "mako-1.1.0";
348 348 doCheck = false;
349 349 propagatedBuildInputs = [
350 350 self."markupsafe"
351 351 ];
352 352 src = fetchurl {
353 353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
355 355 };
356 356 meta = {
357 357 license = [ pkgs.lib.licenses.mit ];
358 358 };
359 359 };
360 360 "markupsafe" = super.buildPythonPackage {
361 361 name = "markupsafe-1.1.1";
362 362 doCheck = false;
363 363 src = fetchurl {
364 364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
366 366 };
367 367 meta = {
368 368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
369 369 };
370 370 };
371 371 "mercurial" = super.buildPythonPackage {
372 372 name = "mercurial-5.1.1";
373 373 doCheck = false;
374 374 src = fetchurl {
375 375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
380 380 };
381 381 };
382 382 "mock" = super.buildPythonPackage {
383 383 name = "mock-3.0.5";
384 384 doCheck = false;
385 385 propagatedBuildInputs = [
386 386 self."six"
387 387 self."funcsigs"
388 388 ];
389 389 src = fetchurl {
390 390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
392 392 };
393 393 meta = {
394 394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
395 395 };
396 396 };
397 397 "more-itertools" = super.buildPythonPackage {
398 398 name = "more-itertools-5.0.0";
399 399 doCheck = false;
400 400 propagatedBuildInputs = [
401 401 self."six"
402 402 ];
403 403 src = fetchurl {
404 404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
405 405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
406 406 };
407 407 meta = {
408 408 license = [ pkgs.lib.licenses.mit ];
409 409 };
410 410 };
411 411 "msgpack-python" = super.buildPythonPackage {
412 412 name = "msgpack-python-0.5.6";
413 413 doCheck = false;
414 414 src = fetchurl {
415 415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
416 416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
417 417 };
418 418 meta = {
419 419 license = [ pkgs.lib.licenses.asl20 ];
420 420 };
421 421 };
422 422 "packaging" = super.buildPythonPackage {
423 name = "packaging-19.2";
423 name = "packaging-20.3";
424 424 doCheck = false;
425 425 propagatedBuildInputs = [
426 426 self."pyparsing"
427 427 self."six"
428 428 ];
429 429 src = fetchurl {
430 url = "https://files.pythonhosted.org/packages/5a/2f/449ded84226d0e2fda8da9252e5ee7731bdf14cd338f622dfcd9934e0377/packaging-19.2.tar.gz";
431 sha256 = "0izwlz9h0bw171a1chr311g2y7n657zjaf4mq4rgm8pp9lbj9f98";
430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
432 432 };
433 433 meta = {
434 434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 435 };
436 436 };
437 437 "pastedeploy" = super.buildPythonPackage {
438 name = "pastedeploy-2.0.1";
438 name = "pastedeploy-2.1.0";
439 439 doCheck = false;
440 440 src = fetchurl {
441 url = "https://files.pythonhosted.org/packages/19/a0/5623701df7e2478a68a1b685d1a84518024eef994cde7e4da8449a31616f/PasteDeploy-2.0.1.tar.gz";
442 sha256 = "02imfbbx1mi2h546f3sr37m47dk9qizaqhzzlhx8bkzxa6fzn8yl";
441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
443 443 };
444 444 meta = {
445 445 license = [ pkgs.lib.licenses.mit ];
446 446 };
447 447 };
448 448 "pathlib2" = super.buildPythonPackage {
449 449 name = "pathlib2-2.3.5";
450 450 doCheck = false;
451 451 propagatedBuildInputs = [
452 452 self."six"
453 453 self."scandir"
454 454 ];
455 455 src = fetchurl {
456 456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
458 458 };
459 459 meta = {
460 460 license = [ pkgs.lib.licenses.mit ];
461 461 };
462 462 };
463 463 "pexpect" = super.buildPythonPackage {
464 name = "pexpect-4.7.0";
464 name = "pexpect-4.8.0";
465 465 doCheck = false;
466 466 propagatedBuildInputs = [
467 467 self."ptyprocess"
468 468 ];
469 469 src = fetchurl {
470 url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
471 sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
472 472 };
473 473 meta = {
474 474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
475 475 };
476 476 };
477 477 "pickleshare" = super.buildPythonPackage {
478 478 name = "pickleshare-0.7.5";
479 479 doCheck = false;
480 480 propagatedBuildInputs = [
481 481 self."pathlib2"
482 482 ];
483 483 src = fetchurl {
484 484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
485 485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
486 486 };
487 487 meta = {
488 488 license = [ pkgs.lib.licenses.mit ];
489 489 };
490 490 };
491 491 "plaster" = super.buildPythonPackage {
492 492 name = "plaster-1.0";
493 493 doCheck = false;
494 494 propagatedBuildInputs = [
495 495 self."setuptools"
496 496 ];
497 497 src = fetchurl {
498 498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
499 499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
500 500 };
501 501 meta = {
502 502 license = [ pkgs.lib.licenses.mit ];
503 503 };
504 504 };
505 505 "plaster-pastedeploy" = super.buildPythonPackage {
506 506 name = "plaster-pastedeploy-0.7";
507 507 doCheck = false;
508 508 propagatedBuildInputs = [
509 509 self."pastedeploy"
510 510 self."plaster"
511 511 ];
512 512 src = fetchurl {
513 513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
514 514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
515 515 };
516 516 meta = {
517 517 license = [ pkgs.lib.licenses.mit ];
518 518 };
519 519 };
520 520 "pluggy" = super.buildPythonPackage {
521 521 name = "pluggy-0.13.1";
522 522 doCheck = false;
523 523 propagatedBuildInputs = [
524 524 self."importlib-metadata"
525 525 ];
526 526 src = fetchurl {
527 527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
529 529 };
530 530 meta = {
531 531 license = [ pkgs.lib.licenses.mit ];
532 532 };
533 533 };
534 534 "prompt-toolkit" = super.buildPythonPackage {
535 535 name = "prompt-toolkit-1.0.18";
536 536 doCheck = false;
537 537 propagatedBuildInputs = [
538 538 self."six"
539 539 self."wcwidth"
540 540 ];
541 541 src = fetchurl {
542 542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
544 544 };
545 545 meta = {
546 546 license = [ pkgs.lib.licenses.bsdOriginal ];
547 547 };
548 548 };
549 549 "psutil" = super.buildPythonPackage {
550 name = "psutil-5.6.5";
550 name = "psutil-5.7.0";
551 551 doCheck = false;
552 552 src = fetchurl {
553 url = "https://files.pythonhosted.org/packages/03/9a/95c4b3d0424426e5fd94b5302ff74cea44d5d4f53466e1228ac8e73e14b4/psutil-5.6.5.tar.gz";
554 sha256 = "0isil5jxwwd8awz54qk28rpgjg43i5l6yl70g40vxwa4r4m56lfh";
553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
555 555 };
556 556 meta = {
557 557 license = [ pkgs.lib.licenses.bsdOriginal ];
558 558 };
559 559 };
560 560 "ptyprocess" = super.buildPythonPackage {
561 561 name = "ptyprocess-0.6.0";
562 562 doCheck = false;
563 563 src = fetchurl {
564 564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
565 565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
566 566 };
567 567 meta = {
568 568 license = [ ];
569 569 };
570 570 };
571 571 "py" = super.buildPythonPackage {
572 572 name = "py-1.8.0";
573 573 doCheck = false;
574 574 src = fetchurl {
575 575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
577 577 };
578 578 meta = {
579 579 license = [ pkgs.lib.licenses.mit ];
580 580 };
581 581 };
582 582 "pycparser" = super.buildPythonPackage {
583 name = "pycparser-2.19";
583 name = "pycparser-2.20";
584 584 doCheck = false;
585 585 src = fetchurl {
586 url = "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz";
587 sha256 = "1cr5dcj9628lkz1qlwq3fv97c25363qppkmcayqvd05dpy573259";
586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
588 588 };
589 589 meta = {
590 590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 591 };
592 592 };
593 593 "pygit2" = super.buildPythonPackage {
594 594 name = "pygit2-0.28.2";
595 595 doCheck = false;
596 596 propagatedBuildInputs = [
597 597 self."cffi"
598 598 self."six"
599 599 ];
600 600 src = fetchurl {
601 601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 603 };
604 604 meta = {
605 605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 606 };
607 607 };
608 608 "pygments" = super.buildPythonPackage {
609 609 name = "pygments-2.4.2";
610 610 doCheck = false;
611 611 src = fetchurl {
612 612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
613 613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
614 614 };
615 615 meta = {
616 616 license = [ pkgs.lib.licenses.bsdOriginal ];
617 617 };
618 618 };
619 619 "pyparsing" = super.buildPythonPackage {
620 name = "pyparsing-2.4.5";
620 name = "pyparsing-2.4.7";
621 621 doCheck = false;
622 622 src = fetchurl {
623 url = "https://files.pythonhosted.org/packages/00/32/8076fa13e832bb4dcff379f18f228e5a53412be0631808b9ca2610c0f566/pyparsing-2.4.5.tar.gz";
624 sha256 = "0fk8gsybiw1gm146mkjdjvaajwh20xwvpv4j7syh2zrnpq0j19jc";
623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
625 625 };
626 626 meta = {
627 627 license = [ pkgs.lib.licenses.mit ];
628 628 };
629 629 };
630 630 "pyramid" = super.buildPythonPackage {
631 631 name = "pyramid-1.10.4";
632 632 doCheck = false;
633 633 propagatedBuildInputs = [
634 634 self."hupper"
635 635 self."plaster"
636 636 self."plaster-pastedeploy"
637 637 self."setuptools"
638 638 self."translationstring"
639 639 self."venusian"
640 640 self."webob"
641 641 self."zope.deprecation"
642 642 self."zope.interface"
643 643 self."repoze.lru"
644 644 ];
645 645 src = fetchurl {
646 646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
647 647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
648 648 };
649 649 meta = {
650 650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
651 651 };
652 652 };
653 653 "pyramid-mako" = super.buildPythonPackage {
654 654 name = "pyramid-mako-1.1.0";
655 655 doCheck = false;
656 656 propagatedBuildInputs = [
657 657 self."pyramid"
658 658 self."mako"
659 659 ];
660 660 src = fetchurl {
661 661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
663 663 };
664 664 meta = {
665 665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
666 666 };
667 667 };
668 668 "pytest" = super.buildPythonPackage {
669 669 name = "pytest-4.6.5";
670 670 doCheck = false;
671 671 propagatedBuildInputs = [
672 672 self."py"
673 673 self."six"
674 674 self."packaging"
675 675 self."attrs"
676 676 self."atomicwrites"
677 677 self."pluggy"
678 678 self."importlib-metadata"
679 679 self."wcwidth"
680 680 self."funcsigs"
681 681 self."pathlib2"
682 682 self."more-itertools"
683 683 ];
684 684 src = fetchurl {
685 685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
687 687 };
688 688 meta = {
689 689 license = [ pkgs.lib.licenses.mit ];
690 690 };
691 691 };
692 692 "pytest-cov" = super.buildPythonPackage {
693 693 name = "pytest-cov-2.7.1";
694 694 doCheck = false;
695 695 propagatedBuildInputs = [
696 696 self."pytest"
697 697 self."coverage"
698 698 ];
699 699 src = fetchurl {
700 700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
702 702 };
703 703 meta = {
704 704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
705 705 };
706 706 };
707 707 "pytest-profiling" = super.buildPythonPackage {
708 708 name = "pytest-profiling-1.7.0";
709 709 doCheck = false;
710 710 propagatedBuildInputs = [
711 711 self."six"
712 712 self."pytest"
713 713 self."gprof2dot"
714 714 ];
715 715 src = fetchurl {
716 716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
718 718 };
719 719 meta = {
720 720 license = [ pkgs.lib.licenses.mit ];
721 721 };
722 722 };
723 723 "pytest-runner" = super.buildPythonPackage {
724 724 name = "pytest-runner-5.1";
725 725 doCheck = false;
726 726 src = fetchurl {
727 727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
729 729 };
730 730 meta = {
731 731 license = [ pkgs.lib.licenses.mit ];
732 732 };
733 733 };
734 734 "pytest-sugar" = super.buildPythonPackage {
735 735 name = "pytest-sugar-0.9.2";
736 736 doCheck = false;
737 737 propagatedBuildInputs = [
738 738 self."pytest"
739 739 self."termcolor"
740 740 self."packaging"
741 741 ];
742 742 src = fetchurl {
743 743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
745 745 };
746 746 meta = {
747 747 license = [ pkgs.lib.licenses.bsdOriginal ];
748 748 };
749 749 };
750 750 "pytest-timeout" = super.buildPythonPackage {
751 751 name = "pytest-timeout-1.3.3";
752 752 doCheck = false;
753 753 propagatedBuildInputs = [
754 754 self."pytest"
755 755 ];
756 756 src = fetchurl {
757 757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
759 759 };
760 760 meta = {
761 761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
762 762 };
763 763 };
764 764 "redis" = super.buildPythonPackage {
765 name = "redis-3.3.11";
765 name = "redis-3.4.1";
766 766 doCheck = false;
767 767 src = fetchurl {
768 url = "https://files.pythonhosted.org/packages/06/ca/00557c74279d2f256d3c42cabf237631355f3a132e4c74c2000e6647ad98/redis-3.3.11.tar.gz";
769 sha256 = "1hicqbi5xl92hhml82awrr2rxl9jar5fp8nbcycj9qgmsdwc43wd";
768 url = "https://files.pythonhosted.org/packages/ef/2e/2c0f59891db7db087a7eeaa79bc7c7f2c039e71a2b5b0a41391e9d462926/redis-3.4.1.tar.gz";
769 sha256 = "07yaj0j9fs7xdkg5bg926fa990khyigjbp31si8ai20vj8sv7kqd";
770 770 };
771 771 meta = {
772 772 license = [ pkgs.lib.licenses.mit ];
773 773 };
774 774 };
775 775 "repoze.lru" = super.buildPythonPackage {
776 776 name = "repoze.lru-0.7";
777 777 doCheck = false;
778 778 src = fetchurl {
779 779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
780 780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
781 781 };
782 782 meta = {
783 783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
784 784 };
785 785 };
786 786 "rhodecode-vcsserver" = super.buildPythonPackage {
787 name = "rhodecode-vcsserver-4.18.3";
787 name = "rhodecode-vcsserver-4.19.0";
788 788 buildInputs = [
789 789 self."pytest"
790 790 self."py"
791 791 self."pytest-cov"
792 792 self."pytest-sugar"
793 793 self."pytest-runner"
794 794 self."pytest-profiling"
795 795 self."pytest-timeout"
796 796 self."gprof2dot"
797 797 self."mock"
798 798 self."cov-core"
799 799 self."coverage"
800 800 self."webtest"
801 801 self."beautifulsoup4"
802 802 self."configobj"
803 803 ];
804 804 doCheck = true;
805 805 propagatedBuildInputs = [
806 806 self."configobj"
807 807 self."dogpile.cache"
808 808 self."dogpile.core"
809 809 self."decorator"
810 810 self."dulwich"
811 811 self."hgsubversion"
812 812 self."hg-evolve"
813 813 self."mako"
814 814 self."markupsafe"
815 815 self."mercurial"
816 816 self."msgpack-python"
817 817 self."pastedeploy"
818 818 self."pyramid"
819 819 self."pyramid-mako"
820 820 self."pygit2"
821 821 self."repoze.lru"
822 822 self."redis"
823 823 self."simplejson"
824 824 self."subprocess32"
825 825 self."subvertpy"
826 826 self."six"
827 827 self."translationstring"
828 828 self."webob"
829 829 self."zope.deprecation"
830 830 self."zope.interface"
831 831 self."gevent"
832 832 self."greenlet"
833 833 self."gunicorn"
834 834 self."waitress"
835 835 self."ipdb"
836 836 self."ipython"
837 837 self."pytest"
838 838 self."py"
839 839 self."pytest-cov"
840 840 self."pytest-sugar"
841 841 self."pytest-runner"
842 842 self."pytest-profiling"
843 843 self."pytest-timeout"
844 844 self."gprof2dot"
845 845 self."mock"
846 846 self."cov-core"
847 847 self."coverage"
848 848 self."webtest"
849 849 self."beautifulsoup4"
850 850 ];
851 851 src = ./.;
852 852 meta = {
853 853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
854 854 };
855 855 };
856 856 "scandir" = super.buildPythonPackage {
857 857 name = "scandir-1.10.0";
858 858 doCheck = false;
859 859 src = fetchurl {
860 860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
861 861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
862 862 };
863 863 meta = {
864 864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
865 865 };
866 866 };
867 867 "setproctitle" = super.buildPythonPackage {
868 868 name = "setproctitle-1.1.10";
869 869 doCheck = false;
870 870 src = fetchurl {
871 871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
872 872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
873 873 };
874 874 meta = {
875 875 license = [ pkgs.lib.licenses.bsdOriginal ];
876 876 };
877 877 };
878 878 "setuptools" = super.buildPythonPackage {
879 name = "setuptools-44.0.0";
879 name = "setuptools-44.1.0";
880 880 doCheck = false;
881 881 src = fetchurl {
882 url = "https://files.pythonhosted.org/packages/b0/f3/44da7482ac6da3f36f68e253cb04de37365b3dba9036a3c70773b778b485/setuptools-44.0.0.zip";
883 sha256 = "025h5cnxcmda1893l6i12hrwdvs1n8r31qs6q4pkif2v7rrggfp5";
882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
884 884 };
885 885 meta = {
886 886 license = [ pkgs.lib.licenses.mit ];
887 887 };
888 888 };
889 889 "simplegeneric" = super.buildPythonPackage {
890 890 name = "simplegeneric-0.8.1";
891 891 doCheck = false;
892 892 src = fetchurl {
893 893 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
894 894 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
895 895 };
896 896 meta = {
897 897 license = [ pkgs.lib.licenses.zpl21 ];
898 898 };
899 899 };
900 900 "simplejson" = super.buildPythonPackage {
901 901 name = "simplejson-3.16.0";
902 902 doCheck = false;
903 903 src = fetchurl {
904 904 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
905 905 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
906 906 };
907 907 meta = {
908 908 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
909 909 };
910 910 };
911 911 "six" = super.buildPythonPackage {
912 912 name = "six-1.11.0";
913 913 doCheck = false;
914 914 src = fetchurl {
915 915 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
916 916 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
917 917 };
918 918 meta = {
919 919 license = [ pkgs.lib.licenses.mit ];
920 920 };
921 921 };
922 922 "subprocess32" = super.buildPythonPackage {
923 923 name = "subprocess32-3.5.4";
924 924 doCheck = false;
925 925 src = fetchurl {
926 926 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
927 927 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
928 928 };
929 929 meta = {
930 930 license = [ pkgs.lib.licenses.psfl ];
931 931 };
932 932 };
933 933 "subvertpy" = super.buildPythonPackage {
934 934 name = "subvertpy-0.10.1";
935 935 doCheck = false;
936 936 src = fetchurl {
937 937 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
938 938 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
939 939 };
940 940 meta = {
941 941 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
942 942 };
943 943 };
944 944 "termcolor" = super.buildPythonPackage {
945 945 name = "termcolor-1.1.0";
946 946 doCheck = false;
947 947 src = fetchurl {
948 948 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
949 949 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
950 950 };
951 951 meta = {
952 952 license = [ pkgs.lib.licenses.mit ];
953 953 };
954 954 };
955 955 "traitlets" = super.buildPythonPackage {
956 956 name = "traitlets-4.3.3";
957 957 doCheck = false;
958 958 propagatedBuildInputs = [
959 959 self."ipython-genutils"
960 960 self."six"
961 961 self."decorator"
962 962 self."enum34"
963 963 ];
964 964 src = fetchurl {
965 965 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
966 966 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
967 967 };
968 968 meta = {
969 969 license = [ pkgs.lib.licenses.bsdOriginal ];
970 970 };
971 971 };
972 972 "translationstring" = super.buildPythonPackage {
973 973 name = "translationstring-1.3";
974 974 doCheck = false;
975 975 src = fetchurl {
976 976 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
977 977 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
978 978 };
979 979 meta = {
980 980 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
981 981 };
982 982 };
983 983 "venusian" = super.buildPythonPackage {
984 984 name = "venusian-1.2.0";
985 985 doCheck = false;
986 986 src = fetchurl {
987 987 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
988 988 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
989 989 };
990 990 meta = {
991 991 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
992 992 };
993 993 };
994 994 "waitress" = super.buildPythonPackage {
995 995 name = "waitress-1.3.1";
996 996 doCheck = false;
997 997 src = fetchurl {
998 998 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
999 999 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1000 1000 };
1001 1001 meta = {
1002 1002 license = [ pkgs.lib.licenses.zpl21 ];
1003 1003 };
1004 1004 };
1005 1005 "wcwidth" = super.buildPythonPackage {
1006 name = "wcwidth-0.1.7";
1006 name = "wcwidth-0.1.9";
1007 1007 doCheck = false;
1008 1008 src = fetchurl {
1009 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
1010 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
1009 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1010 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1011 1011 };
1012 1012 meta = {
1013 1013 license = [ pkgs.lib.licenses.mit ];
1014 1014 };
1015 1015 };
1016 1016 "webob" = super.buildPythonPackage {
1017 1017 name = "webob-1.8.5";
1018 1018 doCheck = false;
1019 1019 src = fetchurl {
1020 1020 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1021 1021 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1022 1022 };
1023 1023 meta = {
1024 1024 license = [ pkgs.lib.licenses.mit ];
1025 1025 };
1026 1026 };
1027 1027 "webtest" = super.buildPythonPackage {
1028 name = "webtest-2.0.33";
1028 name = "webtest-2.0.34";
1029 1029 doCheck = false;
1030 1030 propagatedBuildInputs = [
1031 1031 self."six"
1032 1032 self."webob"
1033 1033 self."waitress"
1034 1034 self."beautifulsoup4"
1035 1035 ];
1036 1036 src = fetchurl {
1037 url = "https://files.pythonhosted.org/packages/a8/b0/ffc9413b637dbe26e291429bb0f6ed731e518d0cd03da28524a8fe2e8a8f/WebTest-2.0.33.tar.gz";
1038 sha256 = "1l3z0cwqslsf4rcrhi2gr8kdfh74wn2dw76376i4g9i38gz8wd21";
1037 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1038 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1039 1039 };
1040 1040 meta = {
1041 1041 license = [ pkgs.lib.licenses.mit ];
1042 1042 };
1043 1043 };
1044 1044 "zipp" = super.buildPythonPackage {
1045 name = "zipp-0.6.0";
1045 name = "zipp-1.2.0";
1046 1046 doCheck = false;
1047 1047 propagatedBuildInputs = [
1048 self."more-itertools"
1048 self."contextlib2"
1049 1049 ];
1050 1050 src = fetchurl {
1051 url = "https://files.pythonhosted.org/packages/57/dd/585d728479d97d25aeeb9aa470d36a4ad8d0ba5610f84e14770128ce6ff7/zipp-0.6.0.tar.gz";
1052 sha256 = "13ndkf7vklw978a4gdl1yfvn8hch28429a0iam67sg4nrp5v261p";
1051 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1052 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1053 1053 };
1054 1054 meta = {
1055 1055 license = [ pkgs.lib.licenses.mit ];
1056 1056 };
1057 1057 };
1058 1058 "zope.deprecation" = super.buildPythonPackage {
1059 1059 name = "zope.deprecation-4.4.0";
1060 1060 doCheck = false;
1061 1061 propagatedBuildInputs = [
1062 1062 self."setuptools"
1063 1063 ];
1064 1064 src = fetchurl {
1065 1065 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1066 1066 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1067 1067 };
1068 1068 meta = {
1069 1069 license = [ pkgs.lib.licenses.zpl21 ];
1070 1070 };
1071 1071 };
1072 1072 "zope.interface" = super.buildPythonPackage {
1073 1073 name = "zope.interface-4.6.0";
1074 1074 doCheck = false;
1075 1075 propagatedBuildInputs = [
1076 1076 self."setuptools"
1077 1077 ];
1078 1078 src = fetchurl {
1079 1079 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1080 1080 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1081 1081 };
1082 1082 meta = {
1083 1083 license = [ pkgs.lib.licenses.zpl21 ];
1084 1084 };
1085 1085 };
1086 1086
1087 1087 ### Test requirements
1088 1088
1089 1089
1090 1090 }
@@ -1,48 +1,48 b''
1 1 ## dependencies
2 2
3 3 # our custom configobj
4 4 https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626#egg=configobj==5.0.6
5 5
6 6 dogpile.cache==0.9.0
7 7 dogpile.core==0.4.1
8 8 decorator==4.1.2
9 9 dulwich==0.13.0
10 10 hgsubversion==1.9.3
11 11 hg-evolve==9.1.0
12 12 mako==1.1.0
13 13 markupsafe==1.1.1
14 14 mercurial==5.1.1
15 15 msgpack-python==0.5.6
16 16
17 pastedeploy==2.0.1
17 pastedeploy==2.1.0
18 18 pyramid==1.10.4
19 19 pyramid-mako==1.1.0
20 20 pygit2==0.28.2
21 21
22 22 repoze.lru==0.7
23 redis==3.3.11
23 redis==3.4.1
24 24 simplejson==3.16.0
25 25 subprocess32==3.5.4
26 26 subvertpy==0.10.1
27 27
28 28 six==1.11.0
29 29 translationstring==1.3
30 30 webob==1.8.5
31 31 zope.deprecation==4.4.0
32 32 zope.interface==4.6.0
33 33
34 34 ## http servers
35 gevent==1.4.0
35 gevent==1.5.0
36 36 greenlet==0.4.15
37 37 gunicorn==19.9.0
38 38 waitress==1.3.1
39 39
40 40 ## debug
41 ipdb==0.12.0
41 ipdb==0.13.2
42 42 ipython==5.1.0
43 43
44 44 ## test related requirements
45 45 -r requirements_test.txt
46 46
47 47 ## uncomment to add the debug libraries
48 48 #-r requirements_debug.txt
@@ -1,18 +1,18 b''
1 1 # contains not directly required libraries we want to pin the version.
2 2
3 3 atomicwrites==1.3.0
4 4 attrs==19.3.0
5 5 contextlib2==0.6.0.post1
6 6 cffi==1.12.3
7 hupper==1.9.1
8 importlib-metadata==0.23
9 packaging==19.2.0
7 hupper==1.10.2
8 importlib-metadata==1.6.0
9 packaging==20.3
10 10 pathlib2==2.3.5
11 11 pygments==2.4.2
12 pyparsing==2.4.5
13 psutil==5.6.5
12 pyparsing==2.4.7
13 psutil==5.7.0
14 14 pluggy==0.13.1
15 15 scandir==1.10.0
16 16 setproctitle==1.1.10
17 17 venusian==1.2.0
18 wcwidth==0.1.7
18 wcwidth==0.1.9
@@ -1,16 +1,16 b''
1 1 # test related requirements
2 2 pytest==4.6.5
3 3 py==1.8.0
4 4 pytest-cov==2.7.1
5 5 pytest-sugar==0.9.2
6 6 pytest-runner==5.1.0
7 7 pytest-profiling==1.7.0
8 8 pytest-timeout==1.3.3
9 9 gprof2dot==2017.9.19
10 10
11 11 mock==3.0.5
12 12 cov-core==1.15.0
13 13 coverage==4.5.4
14 14
15 webtest==2.0.33
15 webtest==2.0.34
16 16 beautifulsoup4==4.6.3
@@ -1,1 +1,1 b''
1 4.18.3 No newline at end of file
1 4.19.0 No newline at end of file
@@ -1,28 +1,28 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import pkgutil
19 19
20 20
21 21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
22 22
23 23 # link to config for pyramid
24 24 CONFIG = {}
25 25
26 26 # Populated with the settings dictionary from application init in
27 27 #
28 28 PYRAMID_SETTINGS = {}
@@ -1,76 +1,76 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 23 from vcsserver.lib.rc_cache import region_meta
24 24 log = logging.getLogger(__name__)
25 25
26 26
27 27 class RepoFactory(object):
28 28 """
29 29 Utility to create instances of repository
30 30
31 31 It provides internal caching of the `repo` object based on
32 32 the :term:`call context`.
33 33 """
34 34 repo_type = None
35 35
36 36 def __init__(self):
37 37 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
38 38
39 39 def _create_config(self, path, config):
40 40 config = {}
41 41 return config
42 42
43 43 def _create_repo(self, wire, create):
44 44 raise NotImplementedError()
45 45
46 46 def repo(self, wire, create=False):
47 47 raise NotImplementedError()
48 48
49 49
50 50 def obfuscate_qs(query_string):
51 51 if query_string is None:
52 52 return None
53 53
54 54 parsed = []
55 55 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
56 56 if k in ['auth_token', 'api_key']:
57 57 v = "*****"
58 58 parsed.append((k, v))
59 59
60 60 return '&'.join('{}{}'.format(
61 61 k, '={}'.format(v) if v else '') for k, v in parsed)
62 62
63 63
64 64 def raise_from_original(new_type):
65 65 """
66 66 Raise a new exception type with original args and traceback.
67 67 """
68 68 exc_type, exc_value, exc_traceback = sys.exc_info()
69 69 new_exc = new_type(*exc_value.args)
70 70 # store the original traceback into the new exc
71 71 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
72 72
73 73 try:
74 74 raise new_exc, None, exc_traceback
75 75 finally:
76 76 del exc_traceback
@@ -1,117 +1,121 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 27 from pyramid.httpexceptions import HTTPLocked, HTTPForbidden
28 28
29 29
30 30 def _make_exception(kind, org_exc, *args):
31 31 """
32 32 Prepares a base `Exception` instance to be sent over the wire.
33 33
34 34 To give our caller a hint what this is about, it will attach an attribute
35 35 `_vcs_kind` to the exception.
36 36 """
37 37 exc = Exception(*args)
38 38 exc._vcs_kind = kind
39 39 exc._org_exc = org_exc
40 40 exc._org_exc_tb = getattr(org_exc, '_org_exc_tb', '')
41 41 return exc
42 42
43 43
44 44 def AbortException(org_exc=None):
45 45 def _make_exception_wrapper(*args):
46 46 return _make_exception('abort', org_exc, *args)
47 47 return _make_exception_wrapper
48 48
49 49
50 50 def ArchiveException(org_exc=None):
51 51 def _make_exception_wrapper(*args):
52 52 return _make_exception('archive', org_exc, *args)
53 53 return _make_exception_wrapper
54 54
55 55
56 56 def LookupException(org_exc=None):
57 57 def _make_exception_wrapper(*args):
58 58 return _make_exception('lookup', org_exc, *args)
59 59 return _make_exception_wrapper
60 60
61 61
62 62 def VcsException(org_exc=None):
63 63 def _make_exception_wrapper(*args):
64 64 return _make_exception('error', org_exc, *args)
65 65 return _make_exception_wrapper
66 66
67 67
68 68 def RepositoryLockedException(org_exc=None):
69 69 def _make_exception_wrapper(*args):
70 70 return _make_exception('repo_locked', org_exc, *args)
71 71 return _make_exception_wrapper
72 72
73 73
74 74 def RepositoryBranchProtectedException(org_exc=None):
75 75 def _make_exception_wrapper(*args):
76 76 return _make_exception('repo_branch_protected', org_exc, *args)
77 77 return _make_exception_wrapper
78 78
79 79
80 80 def RequirementException(org_exc=None):
81 81 def _make_exception_wrapper(*args):
82 82 return _make_exception('requirement', org_exc, *args)
83 83 return _make_exception_wrapper
84 84
85 85
86 86 def UnhandledException(org_exc=None):
87 87 def _make_exception_wrapper(*args):
88 88 return _make_exception('unhandled', org_exc, *args)
89 89 return _make_exception_wrapper
90 90
91 91
92 92 def URLError(org_exc=None):
93 93 def _make_exception_wrapper(*args):
94 94 return _make_exception('url_error', org_exc, *args)
95 95 return _make_exception_wrapper
96 96
97 97
98 98 def SubrepoMergeException(org_exc=None):
99 99 def _make_exception_wrapper(*args):
100 100 return _make_exception('subrepo_merge_error', org_exc, *args)
101 101 return _make_exception_wrapper
102 102
103 103
104 104 class HTTPRepoLocked(HTTPLocked):
105 105 """
106 106 Subclass of HTTPLocked response that allows to set the title and status
107 107 code via constructor arguments.
108 108 """
109 109 def __init__(self, title, status_code=None, **kwargs):
110 110 self.code = status_code or HTTPLocked.code
111 111 self.title = title
112 112 super(HTTPRepoLocked, self).__init__(**kwargs)
113 113
114 114
115 115 class HTTPRepoBranchProtected(HTTPForbidden):
116 116 def __init__(self, *args, **kwargs):
117 117 super(HTTPForbidden, self).__init__(*args, **kwargs)
118
119
120 class RefNotFoundException(KeyError):
121 pass
@@ -1,1181 +1,1192 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import collections
19 19 import logging
20 20 import os
21 21 import posixpath as vcspath
22 22 import re
23 23 import stat
24 24 import traceback
25 25 import urllib
26 26 import urllib2
27 27 from functools import wraps
28 28
29 29 import more_itertools
30 30 import pygit2
31 31 from pygit2 import Repository as LibGit2Repo
32 32 from dulwich import index, objects
33 33 from dulwich.client import HttpGitClient, LocalGitClient
34 34 from dulwich.errors import (
35 35 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 36 MissingCommitError, ObjectMissing, HangupException,
37 37 UnexpectedCommandError)
38 38 from dulwich.repo import Repo as DulwichRepo
39 39 from dulwich.server import update_server_info
40 40
41 41 from vcsserver import exceptions, settings, subprocessio
42 42 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 43 from vcsserver.base import RepoFactory, obfuscate_qs
44 44 from vcsserver.hgcompat import (
45 45 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 46 from vcsserver.git_lfs.lib import LFSOidStore
47 47 from vcsserver.vcs_base import RemoteBase
48 48
49 49 DIR_STAT = stat.S_IFDIR
50 50 FILE_MODE = stat.S_IFMT
51 51 GIT_LINK = objects.S_IFGITLINK
52 52 PEELED_REF_MARKER = '^{}'
53 53
54 54
55 55 log = logging.getLogger(__name__)
56 56
57 57
58 58 def str_to_dulwich(value):
59 59 """
60 60 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 61 """
62 62 return value.decode(settings.WIRE_ENCODING)
63 63
64 64
65 65 def reraise_safe_exceptions(func):
66 66 """Converts Dulwich exceptions to something neutral."""
67 67
68 68 @wraps(func)
69 69 def wrapper(*args, **kwargs):
70 70 try:
71 71 return func(*args, **kwargs)
72 72 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 73 exc = exceptions.LookupException(org_exc=e)
74 74 raise exc(safe_str(e))
75 75 except (HangupException, UnexpectedCommandError) as e:
76 76 exc = exceptions.VcsException(org_exc=e)
77 77 raise exc(safe_str(e))
78 78 except Exception as e:
79 79 # NOTE(marcink): becuase of how dulwich handles some exceptions
80 80 # (KeyError on empty repos), we cannot track this and catch all
81 81 # exceptions, it's an exceptions from other handlers
82 82 #if not hasattr(e, '_vcs_kind'):
83 83 #log.exception("Unhandled exception in git remote call")
84 84 #raise_from_original(exceptions.UnhandledException)
85 85 raise
86 86 return wrapper
87 87
88 88
89 89 class Repo(DulwichRepo):
90 90 """
91 91 A wrapper for dulwich Repo class.
92 92
93 93 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
94 94 "Too many open files" error. We need to close all opened file descriptors
95 95 once the repo object is destroyed.
96 96 """
97 97 def __del__(self):
98 98 if hasattr(self, 'object_store'):
99 99 self.close()
100 100
101 101
102 102 class Repository(LibGit2Repo):
103 103
104 104 def __enter__(self):
105 105 return self
106 106
107 107 def __exit__(self, exc_type, exc_val, exc_tb):
108 108 self.free()
109 109
110 110
111 111 class GitFactory(RepoFactory):
112 112 repo_type = 'git'
113 113
114 114 def _create_repo(self, wire, create, use_libgit2=False):
115 115 if use_libgit2:
116 116 return Repository(wire['path'])
117 117 else:
118 118 repo_path = str_to_dulwich(wire['path'])
119 119 return Repo(repo_path)
120 120
121 121 def repo(self, wire, create=False, use_libgit2=False):
122 122 """
123 123 Get a repository instance for the given path.
124 124 """
125 125 return self._create_repo(wire, create, use_libgit2)
126 126
127 127 def repo_libgit2(self, wire):
128 128 return self.repo(wire, use_libgit2=True)
129 129
130 130
131 131 class GitRemote(RemoteBase):
132 132
133 133 def __init__(self, factory):
134 134 self._factory = factory
135 135 self._bulk_methods = {
136 136 "date": self.date,
137 137 "author": self.author,
138 138 "branch": self.branch,
139 139 "message": self.message,
140 140 "parents": self.parents,
141 141 "_commit": self.revision,
142 142 }
143 143
144 144 def _wire_to_config(self, wire):
145 145 if 'config' in wire:
146 146 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 147 return {}
148 148
149 149 def _remote_conf(self, config):
150 150 params = [
151 151 '-c', 'core.askpass=""',
152 152 ]
153 153 ssl_cert_dir = config.get('vcs_ssl_dir')
154 154 if ssl_cert_dir:
155 155 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 156 return params
157 157
158 158 @reraise_safe_exceptions
159 159 def discover_git_version(self):
160 160 stdout, _ = self.run_git_command(
161 161 {}, ['--version'], _bare=True, _safe=True)
162 162 prefix = 'git version'
163 163 if stdout.startswith(prefix):
164 164 stdout = stdout[len(prefix):]
165 165 return stdout.strip()
166 166
167 167 @reraise_safe_exceptions
168 168 def is_empty(self, wire):
169 169 repo_init = self._factory.repo_libgit2(wire)
170 170 with repo_init as repo:
171 171
172 172 try:
173 173 has_head = repo.head.name
174 174 if has_head:
175 175 return False
176 176
177 177 # NOTE(marcink): check again using more expensive method
178 178 return repo.is_empty
179 179 except Exception:
180 180 pass
181 181
182 182 return True
183 183
184 184 @reraise_safe_exceptions
185 185 def assert_correct_path(self, wire):
186 186 cache_on, context_uid, repo_id = self._cache_on(wire)
187 187 @self.region.conditional_cache_on_arguments(condition=cache_on)
188 188 def _assert_correct_path(_context_uid, _repo_id):
189 189 try:
190 190 repo_init = self._factory.repo_libgit2(wire)
191 191 with repo_init as repo:
192 192 pass
193 193 except pygit2.GitError:
194 194 path = wire.get('path')
195 195 tb = traceback.format_exc()
196 196 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
197 197 return False
198 198
199 199 return True
200 200 return _assert_correct_path(context_uid, repo_id)
201 201
202 202 @reraise_safe_exceptions
203 203 def bare(self, wire):
204 204 repo_init = self._factory.repo_libgit2(wire)
205 205 with repo_init as repo:
206 206 return repo.is_bare
207 207
208 208 @reraise_safe_exceptions
209 209 def blob_as_pretty_string(self, wire, sha):
210 210 repo_init = self._factory.repo_libgit2(wire)
211 211 with repo_init as repo:
212 212 blob_obj = repo[sha]
213 213 blob = blob_obj.data
214 214 return blob
215 215
216 216 @reraise_safe_exceptions
217 217 def blob_raw_length(self, wire, sha):
218 218 cache_on, context_uid, repo_id = self._cache_on(wire)
219 219 @self.region.conditional_cache_on_arguments(condition=cache_on)
220 220 def _blob_raw_length(_repo_id, _sha):
221 221
222 222 repo_init = self._factory.repo_libgit2(wire)
223 223 with repo_init as repo:
224 224 blob = repo[sha]
225 225 return blob.size
226 226
227 227 return _blob_raw_length(repo_id, sha)
228 228
229 229 def _parse_lfs_pointer(self, raw_content):
230 230
231 231 spec_string = 'version https://git-lfs.github.com/spec'
232 232 if raw_content and raw_content.startswith(spec_string):
233 233 pattern = re.compile(r"""
234 234 (?:\n)?
235 235 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
236 236 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
237 237 ^size[ ](?P<oid_size>[0-9]+)\n
238 238 (?:\n)?
239 239 """, re.VERBOSE | re.MULTILINE)
240 240 match = pattern.match(raw_content)
241 241 if match:
242 242 return match.groupdict()
243 243
244 244 return {}
245 245
246 246 @reraise_safe_exceptions
247 247 def is_large_file(self, wire, commit_id):
248 248 cache_on, context_uid, repo_id = self._cache_on(wire)
249 249
250 250 @self.region.conditional_cache_on_arguments(condition=cache_on)
251 251 def _is_large_file(_repo_id, _sha):
252 252 repo_init = self._factory.repo_libgit2(wire)
253 253 with repo_init as repo:
254 254 blob = repo[commit_id]
255 255 if blob.is_binary:
256 256 return {}
257 257
258 258 return self._parse_lfs_pointer(blob.data)
259 259
260 260 return _is_large_file(repo_id, commit_id)
261 261
262 262 @reraise_safe_exceptions
263 263 def is_binary(self, wire, tree_id):
264 264 cache_on, context_uid, repo_id = self._cache_on(wire)
265 265
266 266 @self.region.conditional_cache_on_arguments(condition=cache_on)
267 267 def _is_binary(_repo_id, _tree_id):
268 268 repo_init = self._factory.repo_libgit2(wire)
269 269 with repo_init as repo:
270 270 blob_obj = repo[tree_id]
271 271 return blob_obj.is_binary
272 272
273 273 return _is_binary(repo_id, tree_id)
274 274
275 275 @reraise_safe_exceptions
276 276 def in_largefiles_store(self, wire, oid):
277 277 conf = self._wire_to_config(wire)
278 278 repo_init = self._factory.repo_libgit2(wire)
279 279 with repo_init as repo:
280 280 repo_name = repo.path
281 281
282 282 store_location = conf.get('vcs_git_lfs_store_location')
283 283 if store_location:
284 284
285 285 store = LFSOidStore(
286 286 oid=oid, repo=repo_name, store_location=store_location)
287 287 return store.has_oid()
288 288
289 289 return False
290 290
291 291 @reraise_safe_exceptions
292 292 def store_path(self, wire, oid):
293 293 conf = self._wire_to_config(wire)
294 294 repo_init = self._factory.repo_libgit2(wire)
295 295 with repo_init as repo:
296 296 repo_name = repo.path
297 297
298 298 store_location = conf.get('vcs_git_lfs_store_location')
299 299 if store_location:
300 300 store = LFSOidStore(
301 301 oid=oid, repo=repo_name, store_location=store_location)
302 302 return store.oid_path
303 303 raise ValueError('Unable to fetch oid with path {}'.format(oid))
304 304
305 305 @reraise_safe_exceptions
306 306 def bulk_request(self, wire, rev, pre_load):
307 307 cache_on, context_uid, repo_id = self._cache_on(wire)
308 308 @self.region.conditional_cache_on_arguments(condition=cache_on)
309 309 def _bulk_request(_repo_id, _rev, _pre_load):
310 310 result = {}
311 311 for attr in pre_load:
312 312 try:
313 313 method = self._bulk_methods[attr]
314 314 args = [wire, rev]
315 315 result[attr] = method(*args)
316 316 except KeyError as e:
317 317 raise exceptions.VcsException(e)(
318 318 "Unknown bulk attribute: %s" % attr)
319 319 return result
320 320
321 321 return _bulk_request(repo_id, rev, sorted(pre_load))
322 322
323 323 def _build_opener(self, url):
324 324 handlers = []
325 325 url_obj = url_parser(url)
326 326 _, authinfo = url_obj.authinfo()
327 327
328 328 if authinfo:
329 329 # create a password manager
330 330 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
331 331 passmgr.add_password(*authinfo)
332 332
333 333 handlers.extend((httpbasicauthhandler(passmgr),
334 334 httpdigestauthhandler(passmgr)))
335 335
336 336 return urllib2.build_opener(*handlers)
337 337
338 338 def _type_id_to_name(self, type_id):
339 339 return {
340 340 1: b'commit',
341 341 2: b'tree',
342 342 3: b'blob',
343 343 4: b'tag'
344 344 }[type_id]
345 345
346 346 @reraise_safe_exceptions
347 347 def check_url(self, url, config):
348 348 url_obj = url_parser(url)
349 349 test_uri, _ = url_obj.authinfo()
350 350 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
351 351 url_obj.query = obfuscate_qs(url_obj.query)
352 352 cleaned_uri = str(url_obj)
353 353 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
354 354
355 355 if not test_uri.endswith('info/refs'):
356 356 test_uri = test_uri.rstrip('/') + '/info/refs'
357 357
358 358 o = self._build_opener(url)
359 359 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
360 360
361 361 q = {"service": 'git-upload-pack'}
362 362 qs = '?%s' % urllib.urlencode(q)
363 363 cu = "%s%s" % (test_uri, qs)
364 364 req = urllib2.Request(cu, None, {})
365 365
366 366 try:
367 367 log.debug("Trying to open URL %s", cleaned_uri)
368 368 resp = o.open(req)
369 369 if resp.code != 200:
370 370 raise exceptions.URLError()('Return Code is not 200')
371 371 except Exception as e:
372 372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 373 # means it cannot be cloned
374 374 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
375 375
376 376 # now detect if it's proper git repo
377 377 gitdata = resp.read()
378 378 if 'service=git-upload-pack' in gitdata:
379 379 pass
380 380 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
381 381 # old style git can return some other format !
382 382 pass
383 383 else:
384 384 raise exceptions.URLError()(
385 385 "url [%s] does not look like an git" % (cleaned_uri,))
386 386
387 387 return True
388 388
389 389 @reraise_safe_exceptions
390 390 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
391 391 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
392 392 remote_refs = self.pull(wire, url, apply_refs=False)
393 393 repo = self._factory.repo(wire)
394 394 if isinstance(valid_refs, list):
395 395 valid_refs = tuple(valid_refs)
396 396
397 397 for k in remote_refs:
398 398 # only parse heads/tags and skip so called deferred tags
399 399 if k.startswith(valid_refs) and not k.endswith(deferred):
400 400 repo[k] = remote_refs[k]
401 401
402 402 if update_after_clone:
403 403 # we want to checkout HEAD
404 404 repo["HEAD"] = remote_refs["HEAD"]
405 405 index.build_index_from_tree(repo.path, repo.index_path(),
406 406 repo.object_store, repo["HEAD"].tree)
407 407
408 408 @reraise_safe_exceptions
409 409 def branch(self, wire, commit_id):
410 410 cache_on, context_uid, repo_id = self._cache_on(wire)
411 411 @self.region.conditional_cache_on_arguments(condition=cache_on)
412 412 def _branch(_context_uid, _repo_id, _commit_id):
413 413 regex = re.compile('^refs/heads')
414 414
415 415 def filter_with(ref):
416 416 return regex.match(ref[0]) and ref[1] == _commit_id
417 417
418 418 branches = filter(filter_with, self.get_refs(wire).items())
419 419 return [x[0].split('refs/heads/')[-1] for x in branches]
420 420
421 421 return _branch(context_uid, repo_id, commit_id)
422 422
423 423 @reraise_safe_exceptions
424 424 def commit_branches(self, wire, commit_id):
425 425 cache_on, context_uid, repo_id = self._cache_on(wire)
426 426 @self.region.conditional_cache_on_arguments(condition=cache_on)
427 427 def _commit_branches(_context_uid, _repo_id, _commit_id):
428 428 repo_init = self._factory.repo_libgit2(wire)
429 429 with repo_init as repo:
430 430 branches = [x for x in repo.branches.with_commit(_commit_id)]
431 431 return branches
432 432
433 433 return _commit_branches(context_uid, repo_id, commit_id)
434 434
435 435 @reraise_safe_exceptions
436 436 def add_object(self, wire, content):
437 437 repo_init = self._factory.repo_libgit2(wire)
438 438 with repo_init as repo:
439 439 blob = objects.Blob()
440 440 blob.set_raw_string(content)
441 441 repo.object_store.add_object(blob)
442 442 return blob.id
443 443
444 444 # TODO: this is quite complex, check if that can be simplified
445 445 @reraise_safe_exceptions
446 446 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
447 447 repo = self._factory.repo(wire)
448 448 object_store = repo.object_store
449 449
450 450 # Create tree and populates it with blobs
451 451 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
452 452
453 453 for node in updated:
454 454 # Compute subdirs if needed
455 455 dirpath, nodename = vcspath.split(node['path'])
456 456 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
457 457 parent = commit_tree
458 458 ancestors = [('', parent)]
459 459
460 460 # Tries to dig for the deepest existing tree
461 461 while dirnames:
462 462 curdir = dirnames.pop(0)
463 463 try:
464 464 dir_id = parent[curdir][1]
465 465 except KeyError:
466 466 # put curdir back into dirnames and stops
467 467 dirnames.insert(0, curdir)
468 468 break
469 469 else:
470 470 # If found, updates parent
471 471 parent = repo[dir_id]
472 472 ancestors.append((curdir, parent))
473 473 # Now parent is deepest existing tree and we need to create
474 474 # subtrees for dirnames (in reverse order)
475 475 # [this only applies for nodes from added]
476 476 new_trees = []
477 477
478 478 blob = objects.Blob.from_string(node['content'])
479 479
480 480 if dirnames:
481 481 # If there are trees which should be created we need to build
482 482 # them now (in reverse order)
483 483 reversed_dirnames = list(reversed(dirnames))
484 484 curtree = objects.Tree()
485 485 curtree[node['node_path']] = node['mode'], blob.id
486 486 new_trees.append(curtree)
487 487 for dirname in reversed_dirnames[:-1]:
488 488 newtree = objects.Tree()
489 489 newtree[dirname] = (DIR_STAT, curtree.id)
490 490 new_trees.append(newtree)
491 491 curtree = newtree
492 492 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
493 493 else:
494 494 parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)
495 495
496 496 new_trees.append(parent)
497 497 # Update ancestors
498 498 reversed_ancestors = reversed(
499 499 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
500 500 for parent, tree, path in reversed_ancestors:
501 501 parent[path] = (DIR_STAT, tree.id)
502 502 object_store.add_object(tree)
503 503
504 504 object_store.add_object(blob)
505 505 for tree in new_trees:
506 506 object_store.add_object(tree)
507 507
508 508 for node_path in removed:
509 509 paths = node_path.split('/')
510 510 tree = commit_tree
511 511 trees = [tree]
512 512 # Traverse deep into the forest...
513 513 for path in paths:
514 514 try:
515 515 obj = repo[tree[path][1]]
516 516 if isinstance(obj, objects.Tree):
517 517 trees.append(obj)
518 518 tree = obj
519 519 except KeyError:
520 520 break
521 521 # Cut down the blob and all rotten trees on the way back...
522 522 for path, tree in reversed(zip(paths, trees)):
523 523 del tree[path]
524 524 if tree:
525 525 # This tree still has elements - don't remove it or any
526 526 # of it's parents
527 527 break
528 528
529 529 object_store.add_object(commit_tree)
530 530
531 531 # Create commit
532 532 commit = objects.Commit()
533 533 commit.tree = commit_tree.id
534 534 for k, v in commit_data.iteritems():
535 535 setattr(commit, k, v)
536 536 object_store.add_object(commit)
537 537
538 538 self.create_branch(wire, branch, commit.id)
539 539
540 540 # dulwich set-ref
541 541 ref = 'refs/heads/%s' % branch
542 542 repo.refs[ref] = commit.id
543 543
544 544 return commit.id
545 545
546 546 @reraise_safe_exceptions
547 547 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
548 548 if url != 'default' and '://' not in url:
549 549 client = LocalGitClient(url)
550 550 else:
551 551 url_obj = url_parser(url)
552 552 o = self._build_opener(url)
553 553 url, _ = url_obj.authinfo()
554 554 client = HttpGitClient(base_url=url, opener=o)
555 555 repo = self._factory.repo(wire)
556 556
557 557 determine_wants = repo.object_store.determine_wants_all
558 558 if refs:
559 559 def determine_wants_requested(references):
560 560 return [references[r] for r in references if r in refs]
561 561 determine_wants = determine_wants_requested
562 562
563 563 try:
564 564 remote_refs = client.fetch(
565 565 path=url, target=repo, determine_wants=determine_wants)
566 566 except NotGitRepository as e:
567 567 log.warning(
568 568 'Trying to fetch from "%s" failed, not a Git repository.', url)
569 569 # Exception can contain unicode which we convert
570 570 raise exceptions.AbortException(e)(repr(e))
571 571
572 572 # mikhail: client.fetch() returns all the remote refs, but fetches only
573 573 # refs filtered by `determine_wants` function. We need to filter result
574 574 # as well
575 575 if refs:
576 576 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
577 577
578 578 if apply_refs:
579 579 # TODO: johbo: Needs proper test coverage with a git repository
580 580 # that contains a tag object, so that we would end up with
581 581 # a peeled ref at this point.
582 582 for k in remote_refs:
583 583 if k.endswith(PEELED_REF_MARKER):
584 584 log.debug("Skipping peeled reference %s", k)
585 585 continue
586 586 repo[k] = remote_refs[k]
587 587
588 588 if refs and not update_after:
589 589 # mikhail: explicitly set the head to the last ref.
590 590 repo['HEAD'] = remote_refs[refs[-1]]
591 591
592 592 if update_after:
593 593 # we want to checkout HEAD
594 594 repo["HEAD"] = remote_refs["HEAD"]
595 595 index.build_index_from_tree(repo.path, repo.index_path(),
596 596 repo.object_store, repo["HEAD"].tree)
597 597 return remote_refs
598 598
599 599 @reraise_safe_exceptions
600 600 def sync_fetch(self, wire, url, refs=None, all_refs=False):
601 601 repo = self._factory.repo(wire)
602 602 if refs and not isinstance(refs, (list, tuple)):
603 603 refs = [refs]
604 604
605 605 config = self._wire_to_config(wire)
606 606 # get all remote refs we'll use to fetch later
607 607 cmd = ['ls-remote']
608 608 if not all_refs:
609 609 cmd += ['--heads', '--tags']
610 610 cmd += [url]
611 611 output, __ = self.run_git_command(
612 612 wire, cmd, fail_on_stderr=False,
613 613 _copts=self._remote_conf(config),
614 614 extra_env={'GIT_TERMINAL_PROMPT': '0'})
615 615
616 616 remote_refs = collections.OrderedDict()
617 617 fetch_refs = []
618 618
619 619 for ref_line in output.splitlines():
620 620 sha, ref = ref_line.split('\t')
621 621 sha = sha.strip()
622 622 if ref in remote_refs:
623 623 # duplicate, skip
624 624 continue
625 625 if ref.endswith(PEELED_REF_MARKER):
626 626 log.debug("Skipping peeled reference %s", ref)
627 627 continue
628 628 # don't sync HEAD
629 629 if ref in ['HEAD']:
630 630 continue
631 631
632 632 remote_refs[ref] = sha
633 633
634 634 if refs and sha in refs:
635 635 # we filter fetch using our specified refs
636 636 fetch_refs.append('{}:{}'.format(ref, ref))
637 637 elif not refs:
638 638 fetch_refs.append('{}:{}'.format(ref, ref))
639 639 log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))
640 640
641 641 if fetch_refs:
642 642 for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
643 643 fetch_refs_chunks = list(chunk)
644 644 log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
645 645 _out, _err = self.run_git_command(
646 646 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
647 647 fail_on_stderr=False,
648 648 _copts=self._remote_conf(config),
649 649 extra_env={'GIT_TERMINAL_PROMPT': '0'})
650 650
651 651 return remote_refs
652 652
653 653 @reraise_safe_exceptions
654 654 def sync_push(self, wire, url, refs=None):
655 655 if not self.check_url(url, wire):
656 656 return
657 657 config = self._wire_to_config(wire)
658 658 self._factory.repo(wire)
659 659 self.run_git_command(
660 660 wire, ['push', url, '--mirror'], fail_on_stderr=False,
661 661 _copts=self._remote_conf(config),
662 662 extra_env={'GIT_TERMINAL_PROMPT': '0'})
663 663
664 664 @reraise_safe_exceptions
665 665 def get_remote_refs(self, wire, url):
666 666 repo = Repo(url)
667 667 return repo.get_refs()
668 668
669 669 @reraise_safe_exceptions
670 670 def get_description(self, wire):
671 671 repo = self._factory.repo(wire)
672 672 return repo.get_description()
673 673
674 674 @reraise_safe_exceptions
675 675 def get_missing_revs(self, wire, rev1, rev2, path2):
676 676 repo = self._factory.repo(wire)
677 677 LocalGitClient(thin_packs=False).fetch(path2, repo)
678 678
679 679 wire_remote = wire.copy()
680 680 wire_remote['path'] = path2
681 681 repo_remote = self._factory.repo(wire_remote)
682 682 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
683 683
684 684 revs = [
685 685 x.commit.id
686 686 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
687 687 return revs
688 688
689 689 @reraise_safe_exceptions
690 def get_object(self, wire, sha):
690 def get_object(self, wire, sha, maybe_unreachable=False):
691 691 cache_on, context_uid, repo_id = self._cache_on(wire)
692 692 @self.region.conditional_cache_on_arguments(condition=cache_on)
693 693 def _get_object(_context_uid, _repo_id, _sha):
694 694 repo_init = self._factory.repo_libgit2(wire)
695 695 with repo_init as repo:
696 696
697 697 missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
698 698 try:
699 699 commit = repo.revparse_single(sha)
700 except (KeyError, ValueError) as e:
700 except KeyError:
701 # NOTE(marcink): KeyError doesn't give us any meaningful information
702 # here, we instead give something more explicit
703 e = exceptions.RefNotFoundException('SHA: %s not found', sha)
704 raise exceptions.LookupException(e)(missing_commit_err)
705 except ValueError as e:
701 706 raise exceptions.LookupException(e)(missing_commit_err)
702 707
703 708 is_tag = False
704 709 if isinstance(commit, pygit2.Tag):
705 710 commit = repo.get(commit.target)
706 711 is_tag = True
707 712
708 713 check_dangling = True
709 714 if is_tag:
710 715 check_dangling = False
711 716
717 if check_dangling and maybe_unreachable:
718 check_dangling = False
719
712 720 # we used a reference and it parsed means we're not having a dangling commit
713 721 if sha != commit.hex:
714 722 check_dangling = False
715 723
716 724 if check_dangling:
717 725 # check for dangling commit
718 726 for branch in repo.branches.with_commit(commit.hex):
719 727 if branch:
720 728 break
721 729 else:
722 raise exceptions.LookupException(None)(missing_commit_err)
730 # NOTE(marcink): Empty error doesn't give us any meaningful information
731 # here, we instead give something more explicit
732 e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
733 raise exceptions.LookupException(e)(missing_commit_err)
723 734
724 735 commit_id = commit.hex
725 736 type_id = commit.type
726 737
727 738 return {
728 739 'id': commit_id,
729 740 'type': self._type_id_to_name(type_id),
730 741 'commit_id': commit_id,
731 742 'idx': 0
732 743 }
733 744
734 745 return _get_object(context_uid, repo_id, sha)
735 746
736 747 @reraise_safe_exceptions
737 748 def get_refs(self, wire):
738 749 cache_on, context_uid, repo_id = self._cache_on(wire)
739 750 @self.region.conditional_cache_on_arguments(condition=cache_on)
740 751 def _get_refs(_context_uid, _repo_id):
741 752
742 753 repo_init = self._factory.repo_libgit2(wire)
743 754 with repo_init as repo:
744 755 regex = re.compile('^refs/(heads|tags)/')
745 756 return {x.name: x.target.hex for x in
746 757 filter(lambda ref: regex.match(ref.name) ,repo.listall_reference_objects())}
747 758
748 759 return _get_refs(context_uid, repo_id)
749 760
750 761 @reraise_safe_exceptions
751 762 def get_branch_pointers(self, wire):
752 763 cache_on, context_uid, repo_id = self._cache_on(wire)
753 764 @self.region.conditional_cache_on_arguments(condition=cache_on)
754 765 def _get_branch_pointers(_context_uid, _repo_id):
755 766
756 767 repo_init = self._factory.repo_libgit2(wire)
757 768 regex = re.compile('^refs/heads')
758 769 with repo_init as repo:
759 770 branches = filter(lambda ref: regex.match(ref.name), repo.listall_reference_objects())
760 771 return {x.target.hex: x.shorthand for x in branches}
761 772
762 773 return _get_branch_pointers(context_uid, repo_id)
763 774
764 775 @reraise_safe_exceptions
765 776 def head(self, wire, show_exc=True):
766 777 cache_on, context_uid, repo_id = self._cache_on(wire)
767 778 @self.region.conditional_cache_on_arguments(condition=cache_on)
768 779 def _head(_context_uid, _repo_id, _show_exc):
769 780 repo_init = self._factory.repo_libgit2(wire)
770 781 with repo_init as repo:
771 782 try:
772 783 return repo.head.peel().hex
773 784 except Exception:
774 785 if show_exc:
775 786 raise
776 787 return _head(context_uid, repo_id, show_exc)
777 788
778 789 @reraise_safe_exceptions
779 790 def init(self, wire):
780 791 repo_path = str_to_dulwich(wire['path'])
781 792 self.repo = Repo.init(repo_path)
782 793
783 794 @reraise_safe_exceptions
784 795 def init_bare(self, wire):
785 796 repo_path = str_to_dulwich(wire['path'])
786 797 self.repo = Repo.init_bare(repo_path)
787 798
788 799 @reraise_safe_exceptions
789 800 def revision(self, wire, rev):
790 801
791 802 cache_on, context_uid, repo_id = self._cache_on(wire)
792 803 @self.region.conditional_cache_on_arguments(condition=cache_on)
793 804 def _revision(_context_uid, _repo_id, _rev):
794 805 repo_init = self._factory.repo_libgit2(wire)
795 806 with repo_init as repo:
796 807 commit = repo[rev]
797 808 obj_data = {
798 809 'id': commit.id.hex,
799 810 }
800 811 # tree objects itself don't have tree_id attribute
801 812 if hasattr(commit, 'tree_id'):
802 813 obj_data['tree'] = commit.tree_id.hex
803 814
804 815 return obj_data
805 816 return _revision(context_uid, repo_id, rev)
806 817
807 818 @reraise_safe_exceptions
808 819 def date(self, wire, commit_id):
809 820 cache_on, context_uid, repo_id = self._cache_on(wire)
810 821 @self.region.conditional_cache_on_arguments(condition=cache_on)
811 822 def _date(_repo_id, _commit_id):
812 823 repo_init = self._factory.repo_libgit2(wire)
813 824 with repo_init as repo:
814 825 commit = repo[commit_id]
815 826
816 827 if hasattr(commit, 'commit_time'):
817 828 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
818 829 else:
819 830 commit = commit.get_object()
820 831 commit_time, commit_time_offset = commit.commit_time, commit.commit_time_offset
821 832
822 833 # TODO(marcink): check dulwich difference of offset vs timezone
823 834 return [commit_time, commit_time_offset]
824 835 return _date(repo_id, commit_id)
825 836
826 837 @reraise_safe_exceptions
827 838 def author(self, wire, commit_id):
828 839 cache_on, context_uid, repo_id = self._cache_on(wire)
829 840 @self.region.conditional_cache_on_arguments(condition=cache_on)
830 841 def _author(_repo_id, _commit_id):
831 842 repo_init = self._factory.repo_libgit2(wire)
832 843 with repo_init as repo:
833 844 commit = repo[commit_id]
834 845
835 846 if hasattr(commit, 'author'):
836 847 author = commit.author
837 848 else:
838 849 author = commit.get_object().author
839 850
840 851 if author.email:
841 852 return u"{} <{}>".format(author.name, author.email)
842 853
843 854 try:
844 855 return u"{}".format(author.name)
845 856 except Exception:
846 857 return u"{}".format(safe_unicode(author.raw_name))
847 858
848 859 return _author(repo_id, commit_id)
849 860
850 861 @reraise_safe_exceptions
851 862 def message(self, wire, commit_id):
852 863 cache_on, context_uid, repo_id = self._cache_on(wire)
853 864 @self.region.conditional_cache_on_arguments(condition=cache_on)
854 865 def _message(_repo_id, _commit_id):
855 866 repo_init = self._factory.repo_libgit2(wire)
856 867 with repo_init as repo:
857 868 commit = repo[commit_id]
858 869 return commit.message
859 870 return _message(repo_id, commit_id)
860 871
# Return the list of parent commit hashes (hex strings) for commit_id.
861 872 @reraise_safe_exceptions
862 873 def parents(self, wire, commit_id):
863 874 cache_on, context_uid, repo_id = self._cache_on(wire)
864 875 @self.region.conditional_cache_on_arguments(condition=cache_on)
865 876 def _parents(_repo_id, _commit_id):
866 877 repo_init = self._factory.repo_libgit2(wire)
867 878 with repo_init as repo:
868 879 commit = repo[commit_id]
# non-commit objects (e.g. annotated tags) must be peeled first
869 880 if hasattr(commit, 'parent_ids'):
870 881 parent_ids = commit.parent_ids
871 882 else:
872 883 parent_ids = commit.get_object().parent_ids
873 884
874 885 return [x.hex for x in parent_ids]
875 886 return _parents(repo_id, commit_id)
876 887
# Return the child commit ids of commit_id by scanning the full
# `git rev-list --all --children` output for lines starting with commit_id.
# NOTE(review): this walks the whole history on every cache miss — O(repo size).
877 888 @reraise_safe_exceptions
878 889 def children(self, wire, commit_id):
879 890 cache_on, context_uid, repo_id = self._cache_on(wire)
880 891 @self.region.conditional_cache_on_arguments(condition=cache_on)
881 892 def _children(_repo_id, _commit_id):
882 893 output, __ = self.run_git_command(
883 894 wire, ['rev-list', '--all', '--children'])
884 895
885 896 child_ids = []
886 897 pat = re.compile(r'^%s' % commit_id)
887 898 for l in output.splitlines():
888 899 if pat.match(l):
# format is: "<sha> <child-sha> <child-sha> ..."; drop the parent itself
889 900 found_ids = l.split(' ')[1:]
890 901 child_ids.extend(found_ids)
891 902
892 903 return child_ids
893 904 return _children(repo_id, commit_id)
894 905
# Create or overwrite (force=True) a git reference `key` pointing at `value`.
895 906 @reraise_safe_exceptions
896 907 def set_refs(self, wire, key, value):
897 908 repo_init = self._factory.repo_libgit2(wire)
898 909 with repo_init as repo:
899 910 repo.references.create(key, value, force=True)
900 911
# Create a local branch at commit_id; with force=False the branch is only
# created when it does not exist yet (an existing branch is left untouched).
901 912 @reraise_safe_exceptions
902 913 def create_branch(self, wire, branch_name, commit_id, force=False):
903 914 repo_init = self._factory.repo_libgit2(wire)
904 915 with repo_init as repo:
905 916 commit = repo[commit_id]
906 917
907 918 if force:
908 919 repo.branches.local.create(branch_name, commit, force=force)
909 920 elif not repo.branches.get(branch_name):
910 921 # create only if that branch isn't existing
911 922 repo.branches.local.create(branch_name, commit, force=force)
912 923
# Delete the git reference named `key` (full ref name, e.g. refs/heads/x).
913 924 @reraise_safe_exceptions
914 925 def remove_ref(self, wire, key):
915 926 repo_init = self._factory.repo_libgit2(wire)
916 927 with repo_init as repo:
917 928 repo.references.delete(key)
918 929
# Delete a tag by short name (expands it to refs/tags/<tag_name>).
919 930 @reraise_safe_exceptions
920 931 def tag_remove(self, wire, tag_name):
921 932 repo_init = self._factory.repo_libgit2(wire)
922 933 with repo_init as repo:
923 934 key = 'refs/tags/{}'.format(tag_name)
924 935 repo.references.delete(key)
925 936
# List tree changes between two commits via dulwich's object store.
# source_id may be falsy, meaning "diff against an empty tree".
926 937 @reraise_safe_exceptions
927 938 def tree_changes(self, wire, source_id, target_id):
928 939 # TODO(marcink): remove this seems it's only used by tests
929 940 repo = self._factory.repo(wire)
930 941 source = repo[source_id].tree if source_id else None
931 942 target = repo[target_id].tree
932 943 result = repo.object_store.tree_changes(source, target)
933 944 return list(result)
934 945
# Look up `path` inside the tree of commit_id; returns a
# (tree_id_hex, type, filemode) triple, or (None, None, None) when the
# path does not exist in that commit. Cached per context/repo/commit/path.
935 946 @reraise_safe_exceptions
936 947 def tree_and_type_for_path(self, wire, commit_id, path):
937 948
938 949 cache_on, context_uid, repo_id = self._cache_on(wire)
939 950 @self.region.conditional_cache_on_arguments(condition=cache_on)
940 951 def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
941 952 repo_init = self._factory.repo_libgit2(wire)
942 953
943 954 with repo_init as repo:
944 955 commit = repo[commit_id]
945 956 try:
946 957 tree = commit.tree[path]
947 958 except KeyError:
948 959 return None, None, None
949 960
950 961 return tree.id.hex, tree.type, tree.filemode
951 962 return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
952 963
# List entries of the tree object tree_id as (name, mode, sha, type)
# tuples; raises ObjectMissing when no such tree exists. Submodule entries
# (git type 'commit') are reported as type 'link' for backward compat.
953 964 @reraise_safe_exceptions
954 965 def tree_items(self, wire, tree_id):
955 966 cache_on, context_uid, repo_id = self._cache_on(wire)
956 967 @self.region.conditional_cache_on_arguments(condition=cache_on)
957 968 def _tree_items(_repo_id, _tree_id):
958 969
959 970 repo_init = self._factory.repo_libgit2(wire)
960 971 with repo_init as repo:
961 972 try:
962 973 tree = repo[tree_id]
963 974 except KeyError:
964 975 raise ObjectMissing('No tree with id: {}'.format(tree_id))
965 976
966 977 result = []
967 978 for item in tree:
968 979 item_sha = item.hex
969 980 item_mode = item.filemode
970 981 item_type = item.type
971 982
972 983 if item_type == 'commit':
973 984 # NOTE(marcink): submodules we translate to 'link' for backward compat
974 985 item_type = 'link'
975 986
976 987 result.append((item.name, item_mode, item_sha, item_type))
977 988 return result
978 989 return _tree_items(repo_id, tree_id)
979 990
# Legacy diff implementation shelling out to `git diff` / `git show`.
# When commit_id_1 is the EMPTY_COMMIT sentinel, `git show` is used and the
# commit header preamble is stripped so output matches `git diff` output.
980 991 @reraise_safe_exceptions
981 992 def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
982 993 """
983 994 Old version that uses subprocess to call diff
984 995 """
985 996
986 997 flags = [
987 998 '-U%s' % context, '--patch',
988 999 '--binary',
989 1000 '--find-renames',
990 1001 '--no-indent-heuristic',
991 1002 # '--indent-heuristic',
992 1003 #'--full-index',
993 1004 #'--abbrev=40'
994 1005 ]
995 1006
996 1007 if opt_ignorews:
997 1008 flags.append('--ignore-all-space')
998 1009
999 1010 if commit_id_1 == self.EMPTY_COMMIT:
1000 1011 cmd = ['show'] + flags + [commit_id_2]
1001 1012 else:
1002 1013 cmd = ['diff'] + flags + [commit_id_1, commit_id_2]
1003 1014
# restrict the diff to a single path when a filter is given
1004 1015 if file_filter:
1005 1016 cmd.extend(['--', file_filter])
1006 1017
1007 1018 diff, __ = self.run_git_command(wire, cmd)
1008 1019 # If we used 'show' command, strip first few lines (until actual diff
1009 1020 # starts)
1010 1021 if commit_id_1 == self.EMPTY_COMMIT:
1011 1022 lines = diff.splitlines()
1012 1023 x = 0
1013 1024 for line in lines:
1014 1025 if line.startswith('diff'):
1015 1026 break
1016 1027 x += 1
1017 1028 # Append new line just like 'diff' command do
1018 1029 diff = '\n'.join(lines[x:]) + '\n'
1019 1030 return diff
1020 1031
# libgit2-based diff between two commits (swap=True so the patch reads
# commit_id_1 -> commit_id_2). EMPTY_COMMIT sentinel means diff against an
# empty tree. With file_filter set, only that single file's patch is
# returned. NOTE(review): the filter matches delta.old_file.path only, so
# a renamed file is not found by its new path — confirm this is intended.
1021 1032 @reraise_safe_exceptions
1022 1033 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
1023 1034 repo_init = self._factory.repo_libgit2(wire)
1024 1035 with repo_init as repo:
1025 1036 swap = True
1026 1037 flags = 0
1027 1038 flags |= pygit2.GIT_DIFF_SHOW_BINARY
1028 1039
1029 1040 if opt_ignorews:
1030 1041 flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE
1031 1042
1032 1043 if commit_id_1 == self.EMPTY_COMMIT:
1033 1044 comm1 = repo[commit_id_2]
1034 1045 diff_obj = comm1.tree.diff_to_tree(
1035 1046 flags=flags, context_lines=context, swap=swap)
1036 1047
1037 1048 else:
1038 1049 comm1 = repo[commit_id_2]
1039 1050 comm2 = repo[commit_id_1]
1040 1051 diff_obj = comm1.tree.diff_to_tree(
1041 1052 comm2.tree, flags=flags, context_lines=context, swap=swap)
# rename detection only makes sense for a two-commit diff
1042 1053 similar_flags = 0
1043 1054 similar_flags |= pygit2.GIT_DIFF_FIND_RENAMES
1044 1055 diff_obj.find_similar(flags=similar_flags)
1045 1056
1046 1057 if file_filter:
1047 1058 for p in diff_obj:
1048 1059 if p.delta.old_file.path == file_filter:
1049 1060 return p.patch or ''
1050 1061 # no matching path == no diff
1051 1062 return ''
1052 1063 return diff_obj.patch or ''
1053 1064
# Return commit ids touching `path`, newest first, up to `limit`
# (limit falsy == unlimited). Uses `rev-list -1` as a fast path for
# limit == 1 and parses 40-hex shas out of the command output.
1054 1065 @reraise_safe_exceptions
1055 1066 def node_history(self, wire, commit_id, path, limit):
1056 1067 cache_on, context_uid, repo_id = self._cache_on(wire)
1057 1068 @self.region.conditional_cache_on_arguments(condition=cache_on)
1058 1069 def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
1059 1070 # optimize for n==1, rev-list is much faster for that use-case
1060 1071 if limit == 1:
1061 1072 cmd = ['rev-list', '-1', commit_id, '--', path]
1062 1073 else:
1063 1074 cmd = ['log']
1064 1075 if limit:
1065 1076 cmd.extend(['-n', str(safe_int(limit, 0))])
1066 1077 cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])
1067 1078
1068 1079 output, __ = self.run_git_command(wire, cmd)
1069 1080 commit_ids = re.findall(r'[0-9a-fA-F]{40}', output)
1070 1081
1071 1082 return [x for x in commit_ids]
1072 1083 return _node_history(context_uid, repo_id, commit_id, path, limit)
1073 1084
# Run `git blame` on `path` at commit_id and return a list of
# (line_no, blame_commit_id, line_content) tuples, 1-based line numbers.
1074 1085 @reraise_safe_exceptions
1075 1086 def node_annotate(self, wire, commit_id, path):
1076 1087
1077 1088 cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
1078 1089 # -l ==> outputs long shas (and we need all 40 characters)
1079 1090 # --root ==> doesn't put '^' character for boundaries
1080 1091 # -r commit_id ==> blames for the given commit
1081 1092 output, __ = self.run_git_command(wire, cmd)
1082 1093
1083 1094 result = []
# [:-1] drops the trailing empty chunk after the final newline
1084 1095 for i, blame_line in enumerate(output.split('\n')[:-1]):
1085 1096 line_no = i + 1
# NOTE: intentionally shadows the commit_id parameter with the
# per-line blame sha from here on
1086 1097 commit_id, line = re.split(r' ', blame_line, 1)
1087 1098 result.append((line_no, commit_id, line))
1088 1099 return result
1089 1100
# Refresh the auxiliary files needed for dumb-HTTP transport (delegates
# to dulwich's module-level update_server_info).
1090 1101 @reraise_safe_exceptions
1091 1102 def update_server_info(self, wire):
1092 1103 repo = self._factory.repo(wire)
1093 1104 update_server_info(repo)
1094 1105
# Return all commit ids reachable from branches and tags, oldest first
# (`rev-list --reverse --date-order`); empty list for empty repositories.
1095 1106 @reraise_safe_exceptions
1096 1107 def get_all_commit_ids(self, wire):
1097 1108
1098 1109 cache_on, context_uid, repo_id = self._cache_on(wire)
1099 1110 @self.region.conditional_cache_on_arguments(condition=cache_on)
1100 1111 def _get_all_commit_ids(_context_uid, _repo_id):
1101 1112
1102 1113 cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
1103 1114 try:
1104 1115 output, __ = self.run_git_command(wire, cmd)
1105 1116 return output.splitlines()
1106 1117 except Exception:
1107 1118 # Can be raised for empty repositories
1108 1119 return []
1109 1120 return _get_all_commit_ids(context_uid, repo_id)
1110 1121
1111 1122 @reraise_safe_exceptions
1112 1123 def run_git_command(self, wire, cmd, **opts):
1113 1124 path = wire.get('path', None)
1114 1125
1115 1126 if path and os.path.isdir(path):
1116 1127 opts['cwd'] = path
1117 1128
1118 1129 if '_bare' in opts:
1119 1130 _copts = []
1120 1131 del opts['_bare']
1121 1132 else:
1122 1133 _copts = ['-c', 'core.quotepath=false', ]
1123 1134 safe_call = False
1124 1135 if '_safe' in opts:
1125 1136 # no exc on failure
1126 1137 del opts['_safe']
1127 1138 safe_call = True
1128 1139
1129 1140 if '_copts' in opts:
1130 1141 _copts.extend(opts['_copts'] or [])
1131 1142 del opts['_copts']
1132 1143
1133 1144 gitenv = os.environ.copy()
1134 1145 gitenv.update(opts.pop('extra_env', {}))
1135 1146 # need to clean fix GIT_DIR !
1136 1147 if 'GIT_DIR' in gitenv:
1137 1148 del gitenv['GIT_DIR']
1138 1149 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
1139 1150 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
1140 1151
1141 1152 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
1142 1153 _opts = {'env': gitenv, 'shell': False}
1143 1154
1144 1155 proc = None
1145 1156 try:
1146 1157 _opts.update(opts)
1147 1158 proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
1148 1159
1149 1160 return ''.join(proc), ''.join(proc.error)
1150 1161 except (EnvironmentError, OSError) as err:
1151 1162 cmd = ' '.join(cmd) # human friendly CMD
1152 1163 tb_err = ("Couldn't run git command (%s).\n"
1153 1164 "Original error was:%s\n"
1154 1165 "Call options:%s\n"
1155 1166 % (cmd, err, _opts))
1156 1167 log.exception(tb_err)
1157 1168 if safe_call:
1158 1169 return '', err
1159 1170 else:
1160 1171 raise exceptions.VcsException()(tb_err)
1161 1172 finally:
1162 1173 if proc:
1163 1174 proc.close()
1164 1175
# Install (or force-reinstall) RhodeCode git hooks into the repository.
1165 1176 @reraise_safe_exceptions
1166 1177 def install_hooks(self, wire, force=False):
1167 1178 from vcsserver.hook_utils import install_git_hooks
1168 1179 bare = self.bare(wire)
1169 1180 path = wire['path']
1170 1181 return install_git_hooks(path, bare, force_create=force)
1171 1182
# Report the installed pre/post hook versions for the repository as a dict
# with keys 'pre_version' and 'post_version'.
1172 1183 @reraise_safe_exceptions
1173 1184 def get_hooks_info(self, wire):
1174 1185 from vcsserver.hook_utils import (
1175 1186 get_git_pre_hook_version, get_git_post_hook_version)
1176 1187 bare = self.bare(wire)
1177 1188 path = wire['path']
1178 1189 return {
1179 1190 'pre_version': get_git_pre_hook_version(path, bare),
1180 1191 'post_version': get_git_post_hook_version(path, bare),
1181 1192 }
@@ -1,19 +1,19 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19 from app import create_app
@@ -1,292 +1,292 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import re
19 19 import logging
20 20 from wsgiref.util import FileWrapper
21 21
22 22 import simplejson as json
23 23 from pyramid.config import Configurator
24 24 from pyramid.response import Response, FileIter
25 25 from pyramid.httpexceptions import (
26 26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 27 HTTPUnprocessableEntity)
28 28
29 29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 31 from vcsserver.utils import safe_int
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
37 37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38 38
39 39
# Build an HTTP error response carrying the git-lfs JSON content type;
# optional `text` becomes a JSON body of the form {"message": text}.
40 40 def write_response_error(http_exception, text=None):
41 41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 42 _exception = http_exception(content_type=content_type)
43 43 _exception.content_type = content_type
44 44 if text:
45 45 _exception.body = json.dumps({'message': text})
46 46 log.debug('LFS: writing response of type %s to client with text:%s',
47 47 http_exception, text)
48 48 return _exception
49 49
50 50
51 51 class AuthHeaderRequired(object):
52 52 """
53 53 Decorator to check if request has proper auth-header
54 54 """
55 55
56 56 def __call__(self, func):
57 57 return get_cython_compat_decorator(self.__wrapper, func)
58 58
# fargs[1] is the pyramid request object supplied by the view machinery;
# reject with 403 when no Authorization header is present, otherwise
# forward to the wrapped view
59 59 def __wrapper(self, func, *fargs, **fkwargs):
60 60 request = fargs[1]
61 61 auth = request.authorization
62 62 if not auth:
63 63 return write_response_error(HTTPForbidden)
64 64 return func(*fargs[1:], **fkwargs)
65 65
66 66
67 67 # views
68 68
# Deprecated LFS v1 endpoint — always answers 501 Not Implemented.
69 69 def lfs_objects(request):
70 70 # indicate not supported, V1 API
71 71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 73
74 74
75 75 @AuthHeaderRequired()
76 76 def lfs_objects_batch(request):
77 77 """
78 78 The client sends the following information to the Batch endpoint to transfer some objects:
79 79
80 80 operation - Should be download or upload.
81 81 transfers - An optional Array of String identifiers for transfer
82 82 adapters that the client has configured. If omitted, the basic
83 83 transfer adapter MUST be assumed by the server.
84 84 objects - An Array of objects to download.
85 85 oid - String OID of the LFS object.
86 86 size - Integer byte size of the LFS object. Must be at least zero.
87 87 """
88 88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 89 auth = request.authorization
90 90 repo = request.matchdict.get('repo')
91 91 data = request.json
92 92 operation = data.get('operation')
93 93 http_scheme = request.registry.git_lfs_http_scheme
94 94
# validate request shape first: operation and objects are mandatory
95 95 if operation not in ('download', 'upload'):
96 96 log.debug('LFS: unsupported operation:%s', operation)
97 97 return write_response_error(
98 98 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
99 99
100 100 if 'objects' not in data:
101 101 log.debug('LFS: missing objects data')
102 102 return write_response_error(
103 103 HTTPBadRequest, 'missing objects data')
104 104
105 105 log.debug('LFS: handling operation of type: %s', operation)
106 106
107 107 objects = []
108 108 for o in data['objects']:
109 109 try:
110 110 oid = o['oid']
111 111 obj_size = o['size']
112 112 except KeyError:
113 113 log.exception('LFS, failed to extract data')
114 114 return write_response_error(
115 115 HTTPBadRequest, 'unsupported data in objects')
116 116
117 117 obj_data = {'oid': oid}
118 118
# per-object upload/download and verify URLs handed back to the client
119 119 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid,
120 120 _scheme=http_scheme)
121 121 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo,
122 122 _scheme=http_scheme)
123 123 store = LFSOidStore(
124 124 oid, repo, store_location=request.registry.git_lfs_store_path)
125 125 handler = OidHandler(
126 126 store, repo, auth, oid, obj_size, obj_data,
127 127 obj_href, obj_verify_href)
128 128
129 129 # this verifies also OIDs
130 130 actions, errors = handler.exec_operation(operation)
131 131 if errors:
132 132 log.warning('LFS: got following errors: %s', errors)
133 133 obj_data['errors'] = errors
134 134
135 135 if actions:
136 136 obj_data['actions'] = actions
137 137
138 138 obj_data['size'] = obj_size
139 139 obj_data['authenticated'] = True
140 140 objects.append(obj_data)
141 141
142 142 result = {'objects': objects, 'transfer': 'basic'}
143 143 log.debug('LFS Response %s', safe_result(result))
144 144
145 145 return result
146 146
147 147
# PUT handler: stream the request body into the LFS store for `oid`,
# reading in 64kb chunks; responds {'upload': 'ok'} on success.
148 148 def lfs_objects_oid_upload(request):
149 149 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
150 150 repo = request.matchdict.get('repo')
151 151 oid = request.matchdict.get('oid')
152 152 store = LFSOidStore(
153 153 oid, repo, store_location=request.registry.git_lfs_store_path)
154 154 engine = store.get_engine(mode='wb')
155 155 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
156 156
157 157 body = request.environ['wsgi.input']
158 158
159 159 with engine as f:
160 160 blksize = 64 * 1024 # 64kb
161 161 while True:
162 162 # read in chunks as stream comes in from Gunicorn
163 163 # this is a specific Gunicorn support function.
164 164 # might work differently on waitress
165 165 chunk = body.read(blksize)
166 166 if not chunk:
167 167 break
168 168 f.write(chunk)
169 169
170 170 return {'upload': 'ok'}
171 171
172 172
# GET handler: stream the stored LFS object back as octet-stream;
# 404 when the oid is not present in the store.
173 173 def lfs_objects_oid_download(request):
174 174 repo = request.matchdict.get('repo')
175 175 oid = request.matchdict.get('oid')
176 176
177 177 store = LFSOidStore(
178 178 oid, repo, store_location=request.registry.git_lfs_store_path)
179 179 if not store.has_oid():
180 180 log.debug('LFS: oid %s does not exists in store', oid)
181 181 return write_response_error(
182 182 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
183 183
184 184 # TODO(marcink): support range header ?
185 185 # Range: bytes=0-, `bytes=(\d+)\-.*`
186 186
# FileIter takes ownership of the file handle and streams it to the client
187 187 f = open(store.oid_path, 'rb')
188 188 response = Response(
189 189 content_type='application/octet-stream', app_iter=FileIter(f))
190 190 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
191 191 return response
192 192
193 193
# POST handler: verify a previously uploaded object exists and its stored
# size matches the client-reported size; 404 / 422 on mismatch.
194 194 def lfs_objects_verify(request):
195 195 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
196 196 repo = request.matchdict.get('repo')
197 197
198 198 data = request.json
199 199 oid = data.get('oid')
200 200 size = safe_int(data.get('size'))
201 201
202 202 if not (oid and size):
203 203 return write_response_error(
204 204 HTTPBadRequest, 'missing oid and size in request data')
205 205
206 206 store = LFSOidStore(
207 207 oid, repo, store_location=request.registry.git_lfs_store_path)
208 208 if not store.has_oid():
209 209 log.debug('LFS: oid %s does not exists in store', oid)
210 210 return write_response_error(
211 211 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
212 212
213 213 store_size = store.size_oid()
214 214 if store_size != size:
215 215 msg = 'requested file size mismatch store size:%s requested:%s' % (
216 216 store_size, size)
217 217 return write_response_error(
218 218 HTTPUnprocessableEntity, msg)
219 219
220 220 return {'message': {'size': 'ok', 'in_store': 'ok'}}
221 221
222 222
# LFS file-locking API is not implemented — always answers 501.
223 223 def lfs_objects_lock(request):
224 224 return write_response_error(
225 225 HTTPNotImplemented, 'GIT LFS locking api not supported')
226 226
227 227
# JSON 404 handler used for unmatched LFS API paths.
228 228 def not_found(request):
229 229 return write_response_error(
230 230 HTTPNotFound, 'request path not found')
231 231
232 232
# Catch-all handler installed when LFS is disabled for the repo — 501.
233 233 def lfs_disabled(request):
234 234 return write_response_error(
235 235 HTTPNotImplemented, 'GIT LFS disabled for this repo')
236 236
237 237
# Pyramid includeme: wires up all git-lfs routes/views (v1 deprecation,
# locking stubs, batch API, oid upload/download, verify, 404 fallback).
238 238 def git_lfs_app(config):
239 239
240 240 # v1 API deprecation endpoint
241 241 config.add_route('lfs_objects',
242 242 '/{repo:.*?[^/]}/info/lfs/objects')
243 243 config.add_view(lfs_objects, route_name='lfs_objects',
244 244 request_method='POST', renderer='json')
245 245
246 246 # locking API
247 247 config.add_route('lfs_objects_lock',
248 248 '/{repo:.*?[^/]}/info/lfs/locks')
249 249 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
250 250 request_method=('POST', 'GET'), renderer='json')
251 251
252 252 config.add_route('lfs_objects_lock_verify',
253 253 '/{repo:.*?[^/]}/info/lfs/locks/verify')
254 254 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
255 255 request_method=('POST', 'GET'), renderer='json')
256 256
257 257 # batch API
258 258 config.add_route('lfs_objects_batch',
259 259 '/{repo:.*?[^/]}/info/lfs/objects/batch')
260 260 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
261 261 request_method='POST', renderer='json')
262 262
263 263 # oid upload/download API
264 264 config.add_route('lfs_objects_oid',
265 265 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
266 266 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
267 267 request_method='PUT', renderer='json')
268 268 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
269 269 request_method='GET', renderer='json')
270 270
271 271 # verification API
272 272 config.add_route('lfs_objects_verify',
273 273 '/{repo:.*?[^/]}/info/lfs/verify')
274 274 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
275 275 request_method='POST', renderer='json')
276 276
277 277 # not found handler for API
278 278 config.add_notfound_view(not_found, renderer='json')
280 280
# WSGI app factory: full LFS app when enabled, otherwise a stub app whose
# every request answers "LFS disabled for this repo".
281 281 def create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
282 282 config = Configurator()
283 283 if git_lfs_enabled:
284 284 config.include(git_lfs_app)
285 285 config.registry.git_lfs_store_path = git_lfs_store_path
286 286 config.registry.git_lfs_http_scheme = git_lfs_http_scheme
287 287 else:
288 288 # not found handler for API, reporting disabled LFS support
289 289 config.add_notfound_view(lfs_disabled, renderer='json')
290 290
291 291 app = config.make_wsgi_app()
292 292 return app
@@ -1,175 +1,175 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import logging
21 21 from collections import OrderedDict
22 22
23 23 log = logging.getLogger(__name__)
24 24
25 25
# Builds the per-object 'actions' payload for the git-lfs batch API:
# download()/upload() each return (response_dict, errors_dict) where exactly
# one of the two is populated.
26 26 class OidHandler(object):
27 27
28 28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
29 29 obj_verify_href=None):
30 30 self.current_store = store
31 31 self.repo_name = repo_name
32 32 self.auth = auth
33 33 self.oid = oid
34 34 self.obj_size = obj_size
35 35 self.obj_data = obj_data
36 36 self.obj_href = obj_href
37 37 self.obj_verify_href = obj_verify_href
38 38
39 39 def get_store(self, mode=None):
40 40 return self.current_store
41 41
42 42 def get_auth(self):
43 43 """returns auth header for re-use in upload/download"""
44 44 return " ".join(self.auth)
45 45
# Build a 'download' action; a missing oid yields a 404 error entry
# instead of an action dict.
46 46 def download(self):
47 47
48 48 store = self.get_store()
49 49 response = None
50 50 has_errors = None
51 51
52 52 if not store.has_oid():
53 53 # error reply back to client that something is wrong with dl
54 54 err_msg = 'object: {} does not exist in store'.format(store.oid)
55 55 has_errors = OrderedDict(
56 56 error=OrderedDict(
57 57 code=404,
58 58 message=err_msg
59 59 )
60 60 )
61 61
62 62 download_action = OrderedDict(
63 63 href=self.obj_href,
64 64 header=OrderedDict([("Authorization", self.get_auth())])
65 65 )
66 66 if not has_errors:
67 67 response = OrderedDict(download=download_action)
68 68 return response, has_errors
69 69
70 70 def upload(self, skip_existing=True):
71 71 """
72 72 Write upload action for git-lfs server
73 73 """
74 74
75 75 store = self.get_store()
76 76 response = None
77 77 has_errors = None
78 78
79 79 # verify if we have the OID before, if we do, reply with empty
80 80 if store.has_oid():
81 81 log.debug('LFS: store already has oid %s', store.oid)
82 82
83 83 # validate size
84 84 store_size = store.size_oid()
85 85 size_match = store_size == self.obj_size
86 86 if not size_match:
# size mismatch is only logged; the upload action below lets the
# client re-upload and replace the stored object
87 87 log.warning(
88 88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
89 89 self.oid, store_size, self.obj_size)
90 90 elif skip_existing:
91 91 log.debug('LFS: skipping further action as oid is existing')
92 92 return response, has_errors
93 93
94 94 chunked = ("Transfer-Encoding", "chunked")
95 95 upload_action = OrderedDict(
96 96 href=self.obj_href,
97 97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
98 98 )
99 99 if not has_errors:
100 100 response = OrderedDict(upload=upload_action)
101 101 # if specified in handler, return the verification endpoint
102 102 if self.obj_verify_href:
103 103 verify_action = OrderedDict(
104 104 href=self.obj_verify_href,
105 105 header=OrderedDict([("Authorization", self.get_auth())])
106 106 )
107 107 response['verify'] = verify_action
108 108 return response, has_errors
109 109
# Dynamic dispatch: operation is 'download' or 'upload' (validated by the
# batch view before this is called).
110 110 def exec_operation(self, operation, *args, **kwargs):
111 111 handler = getattr(self, operation)
112 112 log.debug('LFS: handling request using %s handler', handler)
113 113 return handler(*args, **kwargs)
114 114
115 115
# Filesystem-backed store for a single LFS object: writes go to
# <store>/<oid>.tmp and are atomically moved to <store>/<oid> on close.
116 116 class LFSOidStore(object):
117 117
118 118 def __init__(self, oid, repo, store_location=None):
119 119 self.oid = oid
120 120 self.repo = repo
121 121 self.store_path = store_location or self.get_default_store()
122 122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
123 123 self.oid_path = os.path.join(self.store_path, oid)
124 124 self.fd = None
125 125
126 126 def get_engine(self, mode):
127 127 """
128 128 engine = .get_engine(mode='wb')
129 129 with engine as f:
130 130 f.write('...')
131 131 """
132 132
# context manager wrapping the tmp-file-then-rename write protocol
133 133 class StoreEngine(object):
134 134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
135 135 self.mode = mode
136 136 self.store_path = store_path
137 137 self.oid_path = oid_path
138 138 self.tmp_oid_path = tmp_oid_path
139 139
140 140 def __enter__(self):
141 141 if not os.path.isdir(self.store_path):
142 142 os.makedirs(self.store_path)
143 143
144 144 # TODO(marcink): maybe write metadata here with size/oid ?
145 145 fd = open(self.tmp_oid_path, self.mode)
146 146 self.fd = fd
147 147 return fd
148 148
149 149 def __exit__(self, exc_type, exc_value, traceback):
150 150 # close tmp file, and rename to final destination
# NOTE(review): runs even when the body raised — a partial tmp
# file would still be promoted; confirm this is intended
151 151 self.fd.close()
152 152 shutil.move(self.tmp_oid_path, self.oid_path)
153 153
154 154 return StoreEngine(
155 155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
156 156
157 157 def get_default_store(self):
158 158 """
159 159 Default store, consistent with defaults of Mercurial large files store
160 160 which is /home/username/.cache/largefiles
161 161 """
162 162 user_home = os.path.expanduser("~")
163 163 return os.path.join(user_home, '.cache', 'lfs-store')
164 164
165 165 def has_oid(self):
166 166 return os.path.exists(os.path.join(self.store_path, self.oid))
167 167
# Size in bytes of the stored object, or -1 when the oid is absent.
168 168 def size_oid(self):
169 169 size = -1
170 170
171 171 if self.has_oid():
172 172 oid = os.path.join(self.store_path, self.oid)
173 173 size = os.stat(oid).st_size
174 174
175 175 return size
@@ -1,16 +1,16 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,272 +1,272 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from webtest.app import TestApp as WebObTestApp
21 21 import simplejson as json
22 22
23 23 from vcsserver.git_lfs.app import create_app
24 24
25 25
26 26 @pytest.fixture(scope='function')
27 27 def git_lfs_app(tmpdir):
28 28 custom_app = WebObTestApp(create_app(
29 29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
30 30 git_lfs_http_scheme='http'))
31 31 custom_app._store = str(tmpdir)
32 32 return custom_app
33 33
34 34
35 35 @pytest.fixture(scope='function')
36 36 def git_lfs_https_app(tmpdir):
37 37 custom_app = WebObTestApp(create_app(
38 38 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir),
39 39 git_lfs_http_scheme='https'))
40 40 custom_app._store = str(tmpdir)
41 41 return custom_app
42 42
43 43
44 44 @pytest.fixture()
45 45 def http_auth():
46 46 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
47 47
48 48
49 49 class TestLFSApplication(object):
50 50
51 51 def test_app_wrong_path(self, git_lfs_app):
52 52 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
53 53
54 54 def test_app_deprecated_endpoint(self, git_lfs_app):
55 55 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
56 56 assert response.status_code == 501
57 57 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
58 58
59 59 def test_app_lock_verify_api_not_available(self, git_lfs_app):
60 60 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
61 61 assert response.status_code == 501
62 62 assert json.loads(response.text) == {
63 63 u'message': u'GIT LFS locking api not supported'}
64 64
65 65 def test_app_lock_api_not_available(self, git_lfs_app):
66 66 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
67 67 assert response.status_code == 501
68 68 assert json.loads(response.text) == {
69 69 u'message': u'GIT LFS locking api not supported'}
70 70
71 71 def test_app_batch_api_missing_auth(self, git_lfs_app):
72 72 git_lfs_app.post_json(
73 73 '/repo/info/lfs/objects/batch', params={}, status=403)
74 74
75 75 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
76 76 response = git_lfs_app.post_json(
77 77 '/repo/info/lfs/objects/batch', params={}, status=400,
78 78 extra_environ=http_auth)
79 79 assert json.loads(response.text) == {
80 80 u'message': u'unsupported operation mode: `None`'}
81 81
82 82 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
83 83 response = git_lfs_app.post_json(
84 84 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
85 85 status=400, extra_environ=http_auth)
86 86 assert json.loads(response.text) == {
87 87 u'message': u'missing objects data'}
88 88
89 89 def test_app_batch_api_unsupported_data_in_objects(
90 90 self, git_lfs_app, http_auth):
91 91 params = {'operation': 'download',
92 92 'objects': [{}]}
93 93 response = git_lfs_app.post_json(
94 94 '/repo/info/lfs/objects/batch', params=params, status=400,
95 95 extra_environ=http_auth)
96 96 assert json.loads(response.text) == {
97 97 u'message': u'unsupported data in objects'}
98 98
99 99 def test_app_batch_api_download_missing_object(
100 100 self, git_lfs_app, http_auth):
101 101 params = {'operation': 'download',
102 102 'objects': [{'oid': '123', 'size': '1024'}]}
103 103 response = git_lfs_app.post_json(
104 104 '/repo/info/lfs/objects/batch', params=params,
105 105 extra_environ=http_auth)
106 106
107 107 expected_objects = [
108 108 {u'authenticated': True,
109 109 u'errors': {u'error': {
110 110 u'code': 404,
111 111 u'message': u'object: 123 does not exist in store'}},
112 112 u'oid': u'123',
113 113 u'size': u'1024'}
114 114 ]
115 115 assert json.loads(response.text) == {
116 116 'objects': expected_objects, 'transfer': 'basic'}
117 117
118 118 def test_app_batch_api_download(self, git_lfs_app, http_auth):
119 119 oid = '456'
120 120 oid_path = os.path.join(git_lfs_app._store, oid)
121 121 if not os.path.isdir(os.path.dirname(oid_path)):
122 122 os.makedirs(os.path.dirname(oid_path))
123 123 with open(oid_path, 'wb') as f:
124 124 f.write('OID_CONTENT')
125 125
126 126 params = {'operation': 'download',
127 127 'objects': [{'oid': oid, 'size': '1024'}]}
128 128 response = git_lfs_app.post_json(
129 129 '/repo/info/lfs/objects/batch', params=params,
130 130 extra_environ=http_auth)
131 131
132 132 expected_objects = [
133 133 {u'authenticated': True,
134 134 u'actions': {
135 135 u'download': {
136 136 u'header': {u'Authorization': u'Basic XXXXX'},
137 137 u'href': u'http://localhost/repo/info/lfs/objects/456'},
138 138 },
139 139 u'oid': u'456',
140 140 u'size': u'1024'}
141 141 ]
142 142 assert json.loads(response.text) == {
143 143 'objects': expected_objects, 'transfer': 'basic'}
144 144
145 145 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
146 146 params = {'operation': 'upload',
147 147 'objects': [{'oid': '123', 'size': '1024'}]}
148 148 response = git_lfs_app.post_json(
149 149 '/repo/info/lfs/objects/batch', params=params,
150 150 extra_environ=http_auth)
151 151 expected_objects = [
152 152 {u'authenticated': True,
153 153 u'actions': {
154 154 u'upload': {
155 155 u'header': {u'Authorization': u'Basic XXXXX',
156 156 u'Transfer-Encoding': u'chunked'},
157 157 u'href': u'http://localhost/repo/info/lfs/objects/123'},
158 158 u'verify': {
159 159 u'header': {u'Authorization': u'Basic XXXXX'},
160 160 u'href': u'http://localhost/repo/info/lfs/verify'}
161 161 },
162 162 u'oid': u'123',
163 163 u'size': u'1024'}
164 164 ]
165 165 assert json.loads(response.text) == {
166 166 'objects': expected_objects, 'transfer': 'basic'}
167 167
168 168 def test_app_batch_api_upload_for_https(self, git_lfs_https_app, http_auth):
169 169 params = {'operation': 'upload',
170 170 'objects': [{'oid': '123', 'size': '1024'}]}
171 171 response = git_lfs_https_app.post_json(
172 172 '/repo/info/lfs/objects/batch', params=params,
173 173 extra_environ=http_auth)
174 174 expected_objects = [
175 175 {u'authenticated': True,
176 176 u'actions': {
177 177 u'upload': {
178 178 u'header': {u'Authorization': u'Basic XXXXX',
179 179 u'Transfer-Encoding': u'chunked'},
180 180 u'href': u'https://localhost/repo/info/lfs/objects/123'},
181 181 u'verify': {
182 182 u'header': {u'Authorization': u'Basic XXXXX'},
183 183 u'href': u'https://localhost/repo/info/lfs/verify'}
184 184 },
185 185 u'oid': u'123',
186 186 u'size': u'1024'}
187 187 ]
188 188 assert json.loads(response.text) == {
189 189 'objects': expected_objects, 'transfer': 'basic'}
190 190
191 191 def test_app_verify_api_missing_data(self, git_lfs_app):
192 192 params = {'oid': 'missing'}
193 193 response = git_lfs_app.post_json(
194 194 '/repo/info/lfs/verify', params=params,
195 195 status=400)
196 196
197 197 assert json.loads(response.text) == {
198 198 u'message': u'missing oid and size in request data'}
199 199
200 200 def test_app_verify_api_missing_obj(self, git_lfs_app):
201 201 params = {'oid': 'missing', 'size': '1024'}
202 202 response = git_lfs_app.post_json(
203 203 '/repo/info/lfs/verify', params=params,
204 204 status=404)
205 205
206 206 assert json.loads(response.text) == {
207 207 u'message': u'oid `missing` does not exists in store'}
208 208
209 209 def test_app_verify_api_size_mismatch(self, git_lfs_app):
210 210 oid = 'existing'
211 211 oid_path = os.path.join(git_lfs_app._store, oid)
212 212 if not os.path.isdir(os.path.dirname(oid_path)):
213 213 os.makedirs(os.path.dirname(oid_path))
214 214 with open(oid_path, 'wb') as f:
215 215 f.write('OID_CONTENT')
216 216
217 217 params = {'oid': oid, 'size': '1024'}
218 218 response = git_lfs_app.post_json(
219 219 '/repo/info/lfs/verify', params=params, status=422)
220 220
221 221 assert json.loads(response.text) == {
222 222 u'message': u'requested file size mismatch '
223 223 u'store size:11 requested:1024'}
224 224
225 225 def test_app_verify_api(self, git_lfs_app):
226 226 oid = 'existing'
227 227 oid_path = os.path.join(git_lfs_app._store, oid)
228 228 if not os.path.isdir(os.path.dirname(oid_path)):
229 229 os.makedirs(os.path.dirname(oid_path))
230 230 with open(oid_path, 'wb') as f:
231 231 f.write('OID_CONTENT')
232 232
233 233 params = {'oid': oid, 'size': 11}
234 234 response = git_lfs_app.post_json(
235 235 '/repo/info/lfs/verify', params=params)
236 236
237 237 assert json.loads(response.text) == {
238 238 u'message': {u'size': u'ok', u'in_store': u'ok'}}
239 239
240 240 def test_app_download_api_oid_not_existing(self, git_lfs_app):
241 241 oid = 'missing'
242 242
243 243 response = git_lfs_app.get(
244 244 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
245 245
246 246 assert json.loads(response.text) == {
247 247 u'message': u'requested file with oid `missing` not found in store'}
248 248
249 249 def test_app_download_api(self, git_lfs_app):
250 250 oid = 'existing'
251 251 oid_path = os.path.join(git_lfs_app._store, oid)
252 252 if not os.path.isdir(os.path.dirname(oid_path)):
253 253 os.makedirs(os.path.dirname(oid_path))
254 254 with open(oid_path, 'wb') as f:
255 255 f.write('OID_CONTENT')
256 256
257 257 response = git_lfs_app.get(
258 258 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
259 259 assert response
260 260
261 261 def test_app_upload(self, git_lfs_app):
262 262 oid = 'uploaded'
263 263
264 264 response = git_lfs_app.put(
265 265 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
266 266
267 267 assert json.loads(response.text) == {u'upload': u'ok'}
268 268
269 269 # verify that we actually wrote that OID
270 270 oid_path = os.path.join(git_lfs_app._store, oid)
271 271 assert os.path.isfile(oid_path)
272 272 assert 'CONTENT' == open(oid_path).read()
@@ -1,141 +1,141 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 21
22 22
23 23 @pytest.fixture()
24 24 def lfs_store(tmpdir):
25 25 repo = 'test'
26 26 oid = '123456789'
27 27 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
28 28 return store
29 29
30 30
31 31 @pytest.fixture()
32 32 def oid_handler(lfs_store):
33 33 store = lfs_store
34 34 repo = store.repo
35 35 oid = store.oid
36 36
37 37 oid_handler = OidHandler(
38 38 store=store, repo_name=repo, auth=('basic', 'xxxx'),
39 39 oid=oid,
40 40 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
41 41 obj_verify_href='http://localhost/verify')
42 42 return oid_handler
43 43
44 44
45 45 class TestOidHandler(object):
46 46
47 47 @pytest.mark.parametrize('exec_action', [
48 48 'download',
49 49 'upload',
50 50 ])
51 51 def test_exec_action(self, exec_action, oid_handler):
52 52 handler = oid_handler.exec_operation(exec_action)
53 53 assert handler
54 54
55 55 def test_exec_action_undefined(self, oid_handler):
56 56 with pytest.raises(AttributeError):
57 57 oid_handler.exec_operation('wrong')
58 58
59 59 def test_download_oid_not_existing(self, oid_handler):
60 60 response, has_errors = oid_handler.exec_operation('download')
61 61
62 62 assert response is None
63 63 assert has_errors['error'] == {
64 64 'code': 404,
65 65 'message': 'object: 123456789 does not exist in store'}
66 66
67 67 def test_download_oid(self, oid_handler):
68 68 store = oid_handler.get_store()
69 69 if not os.path.isdir(os.path.dirname(store.oid_path)):
70 70 os.makedirs(os.path.dirname(store.oid_path))
71 71
72 72 with open(store.oid_path, 'wb') as f:
73 73 f.write('CONTENT')
74 74
75 75 response, has_errors = oid_handler.exec_operation('download')
76 76
77 77 assert has_errors is None
78 78 assert response['download'] == {
79 79 'header': {'Authorization': 'basic xxxx'},
80 80 'href': 'http://localhost/handle_oid'
81 81 }
82 82
83 83 def test_upload_oid_that_exists(self, oid_handler):
84 84 store = oid_handler.get_store()
85 85 if not os.path.isdir(os.path.dirname(store.oid_path)):
86 86 os.makedirs(os.path.dirname(store.oid_path))
87 87
88 88 with open(store.oid_path, 'wb') as f:
89 89 f.write('CONTENT')
90 90 oid_handler.obj_size = 7
91 91 response, has_errors = oid_handler.exec_operation('upload')
92 92 assert has_errors is None
93 93 assert response is None
94 94
95 95 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
96 96 store = oid_handler.get_store()
97 97 if not os.path.isdir(os.path.dirname(store.oid_path)):
98 98 os.makedirs(os.path.dirname(store.oid_path))
99 99
100 100 with open(store.oid_path, 'wb') as f:
101 101 f.write('CONTENT')
102 102
103 103 oid_handler.obj_size = 10240
104 104 response, has_errors = oid_handler.exec_operation('upload')
105 105 assert has_errors is None
106 106 assert response['upload'] == {
107 107 'header': {'Authorization': 'basic xxxx',
108 108 'Transfer-Encoding': 'chunked'},
109 109 'href': 'http://localhost/handle_oid',
110 110 }
111 111
112 112 def test_upload_oid(self, oid_handler):
113 113 response, has_errors = oid_handler.exec_operation('upload')
114 114 assert has_errors is None
115 115 assert response['upload'] == {
116 116 'header': {'Authorization': 'basic xxxx',
117 117 'Transfer-Encoding': 'chunked'},
118 118 'href': 'http://localhost/handle_oid'
119 119 }
120 120
121 121
122 122 class TestLFSStore(object):
123 123 def test_write_oid(self, lfs_store):
124 124 oid_location = lfs_store.oid_path
125 125
126 126 assert not os.path.isfile(oid_location)
127 127
128 128 engine = lfs_store.get_engine(mode='wb')
129 129 with engine as f:
130 130 f.write('CONTENT')
131 131
132 132 assert os.path.isfile(oid_location)
133 133
134 134 def test_detect_has_oid(self, lfs_store):
135 135
136 136 assert lfs_store.has_oid() is False
137 137 engine = lfs_store.get_engine(mode='wb')
138 138 with engine as f:
139 139 f.write('CONTENT')
140 140
141 141 assert lfs_store.has_oid() is True No newline at end of file
@@ -1,50 +1,50 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import copy
18 18 from functools import wraps
19 19
20 20
21 21 def get_cython_compat_decorator(wrapper, func):
22 22 """
23 23 Creates a cython compatible decorator. The previously used
24 24 decorator.decorator() function seems to be incompatible with cython.
25 25
26 26 :param wrapper: __wrapper method of the decorator class
27 27 :param func: decorated function
28 28 """
29 29 @wraps(func)
30 30 def local_wrapper(*args, **kwds):
31 31 return wrapper(func, *args, **kwds)
32 32 local_wrapper.__wrapped__ = func
33 33 return local_wrapper
34 34
35 35
36 36 def safe_result(result):
37 37 """clean result for better representation in logs"""
38 38 clean_copy = copy.deepcopy(result)
39 39
40 40 try:
41 41 if 'objects' in clean_copy:
42 42 for oid_data in clean_copy['objects']:
43 43 if 'actions' in oid_data:
44 44 for action_name, data in oid_data['actions'].items():
45 45 if 'header' in data:
46 46 data['header'] = {'Authorization': '*****'}
47 47 except Exception:
48 48 return result
49 49
50 50 return clean_copy
@@ -1,1009 +1,1009 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23 import traceback
24 24
25 25 from hgext import largefiles, rebase, purge
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29 from mercurial import verify
30 30 from mercurial import repair
31 31
32 32 import vcsserver
33 33 from vcsserver import exceptions
34 34 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
35 35 from vcsserver.hgcompat import (
36 36 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 37 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 38 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 39 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 40 RepoLookupError, InterventionRequired, RequirementError)
41 41 from vcsserver.vcs_base import RemoteBase
42 42
43 43 log = logging.getLogger(__name__)
44 44
45 45
46 46 def make_ui_from_config(repo_config):
47 47
48 48 class LoggingUI(ui.ui):
49 49 def status(self, *msg, **opts):
50 50 log.info(' '.join(msg).rstrip('\n'))
51 51 super(LoggingUI, self).status(*msg, **opts)
52 52
53 53 def warn(self, *msg, **opts):
54 54 log.warn(' '.join(msg).rstrip('\n'))
55 55 super(LoggingUI, self).warn(*msg, **opts)
56 56
57 57 def error(self, *msg, **opts):
58 58 log.error(' '.join(msg).rstrip('\n'))
59 59 super(LoggingUI, self).error(*msg, **opts)
60 60
61 61 def note(self, *msg, **opts):
62 62 log.info(' '.join(msg).rstrip('\n'))
63 63 super(LoggingUI, self).note(*msg, **opts)
64 64
65 65 def debug(self, *msg, **opts):
66 66 log.debug(' '.join(msg).rstrip('\n'))
67 67 super(LoggingUI, self).debug(*msg, **opts)
68 68
69 69 baseui = LoggingUI()
70 70
71 71 # clean the baseui object
72 72 baseui._ocfg = hgconfig.config()
73 73 baseui._ucfg = hgconfig.config()
74 74 baseui._tcfg = hgconfig.config()
75 75
76 76 for section, option, value in repo_config:
77 77 baseui.setconfig(section, option, value)
78 78
79 79 # make our hgweb quiet so it doesn't print output
80 80 baseui.setconfig('ui', 'quiet', 'true')
81 81
82 82 baseui.setconfig('ui', 'paginate', 'never')
83 83 # for better Error reporting of Mercurial
84 84 baseui.setconfig('ui', 'message-output', 'stderr')
85 85
86 86 # force mercurial to only use 1 thread, otherwise it may try to set a
87 87 # signal in a non-main thread, thus generating a ValueError.
88 88 baseui.setconfig('worker', 'numcpus', 1)
89 89
90 90 # If there is no config for the largefiles extension, we explicitly disable
91 91 # it here. This overrides settings from repositories hgrc file. Recent
92 92 # mercurial versions enable largefiles in hgrc on clone from largefile
93 93 # repo.
94 94 if not baseui.hasconfig('extensions', 'largefiles'):
95 95 log.debug('Explicitly disable largefiles extension for repo.')
96 96 baseui.setconfig('extensions', 'largefiles', '!')
97 97
98 98 return baseui
99 99
100 100
101 101 def reraise_safe_exceptions(func):
102 102 """Decorator for converting mercurial exceptions to something neutral."""
103 103
104 104 def wrapper(*args, **kwargs):
105 105 try:
106 106 return func(*args, **kwargs)
107 107 except (Abort, InterventionRequired) as e:
108 108 raise_from_original(exceptions.AbortException(e))
109 109 except RepoLookupError as e:
110 110 raise_from_original(exceptions.LookupException(e))
111 111 except RequirementError as e:
112 112 raise_from_original(exceptions.RequirementException(e))
113 113 except RepoError as e:
114 114 raise_from_original(exceptions.VcsException(e))
115 115 except LookupError as e:
116 116 raise_from_original(exceptions.LookupException(e))
117 117 except Exception as e:
118 118 if not hasattr(e, '_vcs_kind'):
119 119 log.exception("Unhandled exception in hg remote call")
120 120 raise_from_original(exceptions.UnhandledException(e))
121 121
122 122 raise
123 123 return wrapper
124 124
125 125
126 126 class MercurialFactory(RepoFactory):
127 127 repo_type = 'hg'
128 128
129 129 def _create_config(self, config, hooks=True):
130 130 if not hooks:
131 131 hooks_to_clean = frozenset((
132 132 'changegroup.repo_size', 'preoutgoing.pre_pull',
133 133 'outgoing.pull_logger', 'prechangegroup.pre_push'))
134 134 new_config = []
135 135 for section, option, value in config:
136 136 if section == 'hooks' and option in hooks_to_clean:
137 137 continue
138 138 new_config.append((section, option, value))
139 139 config = new_config
140 140
141 141 baseui = make_ui_from_config(config)
142 142 return baseui
143 143
144 144 def _create_repo(self, wire, create):
145 145 baseui = self._create_config(wire["config"])
146 146 return instance(baseui, wire["path"], create)
147 147
148 148 def repo(self, wire, create=False):
149 149 """
150 150 Get a repository instance for the given path.
151 151 """
152 152 return self._create_repo(wire, create)
153 153
154 154
155 155 def patch_ui_message_output(baseui):
156 156 baseui.setconfig('ui', 'quiet', 'false')
157 157 output = io.BytesIO()
158 158
159 159 def write(data, **unused_kwargs):
160 160 output.write(data)
161 161
162 162 baseui.status = write
163 163 baseui.write = write
164 164 baseui.warn = write
165 165 baseui.debug = write
166 166
167 167 return baseui, output
168 168
169 169
170 170 class HgRemote(RemoteBase):
171 171
172 172 def __init__(self, factory):
173 173 self._factory = factory
174 174 self._bulk_methods = {
175 175 "affected_files": self.ctx_files,
176 176 "author": self.ctx_user,
177 177 "branch": self.ctx_branch,
178 178 "children": self.ctx_children,
179 179 "date": self.ctx_date,
180 180 "message": self.ctx_description,
181 181 "parents": self.ctx_parents,
182 182 "status": self.ctx_status,
183 183 "obsolete": self.ctx_obsolete,
184 184 "phase": self.ctx_phase,
185 185 "hidden": self.ctx_hidden,
186 186 "_file_paths": self.ctx_list,
187 187 }
188 188
189 189 def _get_ctx(self, repo, ref):
190 190 return get_ctx(repo, ref)
191 191
192 192 @reraise_safe_exceptions
193 193 def discover_hg_version(self):
194 194 from mercurial import util
195 195 return util.version()
196 196
197 197 @reraise_safe_exceptions
198 198 def is_empty(self, wire):
199 199 repo = self._factory.repo(wire)
200 200
201 201 try:
202 202 return len(repo) == 0
203 203 except Exception:
204 204 log.exception("failed to read object_store")
205 205 return False
206 206
207 207 @reraise_safe_exceptions
208 208 def archive_repo(self, archive_path, mtime, file_info, kind):
209 209 if kind == "tgz":
210 210 archiver = archival.tarit(archive_path, mtime, "gz")
211 211 elif kind == "tbz2":
212 212 archiver = archival.tarit(archive_path, mtime, "bz2")
213 213 elif kind == 'zip':
214 214 archiver = archival.zipit(archive_path, mtime)
215 215 else:
216 216 raise exceptions.ArchiveException()(
217 217 'Remote does not support: "%s".' % kind)
218 218
219 219 for f_path, f_mode, f_is_link, f_content in file_info:
220 220 archiver.addfile(f_path, f_mode, f_is_link, f_content)
221 221 archiver.done()
222 222
223 223 @reraise_safe_exceptions
224 224 def bookmarks(self, wire):
225 225 cache_on, context_uid, repo_id = self._cache_on(wire)
226 226 @self.region.conditional_cache_on_arguments(condition=cache_on)
227 227 def _bookmarks(_context_uid, _repo_id):
228 228 repo = self._factory.repo(wire)
229 229 return dict(repo._bookmarks)
230 230
231 231 return _bookmarks(context_uid, repo_id)
232 232
233 233 @reraise_safe_exceptions
234 234 def branches(self, wire, normal, closed):
235 235 cache_on, context_uid, repo_id = self._cache_on(wire)
236 236 @self.region.conditional_cache_on_arguments(condition=cache_on)
237 237 def _branches(_context_uid, _repo_id, _normal, _closed):
238 238 repo = self._factory.repo(wire)
239 239 iter_branches = repo.branchmap().iterbranches()
240 240 bt = {}
241 241 for branch_name, _heads, tip, is_closed in iter_branches:
242 242 if normal and not is_closed:
243 243 bt[branch_name] = tip
244 244 if closed and is_closed:
245 245 bt[branch_name] = tip
246 246
247 247 return bt
248 248
249 249 return _branches(context_uid, repo_id, normal, closed)
250 250
251 251 @reraise_safe_exceptions
252 252 def bulk_request(self, wire, commit_id, pre_load):
253 253 cache_on, context_uid, repo_id = self._cache_on(wire)
254 254 @self.region.conditional_cache_on_arguments(condition=cache_on)
255 255 def _bulk_request(_repo_id, _commit_id, _pre_load):
256 256 result = {}
257 257 for attr in pre_load:
258 258 try:
259 259 method = self._bulk_methods[attr]
260 260 result[attr] = method(wire, commit_id)
261 261 except KeyError as e:
262 262 raise exceptions.VcsException(e)(
263 263 'Unknown bulk attribute: "%s"' % attr)
264 264 return result
265 265
266 266 return _bulk_request(repo_id, commit_id, sorted(pre_load))
267 267
268 268 @reraise_safe_exceptions
269 269 def ctx_branch(self, wire, commit_id):
270 270 cache_on, context_uid, repo_id = self._cache_on(wire)
271 271 @self.region.conditional_cache_on_arguments(condition=cache_on)
272 272 def _ctx_branch(_repo_id, _commit_id):
273 273 repo = self._factory.repo(wire)
274 274 ctx = self._get_ctx(repo, commit_id)
275 275 return ctx.branch()
276 276 return _ctx_branch(repo_id, commit_id)
277 277
278 278 @reraise_safe_exceptions
279 279 def ctx_date(self, wire, commit_id):
280 280 cache_on, context_uid, repo_id = self._cache_on(wire)
281 281 @self.region.conditional_cache_on_arguments(condition=cache_on)
282 282 def _ctx_date(_repo_id, _commit_id):
283 283 repo = self._factory.repo(wire)
284 284 ctx = self._get_ctx(repo, commit_id)
285 285 return ctx.date()
286 286 return _ctx_date(repo_id, commit_id)
287 287
288 288 @reraise_safe_exceptions
289 289 def ctx_description(self, wire, revision):
290 290 repo = self._factory.repo(wire)
291 291 ctx = self._get_ctx(repo, revision)
292 292 return ctx.description()
293 293
294 294 @reraise_safe_exceptions
295 295 def ctx_files(self, wire, commit_id):
296 296 cache_on, context_uid, repo_id = self._cache_on(wire)
297 297 @self.region.conditional_cache_on_arguments(condition=cache_on)
298 298 def _ctx_files(_repo_id, _commit_id):
299 299 repo = self._factory.repo(wire)
300 300 ctx = self._get_ctx(repo, commit_id)
301 301 return ctx.files()
302 302
303 303 return _ctx_files(repo_id, commit_id)
304 304
305 305 @reraise_safe_exceptions
306 306 def ctx_list(self, path, revision):
307 307 repo = self._factory.repo(path)
308 308 ctx = self._get_ctx(repo, revision)
309 309 return list(ctx)
310 310
311 311 @reraise_safe_exceptions
312 312 def ctx_parents(self, wire, commit_id):
313 313 cache_on, context_uid, repo_id = self._cache_on(wire)
314 314 @self.region.conditional_cache_on_arguments(condition=cache_on)
315 315 def _ctx_parents(_repo_id, _commit_id):
316 316 repo = self._factory.repo(wire)
317 317 ctx = self._get_ctx(repo, commit_id)
318 318 return [parent.hex() for parent in ctx.parents()
319 319 if not (parent.hidden() or parent.obsolete())]
320 320
321 321 return _ctx_parents(repo_id, commit_id)
322 322
323 323 @reraise_safe_exceptions
324 324 def ctx_children(self, wire, commit_id):
325 325 cache_on, context_uid, repo_id = self._cache_on(wire)
326 326 @self.region.conditional_cache_on_arguments(condition=cache_on)
327 327 def _ctx_children(_repo_id, _commit_id):
328 328 repo = self._factory.repo(wire)
329 329 ctx = self._get_ctx(repo, commit_id)
330 330 return [child.hex() for child in ctx.children()
331 331 if not (child.hidden() or child.obsolete())]
332 332
333 333 return _ctx_children(repo_id, commit_id)
334 334
335 335 @reraise_safe_exceptions
336 336 def ctx_phase(self, wire, commit_id):
337 337 cache_on, context_uid, repo_id = self._cache_on(wire)
338 338 @self.region.conditional_cache_on_arguments(condition=cache_on)
339 339 def _ctx_phase(_context_uid, _repo_id, _commit_id):
340 340 repo = self._factory.repo(wire)
341 341 ctx = self._get_ctx(repo, commit_id)
342 342 # public=0, draft=1, secret=3
343 343 return ctx.phase()
344 344 return _ctx_phase(context_uid, repo_id, commit_id)
345 345
346 346 @reraise_safe_exceptions
347 347 def ctx_obsolete(self, wire, commit_id):
348 348 cache_on, context_uid, repo_id = self._cache_on(wire)
349 349 @self.region.conditional_cache_on_arguments(condition=cache_on)
350 350 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
351 351 repo = self._factory.repo(wire)
352 352 ctx = self._get_ctx(repo, commit_id)
353 353 return ctx.obsolete()
354 354 return _ctx_obsolete(context_uid, repo_id, commit_id)
355 355
356 356 @reraise_safe_exceptions
357 357 def ctx_hidden(self, wire, commit_id):
358 358 cache_on, context_uid, repo_id = self._cache_on(wire)
359 359 @self.region.conditional_cache_on_arguments(condition=cache_on)
360 360 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
361 361 repo = self._factory.repo(wire)
362 362 ctx = self._get_ctx(repo, commit_id)
363 363 return ctx.hidden()
364 364 return _ctx_hidden(context_uid, repo_id, commit_id)
365 365
366 366 @reraise_safe_exceptions
367 367 def ctx_substate(self, wire, revision):
368 368 repo = self._factory.repo(wire)
369 369 ctx = self._get_ctx(repo, revision)
370 370 return ctx.substate
371 371
372 372 @reraise_safe_exceptions
373 373 def ctx_status(self, wire, revision):
374 374 repo = self._factory.repo(wire)
375 375 ctx = self._get_ctx(repo, revision)
376 376 status = repo[ctx.p1().node()].status(other=ctx.node())
377 377 # object of status (odd, custom named tuple in mercurial) is not
378 378 # correctly serializable, we make it a list, as the underling
379 379 # API expects this to be a list
380 380 return list(status)
381 381
382 382 @reraise_safe_exceptions
383 383 def ctx_user(self, wire, revision):
384 384 repo = self._factory.repo(wire)
385 385 ctx = self._get_ctx(repo, revision)
386 386 return ctx.user()
387 387
388 388 @reraise_safe_exceptions
389 389 def check_url(self, url, config):
390 390 _proto = None
391 391 if '+' in url[:url.find('://')]:
392 392 _proto = url[0:url.find('+')]
393 393 url = url[url.find('+') + 1:]
394 394 handlers = []
395 395 url_obj = url_parser(url)
396 396 test_uri, authinfo = url_obj.authinfo()
397 397 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
398 398 url_obj.query = obfuscate_qs(url_obj.query)
399 399
400 400 cleaned_uri = str(url_obj)
401 401 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
402 402
403 403 if authinfo:
404 404 # create a password manager
405 405 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
406 406 passmgr.add_password(*authinfo)
407 407
408 408 handlers.extend((httpbasicauthhandler(passmgr),
409 409 httpdigestauthhandler(passmgr)))
410 410
411 411 o = urllib2.build_opener(*handlers)
412 412 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
413 413 ('Accept', 'application/mercurial-0.1')]
414 414
415 415 q = {"cmd": 'between'}
416 416 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
417 417 qs = '?%s' % urllib.urlencode(q)
418 418 cu = "%s%s" % (test_uri, qs)
419 419 req = urllib2.Request(cu, None, {})
420 420
421 421 try:
422 422 log.debug("Trying to open URL %s", cleaned_uri)
423 423 resp = o.open(req)
424 424 if resp.code != 200:
425 425 raise exceptions.URLError()('Return Code is not 200')
426 426 except Exception as e:
427 427 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
428 428 # means it cannot be cloned
429 429 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
430 430
431 431 # now check if it's a proper hg repo, but don't do it for svn
432 432 try:
433 433 if _proto == 'svn':
434 434 pass
435 435 else:
436 436 # check for pure hg repos
437 437 log.debug(
438 438 "Verifying if URL is a Mercurial repository: %s",
439 439 cleaned_uri)
440 440 ui = make_ui_from_config(config)
441 441 peer_checker = makepeer(ui, url)
442 442 peer_checker.lookup('tip')
443 443 except Exception as e:
444 444 log.warning("URL is not a valid Mercurial repository: %s",
445 445 cleaned_uri)
446 446 raise exceptions.URLError(e)(
447 447 "url [%s] does not look like an hg repo org_exc: %s"
448 448 % (cleaned_uri, e))
449 449
450 450 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
451 451 return True
452 452
453 453 @reraise_safe_exceptions
454 454 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
455 455 repo = self._factory.repo(wire)
456 456
457 457 if file_filter:
458 458 match_filter = match(file_filter[0], '', [file_filter[1]])
459 459 else:
460 460 match_filter = file_filter
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
461 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
462 462
463 463 try:
464 464 return "".join(patch.diff(
465 465 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
466 466 except RepoLookupError as e:
467 467 raise exceptions.LookupException(e)()
468 468
469 469 @reraise_safe_exceptions
470 470 def node_history(self, wire, revision, path, limit):
471 471 cache_on, context_uid, repo_id = self._cache_on(wire)
472 472 @self.region.conditional_cache_on_arguments(condition=cache_on)
473 473 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
474 474 repo = self._factory.repo(wire)
475 475
476 476 ctx = self._get_ctx(repo, revision)
477 477 fctx = ctx.filectx(path)
478 478
479 479 def history_iter():
480 480 limit_rev = fctx.rev()
481 481 for obj in reversed(list(fctx.filelog())):
482 482 obj = fctx.filectx(obj)
483 483 ctx = obj.changectx()
484 484 if ctx.hidden() or ctx.obsolete():
485 485 continue
486 486
487 487 if limit_rev >= obj.rev():
488 488 yield obj
489 489
490 490 history = []
491 491 for cnt, obj in enumerate(history_iter()):
492 492 if limit and cnt >= limit:
493 493 break
494 494 history.append(hex(obj.node()))
495 495
496 496 return [x for x in history]
497 497 return _node_history(context_uid, repo_id, revision, path, limit)
498 498
499 499 @reraise_safe_exceptions
500 500 def node_history_untill(self, wire, revision, path, limit):
501 501 cache_on, context_uid, repo_id = self._cache_on(wire)
502 502 @self.region.conditional_cache_on_arguments(condition=cache_on)
503 503 def _node_history_until(_context_uid, _repo_id):
504 504 repo = self._factory.repo(wire)
505 505 ctx = self._get_ctx(repo, revision)
506 506 fctx = ctx.filectx(path)
507 507
508 508 file_log = list(fctx.filelog())
509 509 if limit:
510 510 # Limit to the last n items
511 511 file_log = file_log[-limit:]
512 512
513 513 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
514 514 return _node_history_until(context_uid, repo_id, revision, path, limit)
515 515
516 516 @reraise_safe_exceptions
517 517 def fctx_annotate(self, wire, revision, path):
518 518 repo = self._factory.repo(wire)
519 519 ctx = self._get_ctx(repo, revision)
520 520 fctx = ctx.filectx(path)
521 521
522 522 result = []
523 523 for i, annotate_obj in enumerate(fctx.annotate(), 1):
524 524 ln_no = i
525 525 sha = hex(annotate_obj.fctx.node())
526 526 content = annotate_obj.text
527 527 result.append((ln_no, sha, content))
528 528 return result
529 529
530 530 @reraise_safe_exceptions
531 531 def fctx_node_data(self, wire, revision, path):
532 532 repo = self._factory.repo(wire)
533 533 ctx = self._get_ctx(repo, revision)
534 534 fctx = ctx.filectx(path)
535 535 return fctx.data()
536 536
537 537 @reraise_safe_exceptions
538 538 def fctx_flags(self, wire, commit_id, path):
539 539 cache_on, context_uid, repo_id = self._cache_on(wire)
540 540 @self.region.conditional_cache_on_arguments(condition=cache_on)
541 541 def _fctx_flags(_repo_id, _commit_id, _path):
542 542 repo = self._factory.repo(wire)
543 543 ctx = self._get_ctx(repo, commit_id)
544 544 fctx = ctx.filectx(path)
545 545 return fctx.flags()
546 546
547 547 return _fctx_flags(repo_id, commit_id, path)
548 548
549 549 @reraise_safe_exceptions
550 550 def fctx_size(self, wire, commit_id, path):
551 551 cache_on, context_uid, repo_id = self._cache_on(wire)
552 552 @self.region.conditional_cache_on_arguments(condition=cache_on)
553 553 def _fctx_size(_repo_id, _revision, _path):
554 554 repo = self._factory.repo(wire)
555 555 ctx = self._get_ctx(repo, commit_id)
556 556 fctx = ctx.filectx(path)
557 557 return fctx.size()
558 558 return _fctx_size(repo_id, commit_id, path)
559 559
560 560 @reraise_safe_exceptions
561 561 def get_all_commit_ids(self, wire, name):
562 562 cache_on, context_uid, repo_id = self._cache_on(wire)
563 563 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 564 def _get_all_commit_ids(_context_uid, _repo_id, _name):
565 565 repo = self._factory.repo(wire)
566 566 repo = repo.filtered(name)
567 567 revs = map(lambda x: hex(x[7]), repo.changelog.index)
568 568 return revs
569 569 return _get_all_commit_ids(context_uid, repo_id, name)
570 570
571 571 @reraise_safe_exceptions
572 572 def get_config_value(self, wire, section, name, untrusted=False):
573 573 repo = self._factory.repo(wire)
574 574 return repo.ui.config(section, name, untrusted=untrusted)
575 575
576 576 @reraise_safe_exceptions
577 577 def is_large_file(self, wire, commit_id, path):
578 578 cache_on, context_uid, repo_id = self._cache_on(wire)
579 579 @self.region.conditional_cache_on_arguments(condition=cache_on)
580 580 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
581 581 return largefiles.lfutil.isstandin(path)
582 582
583 583 return _is_large_file(context_uid, repo_id, commit_id, path)
584 584
585 585 @reraise_safe_exceptions
586 586 def is_binary(self, wire, revision, path):
587 587 cache_on, context_uid, repo_id = self._cache_on(wire)
588 588
589 589 @self.region.conditional_cache_on_arguments(condition=cache_on)
590 590 def _is_binary(_repo_id, _sha, _path):
591 591 repo = self._factory.repo(wire)
592 592 ctx = self._get_ctx(repo, revision)
593 593 fctx = ctx.filectx(path)
594 594 return fctx.isbinary()
595 595
596 596 return _is_binary(repo_id, revision, path)
597 597
598 598 @reraise_safe_exceptions
599 599 def in_largefiles_store(self, wire, sha):
600 600 repo = self._factory.repo(wire)
601 601 return largefiles.lfutil.instore(repo, sha)
602 602
603 603 @reraise_safe_exceptions
604 604 def in_user_cache(self, wire, sha):
605 605 repo = self._factory.repo(wire)
606 606 return largefiles.lfutil.inusercache(repo.ui, sha)
607 607
608 608 @reraise_safe_exceptions
609 609 def store_path(self, wire, sha):
610 610 repo = self._factory.repo(wire)
611 611 return largefiles.lfutil.storepath(repo, sha)
612 612
613 613 @reraise_safe_exceptions
614 614 def link(self, wire, sha, path):
615 615 repo = self._factory.repo(wire)
616 616 largefiles.lfutil.link(
617 617 largefiles.lfutil.usercachepath(repo.ui, sha), path)
618 618
619 619 @reraise_safe_exceptions
620 620 def localrepository(self, wire, create=False):
621 621 self._factory.repo(wire, create=create)
622 622
623 623 @reraise_safe_exceptions
624 624 def lookup(self, wire, revision, both):
625 625 cache_on, context_uid, repo_id = self._cache_on(wire)
626 626 @self.region.conditional_cache_on_arguments(condition=cache_on)
627 627 def _lookup(_context_uid, _repo_id, _revision, _both):
628 628
629 629 repo = self._factory.repo(wire)
630 630 rev = _revision
631 631 if isinstance(rev, int):
632 632 # NOTE(marcink):
633 633 # since Mercurial doesn't support negative indexes properly
634 634 # we need to shift accordingly by one to get proper index, e.g
635 635 # repo[-1] => repo[-2]
636 636 # repo[0] => repo[-1]
637 637 if rev <= 0:
638 638 rev = rev + -1
639 639 try:
640 640 ctx = self._get_ctx(repo, rev)
641 641 except (TypeError, RepoLookupError) as e:
642 642 e._org_exc_tb = traceback.format_exc()
643 643 raise exceptions.LookupException(e)(rev)
644 644 except LookupError as e:
645 645 e._org_exc_tb = traceback.format_exc()
646 646 raise exceptions.LookupException(e)(e.name)
647 647
648 648 if not both:
649 649 return ctx.hex()
650 650
651 651 ctx = repo[ctx.hex()]
652 652 return ctx.hex(), ctx.rev()
653 653
654 654 return _lookup(context_uid, repo_id, revision, both)
655 655
656 656 @reraise_safe_exceptions
657 657 def sync_push(self, wire, url):
658 658 if not self.check_url(url, wire['config']):
659 659 return
660 660
661 661 repo = self._factory.repo(wire)
662 662
663 663 # Disable any prompts for this repo
664 664 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
665 665
666 666 bookmarks = dict(repo._bookmarks).keys()
667 667 remote = peer(repo, {}, url)
668 668 # Disable any prompts for this remote
669 669 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
670 670
671 671 return exchange.push(
672 672 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
673 673
674 674 @reraise_safe_exceptions
675 675 def revision(self, wire, rev):
676 676 repo = self._factory.repo(wire)
677 677 ctx = self._get_ctx(repo, rev)
678 678 return ctx.rev()
679 679
680 680 @reraise_safe_exceptions
681 681 def rev_range(self, wire, commit_filter):
682 682 cache_on, context_uid, repo_id = self._cache_on(wire)
683 683
684 684 @self.region.conditional_cache_on_arguments(condition=cache_on)
685 685 def _rev_range(_context_uid, _repo_id, _filter):
686 686 repo = self._factory.repo(wire)
687 687 revisions = [rev for rev in revrange(repo, commit_filter)]
688 688 return revisions
689 689
690 690 return _rev_range(context_uid, repo_id, sorted(commit_filter))
691 691
692 692 @reraise_safe_exceptions
693 693 def rev_range_hash(self, wire, node):
694 694 repo = self._factory.repo(wire)
695 695
696 696 def get_revs(repo, rev_opt):
697 697 if rev_opt:
698 698 revs = revrange(repo, rev_opt)
699 699 if len(revs) == 0:
700 700 return (nullrev, nullrev)
701 701 return max(revs), min(revs)
702 702 else:
703 703 return len(repo) - 1, 0
704 704
705 705 stop, start = get_revs(repo, [node + ':'])
706 706 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
707 707 return revs
708 708
709 709 @reraise_safe_exceptions
710 710 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
711 711 other_path = kwargs.pop('other_path', None)
712 712
713 713 # case when we want to compare two independent repositories
714 714 if other_path and other_path != wire["path"]:
715 715 baseui = self._factory._create_config(wire["config"])
716 716 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
717 717 else:
718 718 repo = self._factory.repo(wire)
719 719 return list(repo.revs(rev_spec, *args))
720 720
721 721 @reraise_safe_exceptions
722 722 def verify(self, wire,):
723 723 repo = self._factory.repo(wire)
724 724 baseui = self._factory._create_config(wire['config'])
725 725
726 726 baseui, output = patch_ui_message_output(baseui)
727 727
728 728 repo.ui = baseui
729 729 verify.verify(repo)
730 730 return output.getvalue()
731 731
732 732 @reraise_safe_exceptions
733 733 def hg_update_cache(self, wire,):
734 734 repo = self._factory.repo(wire)
735 735 baseui = self._factory._create_config(wire['config'])
736 736 baseui, output = patch_ui_message_output(baseui)
737 737
738 738 repo.ui = baseui
739 739 with repo.wlock(), repo.lock():
740 740 repo.updatecaches(full=True)
741 741
742 742 return output.getvalue()
743 743
744 744 @reraise_safe_exceptions
745 745 def hg_rebuild_fn_cache(self, wire,):
746 746 repo = self._factory.repo(wire)
747 747 baseui = self._factory._create_config(wire['config'])
748 748 baseui, output = patch_ui_message_output(baseui)
749 749
750 750 repo.ui = baseui
751 751
752 752 repair.rebuildfncache(baseui, repo)
753 753
754 754 return output.getvalue()
755 755
756 756 @reraise_safe_exceptions
757 757 def tags(self, wire):
758 758 cache_on, context_uid, repo_id = self._cache_on(wire)
759 759 @self.region.conditional_cache_on_arguments(condition=cache_on)
760 760 def _tags(_context_uid, _repo_id):
761 761 repo = self._factory.repo(wire)
762 762 return repo.tags()
763 763
764 764 return _tags(context_uid, repo_id)
765 765
766 766 @reraise_safe_exceptions
767 767 def update(self, wire, node=None, clean=False):
768 768 repo = self._factory.repo(wire)
769 769 baseui = self._factory._create_config(wire['config'])
770 770 commands.update(baseui, repo, node=node, clean=clean)
771 771
772 772 @reraise_safe_exceptions
773 773 def identify(self, wire):
774 774 repo = self._factory.repo(wire)
775 775 baseui = self._factory._create_config(wire['config'])
776 776 output = io.BytesIO()
777 777 baseui.write = output.write
778 778 # This is required to get a full node id
779 779 baseui.debugflag = True
780 780 commands.identify(baseui, repo, id=True)
781 781
782 782 return output.getvalue()
783 783
784 784 @reraise_safe_exceptions
785 785 def heads(self, wire, branch=None):
786 786 repo = self._factory.repo(wire)
787 787 baseui = self._factory._create_config(wire['config'])
788 788 output = io.BytesIO()
789 789
790 790 def write(data, **unused_kwargs):
791 791 output.write(data)
792 792
793 793 baseui.write = write
794 794 if branch:
795 795 args = [branch]
796 796 else:
797 797 args = []
798 798 commands.heads(baseui, repo, template='{node} ', *args)
799 799
800 800 return output.getvalue()
801 801
802 802 @reraise_safe_exceptions
803 803 def ancestor(self, wire, revision1, revision2):
804 804 repo = self._factory.repo(wire)
805 805 changelog = repo.changelog
806 806 lookup = repo.lookup
807 807 a = changelog.ancestor(lookup(revision1), lookup(revision2))
808 808 return hex(a)
809 809
810 810 @reraise_safe_exceptions
811 811 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
812 812 baseui = self._factory._create_config(wire["config"], hooks=hooks)
813 813 clone(baseui, source, dest, noupdate=not update_after_clone)
814 814
815 815 @reraise_safe_exceptions
816 816 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
817 817
818 818 repo = self._factory.repo(wire)
819 819 baseui = self._factory._create_config(wire['config'])
820 820 publishing = baseui.configbool('phases', 'publish')
821 821 if publishing:
822 822 new_commit = 'public'
823 823 else:
824 824 new_commit = 'draft'
825 825
826 826 def _filectxfn(_repo, ctx, path):
827 827 """
828 828 Marks given path as added/changed/removed in a given _repo. This is
829 829 for internal mercurial commit function.
830 830 """
831 831
832 832 # check if this path is removed
833 833 if path in removed:
834 834 # returning None is a way to mark node for removal
835 835 return None
836 836
837 837 # check if this path is added
838 838 for node in updated:
839 839 if node['path'] == path:
840 840 return memfilectx(
841 841 _repo,
842 842 changectx=ctx,
843 843 path=node['path'],
844 844 data=node['content'],
845 845 islink=False,
846 846 isexec=bool(node['mode'] & stat.S_IXUSR),
847 847 copysource=False)
848 848
849 849 raise exceptions.AbortException()(
850 850 "Given path haven't been marked as added, "
851 851 "changed or removed (%s)" % path)
852 852
853 853 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
854 854
855 855 commit_ctx = memctx(
856 856 repo=repo,
857 857 parents=parents,
858 858 text=message,
859 859 files=files,
860 860 filectxfn=_filectxfn,
861 861 user=user,
862 862 date=(commit_time, commit_timezone),
863 863 extra=extra)
864 864
865 865 n = repo.commitctx(commit_ctx)
866 866 new_id = hex(n)
867 867
868 868 return new_id
869 869
870 870 @reraise_safe_exceptions
871 871 def pull(self, wire, url, commit_ids=None):
872 872 repo = self._factory.repo(wire)
873 873 # Disable any prompts for this repo
874 874 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
875 875
876 876 remote = peer(repo, {}, url)
877 877 # Disable any prompts for this remote
878 878 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
879 879
880 880 if commit_ids:
881 881 commit_ids = [bin(commit_id) for commit_id in commit_ids]
882 882
883 883 return exchange.pull(
884 884 repo, remote, heads=commit_ids, force=None).cgresult
885 885
886 886 @reraise_safe_exceptions
887 887 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
888 888 repo = self._factory.repo(wire)
889 889 baseui = self._factory._create_config(wire['config'], hooks=hooks)
890 890
891 891 # Mercurial internally has a lot of logic that checks ONLY if
892 892 # option is defined, we just pass those if they are defined then
893 893 opts = {}
894 894 if bookmark:
895 895 opts['bookmark'] = bookmark
896 896 if branch:
897 897 opts['branch'] = branch
898 898 if revision:
899 899 opts['rev'] = revision
900 900
901 901 commands.pull(baseui, repo, source, **opts)
902 902
903 903 @reraise_safe_exceptions
904 904 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
905 905 repo = self._factory.repo(wire)
906 906 baseui = self._factory._create_config(wire['config'], hooks=hooks)
907 907 commands.push(baseui, repo, dest=dest_path, rev=revisions,
908 908 new_branch=push_branches)
909 909
910 910 @reraise_safe_exceptions
911 911 def strip(self, wire, revision, update, backup):
912 912 repo = self._factory.repo(wire)
913 913 ctx = self._get_ctx(repo, revision)
914 914 hgext_strip(
915 915 repo.baseui, repo, ctx.node(), update=update, backup=backup)
916 916
917 917 @reraise_safe_exceptions
918 918 def get_unresolved_files(self, wire):
919 919 repo = self._factory.repo(wire)
920 920
921 921 log.debug('Calculating unresolved files for repo: %s', repo)
922 922 output = io.BytesIO()
923 923
924 924 def write(data, **unused_kwargs):
925 925 output.write(data)
926 926
927 927 baseui = self._factory._create_config(wire['config'])
928 928 baseui.write = write
929 929
930 930 commands.resolve(baseui, repo, list=True)
931 931 unresolved = output.getvalue().splitlines(0)
932 932 return unresolved
933 933
934 934 @reraise_safe_exceptions
935 935 def merge(self, wire, revision):
936 936 repo = self._factory.repo(wire)
937 937 baseui = self._factory._create_config(wire['config'])
938 938 repo.ui.setconfig('ui', 'merge', 'internal:dump')
939 939
940 940 # In case of sub repositories are used mercurial prompts the user in
941 941 # case of merge conflicts or different sub repository sources. By
942 942 # setting the interactive flag to `False` mercurial doesn't prompt the
943 943 # used but instead uses a default value.
944 944 repo.ui.setconfig('ui', 'interactive', False)
945 945 commands.merge(baseui, repo, rev=revision)
946 946
947 947 @reraise_safe_exceptions
948 948 def merge_state(self, wire):
949 949 repo = self._factory.repo(wire)
950 950 repo.ui.setconfig('ui', 'merge', 'internal:dump')
951 951
952 952 # In case of sub repositories are used mercurial prompts the user in
953 953 # case of merge conflicts or different sub repository sources. By
954 954 # setting the interactive flag to `False` mercurial doesn't prompt the
955 955 # used but instead uses a default value.
956 956 repo.ui.setconfig('ui', 'interactive', False)
957 957 ms = hg_merge.mergestate(repo)
958 958 return [x for x in ms.unresolved()]
959 959
960 960 @reraise_safe_exceptions
961 961 def commit(self, wire, message, username, close_branch=False):
962 962 repo = self._factory.repo(wire)
963 963 baseui = self._factory._create_config(wire['config'])
964 964 repo.ui.setconfig('ui', 'username', username)
965 965 commands.commit(baseui, repo, message=message, close_branch=close_branch)
966 966
967 967 @reraise_safe_exceptions
968 968 def rebase(self, wire, source=None, dest=None, abort=False):
969 969 repo = self._factory.repo(wire)
970 970 baseui = self._factory._create_config(wire['config'])
971 971 repo.ui.setconfig('ui', 'merge', 'internal:dump')
972 972 # In case of sub repositories are used mercurial prompts the user in
973 973 # case of merge conflicts or different sub repository sources. By
974 974 # setting the interactive flag to `False` mercurial doesn't prompt the
975 975 # used but instead uses a default value.
976 976 repo.ui.setconfig('ui', 'interactive', False)
977 977 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
978 978
979 979 @reraise_safe_exceptions
980 980 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
981 981 repo = self._factory.repo(wire)
982 982 ctx = self._get_ctx(repo, revision)
983 983 node = ctx.node()
984 984
985 985 date = (tag_time, tag_timezone)
986 986 try:
987 987 hg_tag.tag(repo, name, node, message, local, user, date)
988 988 except Abort as e:
989 989 log.exception("Tag operation aborted")
990 990 # Exception can contain unicode which we convert
991 991 raise exceptions.AbortException(e)(repr(e))
992 992
993 993 @reraise_safe_exceptions
994 994 def bookmark(self, wire, bookmark, revision=None):
995 995 repo = self._factory.repo(wire)
996 996 baseui = self._factory._create_config(wire['config'])
997 997 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
998 998
999 999 @reraise_safe_exceptions
1000 1000 def install_hooks(self, wire, force=False):
1001 1001 # we don't need any special hooks for Mercurial
1002 1002 pass
1003 1003
1004 1004 @reraise_safe_exceptions
1005 1005 def get_hooks_info(self, wire):
1006 1006 return {
1007 1007 'pre_version': vcsserver.__version__,
1008 1008 'post_version': vcsserver.__version__,
1009 1009 }
@@ -1,79 +1,79 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 from mercurial import demandimport
24 24 # patch demandimport, due to bug in mercurial when it always triggers
25 25 # demandimport.enable()
26 26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import subrepoutil
40 40 from mercurial import tags as hg_tag
41 41
42 42 from mercurial.commands import clone, nullid, pull
43 43 from mercurial.context import memctx, memfilectx
44 44 from mercurial.error import (
45 45 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
46 46 RequirementError, ProgrammingError)
47 47 from mercurial.hgweb import hgweb_mod
48 48 from mercurial.localrepo import instance
49 49 from mercurial.match import match
50 50 from mercurial.mdiff import diffopts
51 51 from mercurial.node import bin, hex
52 52 from mercurial.encoding import tolocal
53 53 from mercurial.discovery import findcommonoutgoing
54 54 from mercurial.hg import peer
55 55 from mercurial.httppeer import makepeer
56 56 from mercurial.util import url as hg_url
57 57 from mercurial.scmutil import revrange, revsymbol
58 58 from mercurial.node import nullrev
59 59 from mercurial import exchange
60 60 from hgext import largefiles
61 61
62 62 # those authnadlers are patched for python 2.6.5 bug an
63 63 # infinit looping when given invalid resources
64 64 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
65 65
66 66
67 67 def get_ctx(repo, ref):
68 68 try:
69 69 ctx = repo[ref]
70 70 except ProgrammingError:
71 71 # we're unable to find the rev using a regular lookup, we fallback
72 72 # to slower, but backward compat revsymbol usage
73 73 ctx = revsymbol(repo, ref)
74 74 except (LookupError, RepoLookupError):
75 75 # Similar case as above but only for refs that are not numeric
76 76 if isinstance(ref, (int, long)):
77 77 raise
78 78 ctx = revsymbol(repo, ref)
79 79 return ctx
@@ -1,134 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
31 31 def patch_largefiles_capabilities():
32 32 """
33 33 Patches the capabilities function in the largefiles extension.
34 34 """
35 35 from vcsserver import hgcompat
36 36 lfproto = hgcompat.largefiles.proto
37 37 wrapper = _dynamic_capabilities_wrapper(
38 38 lfproto, hgcompat.extensions.extensions)
39 39 lfproto._capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 44 wrapped_capabilities = lfproto._capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 47 def _dynamic_capabilities(orig, repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 55 return calc_capabilities(orig, repo, proto)
56 56 else:
57 57 logger.debug('Extension largefiles disabled')
58 58 return orig(repo, proto)
59 59
60 60 return _dynamic_capabilities
61 61
62 62
63 63 def patch_subrepo_type_mapping():
64 64 from collections import defaultdict
65 65 from hgcompat import subrepo, subrepoutil
66 66 from vcsserver.exceptions import SubrepoMergeException
67 67
68 68 class NoOpSubrepo(subrepo.abstractsubrepo):
69 69
70 70 def __init__(self, ctx, path, *args, **kwargs):
71 71 """Initialize abstractsubrepo part
72 72
73 73 ``ctx`` is the context referring this subrepository in the
74 74 parent repository.
75 75
76 76 ``path`` is the path to this subrepository as seen from
77 77 innermost repository.
78 78 """
79 79 self.ui = ctx.repo().ui
80 80 self._ctx = ctx
81 81 self._path = path
82 82
83 83 def storeclean(self, path):
84 84 """
85 85 returns true if the repository has not changed since it was last
86 86 cloned from or pushed to a given repository.
87 87 """
88 88 return True
89 89
90 90 def dirty(self, ignoreupdate=False, missing=False):
91 91 """returns true if the dirstate of the subrepo is dirty or does not
92 92 match current stored state. If ignoreupdate is true, only check
93 93 whether the subrepo has uncommitted changes in its dirstate.
94 94 """
95 95 return False
96 96
97 97 def basestate(self):
98 98 """current working directory base state, disregarding .hgsubstate
99 99 state and working directory modifications"""
100 100 substate = subrepoutil.state(self._ctx, self.ui)
101 101 file_system_path, rev, repotype = substate.get(self._path)
102 102 return rev
103 103
104 104 def remove(self):
105 105 """remove the subrepo
106 106
107 107 (should verify the dirstate is not dirty first)
108 108 """
109 109 pass
110 110
111 111 def get(self, state, overwrite=False):
112 112 """run whatever commands are needed to put the subrepo into
113 113 this state
114 114 """
115 115 pass
116 116
117 117 def merge(self, state):
118 118 """merge currently-saved state with the new state."""
119 119 raise SubrepoMergeException()()
120 120
121 121 def push(self, opts):
122 122 """perform whatever action is analogous to 'hg push'
123 123
124 124 This may be a no-op on some systems.
125 125 """
126 126 pass
127 127
128 128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 129 # whenever a subrepo class is looked up.
130 130 subrepo.types = {
131 131 'hg': NoOpSubrepo,
132 132 'git': NoOpSubrepo,
133 133 'svn': NoOpSubrepo
134 134 }
@@ -1,205 +1,205 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import re
21 21 import os
22 22 import sys
23 23 import datetime
24 24 import logging
25 25 import pkg_resources
26 26
27 27 import vcsserver
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
32 32 def get_git_hooks_path(repo_path, bare):
33 33 hooks_path = os.path.join(repo_path, 'hooks')
34 34 if not bare:
35 35 hooks_path = os.path.join(repo_path, '.git', 'hooks')
36 36
37 37 return hooks_path
38 38
39 39
40 40 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
41 41 """
42 42 Creates a RhodeCode hook inside a git repository
43 43
44 44 :param repo_path: path to repository
45 45 :param executable: binary executable to put in the hooks
46 46 :param force_create: Create even if same name hook exists
47 47 """
48 48 executable = executable or sys.executable
49 49 hooks_path = get_git_hooks_path(repo_path, bare)
50 50
51 51 if not os.path.isdir(hooks_path):
52 52 os.makedirs(hooks_path, mode=0o777)
53 53
54 54 tmpl_post = pkg_resources.resource_string(
55 55 'vcsserver', '/'.join(
56 56 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
57 57 tmpl_pre = pkg_resources.resource_string(
58 58 'vcsserver', '/'.join(
59 59 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
60 60
61 61 path = '' # not used for now
62 62 timestamp = datetime.datetime.utcnow().isoformat()
63 63
64 64 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
65 65 log.debug('Installing git hook in repo %s', repo_path)
66 66 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
67 67 _rhodecode_hook = check_rhodecode_hook(_hook_file)
68 68
69 69 if _rhodecode_hook or force_create:
70 70 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
71 71 try:
72 72 with open(_hook_file, 'wb') as f:
73 73 template = template.replace(
74 74 '_TMPL_', vcsserver.__version__)
75 75 template = template.replace('_DATE_', timestamp)
76 76 template = template.replace('_ENV_', executable)
77 77 template = template.replace('_PATH_', path)
78 78 f.write(template)
79 79 os.chmod(_hook_file, 0o755)
80 80 except IOError:
81 81 log.exception('error writing hook file %s', _hook_file)
82 82 else:
83 83 log.debug('skipping writing hook file')
84 84
85 85 return True
86 86
87 87
88 88 def get_svn_hooks_path(repo_path):
89 89 hooks_path = os.path.join(repo_path, 'hooks')
90 90
91 91 return hooks_path
92 92
93 93
94 94 def install_svn_hooks(repo_path, executable=None, force_create=False):
95 95 """
96 96 Creates RhodeCode hooks inside a svn repository
97 97
98 98 :param repo_path: path to repository
99 99 :param executable: binary executable to put in the hooks
100 100 :param force_create: Create even if same name hook exists
101 101 """
102 102 executable = executable or sys.executable
103 103 hooks_path = get_svn_hooks_path(repo_path)
104 104 if not os.path.isdir(hooks_path):
105 105 os.makedirs(hooks_path, mode=0o777)
106 106
107 107 tmpl_post = pkg_resources.resource_string(
108 108 'vcsserver', '/'.join(
109 109 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
110 110 tmpl_pre = pkg_resources.resource_string(
111 111 'vcsserver', '/'.join(
112 112 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
113 113
114 114 path = '' # not used for now
115 115 timestamp = datetime.datetime.utcnow().isoformat()
116 116
117 117 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
118 118 log.debug('Installing svn hook in repo %s', repo_path)
119 119 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
120 120 _rhodecode_hook = check_rhodecode_hook(_hook_file)
121 121
122 122 if _rhodecode_hook or force_create:
123 123 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
124 124
125 125 try:
126 126 with open(_hook_file, 'wb') as f:
127 127 template = template.replace(
128 128 '_TMPL_', vcsserver.__version__)
129 129 template = template.replace('_DATE_', timestamp)
130 130 template = template.replace('_ENV_', executable)
131 131 template = template.replace('_PATH_', path)
132 132
133 133 f.write(template)
134 134 os.chmod(_hook_file, 0o755)
135 135 except IOError:
136 136 log.exception('error writing hook file %s', _hook_file)
137 137 else:
138 138 log.debug('skipping writing hook file')
139 139
140 140 return True
141 141
142 142
143 143 def get_version_from_hook(hook_path):
144 144 version = ''
145 145 hook_content = read_hook_content(hook_path)
146 146 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
147 147 if matches:
148 148 try:
149 149 version = matches.groups()[0]
150 150 log.debug('got version %s from hooks.', version)
151 151 except Exception:
152 152 log.exception("Exception while reading the hook version.")
153 153 return version.replace("'", "")
154 154
155 155
156 156 def check_rhodecode_hook(hook_path):
157 157 """
158 158 Check if the hook was created by RhodeCode
159 159 """
160 160 if not os.path.exists(hook_path):
161 161 return True
162 162
163 163 log.debug('hook exists, checking if it is from RhodeCode')
164 164
165 165 version = get_version_from_hook(hook_path)
166 166 if version:
167 167 return True
168 168
169 169 return False
170 170
171 171
172 172 def read_hook_content(hook_path):
173 173 content = ''
174 174 if os.path.isfile(hook_path):
175 175 with open(hook_path, 'rb') as f:
176 176 content = f.read()
177 177 return content
178 178
179 179
180 180 def get_git_pre_hook_version(repo_path, bare):
181 181 hooks_path = get_git_hooks_path(repo_path, bare)
182 182 _hook_file = os.path.join(hooks_path, 'pre-receive')
183 183 version = get_version_from_hook(_hook_file)
184 184 return version
185 185
186 186
187 187 def get_git_post_hook_version(repo_path, bare):
188 188 hooks_path = get_git_hooks_path(repo_path, bare)
189 189 _hook_file = os.path.join(hooks_path, 'post-receive')
190 190 version = get_version_from_hook(_hook_file)
191 191 return version
192 192
193 193
194 194 def get_svn_pre_hook_version(repo_path):
195 195 hooks_path = get_svn_hooks_path(repo_path)
196 196 _hook_file = os.path.join(hooks_path, 'pre-commit')
197 197 version = get_version_from_hook(_hook_file)
198 198 return version
199 199
200 200
201 201 def get_svn_post_hook_version(repo_path):
202 202 hooks_path = get_svn_hooks_path(repo_path)
203 203 _hook_file = os.path.join(hooks_path, 'post-commit')
204 204 version = get_version_from_hook(_hook_file)
205 205 return version
@@ -1,729 +1,729 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 49 try:
50 50 connection.request('POST', '/', body)
51 51 except Exception:
52 52 log.error('Connection failed on %s', connection)
53 53 raise
54 54 response = connection.getresponse()
55 55
56 56 response_data = response.read()
57 57
58 58 try:
59 59 return json.loads(response_data)
60 60 except Exception:
61 61 log.exception('Failed to decode hook response json data. '
62 62 'response_code:%s, raw_data:%s',
63 63 response.status, response_data)
64 64 raise
65 65
66 66 def _serialize(self, hook_name, extras):
67 67 data = {
68 68 'method': hook_name,
69 69 'extras': extras
70 70 }
71 71 return json.dumps(data)
72 72
73 73
74 74 class HooksDummyClient(object):
75 75 def __init__(self, hooks_module):
76 76 self._hooks_module = importlib.import_module(hooks_module)
77 77
78 78 def __call__(self, hook_name, extras):
79 79 with self._hooks_module.Hooks() as hooks:
80 80 return getattr(hooks, hook_name)(extras)
81 81
82 82
83 83 class HooksShadowRepoClient(object):
84 84
85 85 def __call__(self, hook_name, extras):
86 86 return {'output': '', 'status': 0}
87 87
88 88
89 89 class RemoteMessageWriter(object):
90 90 """Writer base class."""
91 91 def write(self, message):
92 92 raise NotImplementedError()
93 93
94 94
95 95 class HgMessageWriter(RemoteMessageWriter):
96 96 """Writer that knows how to send messages to mercurial clients."""
97 97
98 98 def __init__(self, ui):
99 99 self.ui = ui
100 100
101 101 def write(self, message):
102 102 # TODO: Check why the quiet flag is set by default.
103 103 old = self.ui.quiet
104 104 self.ui.quiet = False
105 105 self.ui.status(message.encode('utf-8'))
106 106 self.ui.quiet = old
107 107
108 108
109 109 class GitMessageWriter(RemoteMessageWriter):
110 110 """Writer that knows how to send messages to git clients."""
111 111
112 112 def __init__(self, stdout=None):
113 113 self.stdout = stdout or sys.stdout
114 114
115 115 def write(self, message):
116 116 self.stdout.write(message.encode('utf-8'))
117 117
118 118
119 119 class SvnMessageWriter(RemoteMessageWriter):
120 120 """Writer that knows how to send messages to svn clients."""
121 121
122 122 def __init__(self, stderr=None):
123 123 # SVN needs data sent to stderr for back-to-client messaging
124 124 self.stderr = stderr or sys.stderr
125 125
126 126 def write(self, message):
127 127 self.stderr.write(message.encode('utf-8'))
128 128
129 129
130 130 def _handle_exception(result):
131 131 exception_class = result.get('exception')
132 132 exception_traceback = result.get('exception_traceback')
133 133
134 134 if exception_traceback:
135 135 log.error('Got traceback from remote call:%s', exception_traceback)
136 136
137 137 if exception_class == 'HTTPLockedRC':
138 138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 139 elif exception_class == 'HTTPBranchProtected':
140 140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 141 elif exception_class == 'RepositoryError':
142 142 raise exceptions.VcsException()(*result['exception_args'])
143 143 elif exception_class:
144 144 raise Exception('Got remote exception "%s" with args "%s"' %
145 145 (exception_class, result['exception_args']))
146 146
147 147
148 148 def _get_hooks_client(extras):
149 149 hooks_uri = extras.get('hooks_uri')
150 150 is_shadow_repo = extras.get('is_shadow_repo')
151 151 if hooks_uri:
152 152 return HooksHttpClient(extras['hooks_uri'])
153 153 elif is_shadow_repo:
154 154 return HooksShadowRepoClient()
155 155 else:
156 156 return HooksDummyClient(extras['hooks_module'])
157 157
158 158
159 159 def _call_hook(hook_name, extras, writer):
160 160 hooks_client = _get_hooks_client(extras)
161 161 log.debug('Hooks, using client:%s', hooks_client)
162 162 result = hooks_client(hook_name, extras)
163 163 log.debug('Hooks got result: %s', result)
164 164
165 165 _handle_exception(result)
166 166 writer.write(result['output'])
167 167
168 168 return result['status']
169 169
170 170
171 171 def _extras_from_ui(ui):
172 172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 173 if not hook_data:
174 174 # maybe it's inside environ ?
175 175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 176 if env_hook_data:
177 177 hook_data = env_hook_data
178 178
179 179 extras = {}
180 180 if hook_data:
181 181 extras = json.loads(hook_data)
182 182 return extras
183 183
184 184
185 185 def _rev_range_hash(repo, node, check_heads=False):
186 186 from vcsserver.hgcompat import get_ctx
187 187
188 188 commits = []
189 189 revs = []
190 190 start = get_ctx(repo, node).rev()
191 191 end = len(repo)
192 192 for rev in range(start, end):
193 193 revs.append(rev)
194 194 ctx = get_ctx(repo, rev)
195 195 commit_id = mercurial.node.hex(ctx.node())
196 196 branch = ctx.branch()
197 197 commits.append((commit_id, branch))
198 198
199 199 parent_heads = []
200 200 if check_heads:
201 201 parent_heads = _check_heads(repo, start, end, revs)
202 202 return commits, parent_heads
203 203
204 204
205 205 def _check_heads(repo, start, end, commits):
206 206 from vcsserver.hgcompat import get_ctx
207 207 changelog = repo.changelog
208 208 parents = set()
209 209
210 210 for new_rev in commits:
211 211 for p in changelog.parentrevs(new_rev):
212 212 if p == mercurial.node.nullrev:
213 213 continue
214 214 if p < start:
215 215 parents.add(p)
216 216
217 217 for p in parents:
218 218 branch = get_ctx(repo, p).branch()
219 219 # The heads descending from that parent, on the same branch
220 220 parent_heads = set([p])
221 221 reachable = set([p])
222 222 for x in xrange(p + 1, end):
223 223 if get_ctx(repo, x).branch() != branch:
224 224 continue
225 225 for pp in changelog.parentrevs(x):
226 226 if pp in reachable:
227 227 reachable.add(x)
228 228 parent_heads.discard(pp)
229 229 parent_heads.add(x)
230 230 # More than one head? Suggest merging
231 231 if len(parent_heads) > 1:
232 232 return list(parent_heads)
233 233
234 234 return []
235 235
236 236
237 237 def _get_git_env():
238 238 env = {}
239 239 for k, v in os.environ.items():
240 240 if k.startswith('GIT'):
241 241 env[k] = v
242 242
243 243 # serialized version
244 244 return [(k, v) for k, v in env.items()]
245 245
246 246
247 247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 248 env = {}
249 249 for k, v in os.environ.items():
250 250 if k.startswith('HG'):
251 251 env[k] = v
252 252
253 253 env['HG_NODE'] = old_rev
254 254 env['HG_NODE_LAST'] = new_rev
255 255 env['HG_TXNID'] = txnid
256 256 env['HG_PENDING'] = repo_path
257 257
258 258 return [(k, v) for k, v in env.items()]
259 259
260 260
261 261 def repo_size(ui, repo, **kwargs):
262 262 extras = _extras_from_ui(ui)
263 263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264 264
265 265
266 266 def pre_pull(ui, repo, **kwargs):
267 267 extras = _extras_from_ui(ui)
268 268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269 269
270 270
271 271 def pre_pull_ssh(ui, repo, **kwargs):
272 272 extras = _extras_from_ui(ui)
273 273 if extras and extras.get('SSH'):
274 274 return pre_pull(ui, repo, **kwargs)
275 275 return 0
276 276
277 277
278 278 def post_pull(ui, repo, **kwargs):
279 279 extras = _extras_from_ui(ui)
280 280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281 281
282 282
283 283 def post_pull_ssh(ui, repo, **kwargs):
284 284 extras = _extras_from_ui(ui)
285 285 if extras and extras.get('SSH'):
286 286 return post_pull(ui, repo, **kwargs)
287 287 return 0
288 288
289 289
290 290 def pre_push(ui, repo, node=None, **kwargs):
291 291 """
292 292 Mercurial pre_push hook
293 293 """
294 294 extras = _extras_from_ui(ui)
295 295 detect_force_push = extras.get('detect_force_push')
296 296
297 297 rev_data = []
298 298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 299 branches = collections.defaultdict(list)
300 300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 301 for commit_id, branch in commits:
302 302 branches[branch].append(commit_id)
303 303
304 304 for branch, commits in branches.items():
305 305 old_rev = kwargs.get('node_last') or commits[0]
306 306 rev_data.append({
307 307 'total_commits': len(commits),
308 308 'old_rev': old_rev,
309 309 'new_rev': commits[-1],
310 310 'ref': '',
311 311 'type': 'branch',
312 312 'name': branch,
313 313 })
314 314
315 315 for push_ref in rev_data:
316 316 push_ref['multiple_heads'] = _heads
317 317
318 318 repo_path = os.path.join(
319 319 extras.get('repo_store', ''), extras.get('repository', ''))
320 320 push_ref['hg_env'] = _get_hg_env(
321 321 old_rev=push_ref['old_rev'],
322 322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 323 repo_path=repo_path)
324 324
325 325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 326 extras['commit_ids'] = rev_data
327 327
328 328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329 329
330 330
331 331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 332 extras = _extras_from_ui(ui)
333 333 if extras.get('SSH'):
334 334 return pre_push(ui, repo, node, **kwargs)
335 335
336 336 return 0
337 337
338 338
339 339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
340 340 """
341 341 Mercurial pre_push hook for SSH
342 342 """
343 343 extras = _extras_from_ui(ui)
344 344 if extras.get('SSH'):
345 345 permission = extras['SSH_PERMISSIONS']
346 346
347 347 if 'repository.write' == permission or 'repository.admin' == permission:
348 348 return 0
349 349
350 350 # non-zero ret code
351 351 return 1
352 352
353 353 return 0
354 354
355 355
356 356 def post_push(ui, repo, node, **kwargs):
357 357 """
358 358 Mercurial post_push hook
359 359 """
360 360 extras = _extras_from_ui(ui)
361 361
362 362 commit_ids = []
363 363 branches = []
364 364 bookmarks = []
365 365 tags = []
366 366
367 367 commits, _heads = _rev_range_hash(repo, node)
368 368 for commit_id, branch in commits:
369 369 commit_ids.append(commit_id)
370 370 if branch not in branches:
371 371 branches.append(branch)
372 372
373 373 if hasattr(ui, '_rc_pushkey_branches'):
374 374 bookmarks = ui._rc_pushkey_branches
375 375
376 376 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
377 377 extras['commit_ids'] = commit_ids
378 378 extras['new_refs'] = {
379 379 'branches': branches,
380 380 'bookmarks': bookmarks,
381 381 'tags': tags
382 382 }
383 383
384 384 return _call_hook('post_push', extras, HgMessageWriter(ui))
385 385
386 386
387 387 def post_push_ssh(ui, repo, node, **kwargs):
388 388 """
389 389 Mercurial post_push hook for SSH
390 390 """
391 391 if _extras_from_ui(ui).get('SSH'):
392 392 return post_push(ui, repo, node, **kwargs)
393 393 return 0
394 394
395 395
396 396 def key_push(ui, repo, **kwargs):
397 397 from vcsserver.hgcompat import get_ctx
398 398 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
399 399 # store new bookmarks in our UI object propagated later to post_push
400 400 ui._rc_pushkey_branches = get_ctx(repo, kwargs['key']).bookmarks()
401 401 return
402 402
403 403
404 404 # backward compat
405 405 log_pull_action = post_pull
406 406
407 407 # backward compat
408 408 log_push_action = post_push
409 409
410 410
411 411 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
412 412 """
413 413 Old hook name: keep here for backward compatibility.
414 414
415 415 This is only required when the installed git hooks are not upgraded.
416 416 """
417 417 pass
418 418
419 419
420 420 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
421 421 """
422 422 Old hook name: keep here for backward compatibility.
423 423
424 424 This is only required when the installed git hooks are not upgraded.
425 425 """
426 426 pass
427 427
428 428
429 429 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430 430
431 431
432 432 def git_pre_pull(extras):
433 433 """
434 434 Pre pull hook.
435 435
436 436 :param extras: dictionary containing the keys defined in simplevcs
437 437 :type extras: dict
438 438
439 439 :return: status code of the hook. 0 for success.
440 440 :rtype: int
441 441 """
442 442 if 'pull' not in extras['hooks']:
443 443 return HookResponse(0, '')
444 444
445 445 stdout = io.BytesIO()
446 446 try:
447 447 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
448 448 except Exception as error:
449 449 status = 128
450 450 stdout.write('ERROR: %s\n' % str(error))
451 451
452 452 return HookResponse(status, stdout.getvalue())
453 453
454 454
455 455 def git_post_pull(extras):
456 456 """
457 457 Post pull hook.
458 458
459 459 :param extras: dictionary containing the keys defined in simplevcs
460 460 :type extras: dict
461 461
462 462 :return: status code of the hook. 0 for success.
463 463 :rtype: int
464 464 """
465 465 if 'pull' not in extras['hooks']:
466 466 return HookResponse(0, '')
467 467
468 468 stdout = io.BytesIO()
469 469 try:
470 470 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
471 471 except Exception as error:
472 472 status = 128
473 473 stdout.write('ERROR: %s\n' % error)
474 474
475 475 return HookResponse(status, stdout.getvalue())
476 476
477 477
478 478 def _parse_git_ref_lines(revision_lines):
479 479 rev_data = []
480 480 for revision_line in revision_lines or []:
481 481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 482 ref_data = ref.split('/', 2)
483 483 if ref_data[1] in ('tags', 'heads'):
484 484 rev_data.append({
485 485 # NOTE(marcink):
486 486 # we're unable to tell total_commits for git at this point
487 487 # but we set the variable for consistency with GIT
488 488 'total_commits': -1,
489 489 'old_rev': old_rev,
490 490 'new_rev': new_rev,
491 491 'ref': ref,
492 492 'type': ref_data[1],
493 493 'name': ref_data[2],
494 494 })
495 495 return rev_data
496 496
497 497
498 498 def git_pre_receive(unused_repo_path, revision_lines, env):
499 499 """
500 500 Pre push hook.
501 501
502 502 :param extras: dictionary containing the keys defined in simplevcs
503 503 :type extras: dict
504 504
505 505 :return: status code of the hook. 0 for success.
506 506 :rtype: int
507 507 """
508 508 extras = json.loads(env['RC_SCM_DATA'])
509 509 rev_data = _parse_git_ref_lines(revision_lines)
510 510 if 'push' not in extras['hooks']:
511 511 return 0
512 512 empty_commit_id = '0' * 40
513 513
514 514 detect_force_push = extras.get('detect_force_push')
515 515
516 516 for push_ref in rev_data:
517 517 # store our git-env which holds the temp store
518 518 push_ref['git_env'] = _get_git_env()
519 519 push_ref['pruned_sha'] = ''
520 520 if not detect_force_push:
521 521 # don't check for forced-push when we don't need to
522 522 continue
523 523
524 524 type_ = push_ref['type']
525 525 new_branch = push_ref['old_rev'] == empty_commit_id
526 526 delete_branch = push_ref['new_rev'] == empty_commit_id
527 527 if type_ == 'heads' and not (new_branch or delete_branch):
528 528 old_rev = push_ref['old_rev']
529 529 new_rev = push_ref['new_rev']
530 530 cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
531 531 stdout, stderr = subprocessio.run_command(
532 532 cmd, env=os.environ.copy())
533 533 # means we're having some non-reachable objects, this forced push was used
534 534 if stdout:
535 535 push_ref['pruned_sha'] = stdout.splitlines()
536 536
537 537 extras['hook_type'] = 'pre_receive'
538 538 extras['commit_ids'] = rev_data
539 539 return _call_hook('pre_push', extras, GitMessageWriter())
540 540
541 541
542 542 def git_post_receive(unused_repo_path, revision_lines, env):
543 543 """
544 544 Post push hook.
545 545
546 546 :param extras: dictionary containing the keys defined in simplevcs
547 547 :type extras: dict
548 548
549 549 :return: status code of the hook. 0 for success.
550 550 :rtype: int
551 551 """
552 552 extras = json.loads(env['RC_SCM_DATA'])
553 553 if 'push' not in extras['hooks']:
554 554 return 0
555 555
556 556 rev_data = _parse_git_ref_lines(revision_lines)
557 557
558 558 git_revs = []
559 559
560 560 # N.B.(skreft): it is ok to just call git, as git before calling a
561 561 # subcommand sets the PATH environment variable so that it point to the
562 562 # correct version of the git executable.
563 563 empty_commit_id = '0' * 40
564 564 branches = []
565 565 tags = []
566 566 for push_ref in rev_data:
567 567 type_ = push_ref['type']
568 568
569 569 if type_ == 'heads':
570 570 if push_ref['old_rev'] == empty_commit_id:
571 571 # starting new branch case
572 572 if push_ref['name'] not in branches:
573 573 branches.append(push_ref['name'])
574 574
575 575 # Fix up head revision if needed
576 576 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
577 577 try:
578 578 subprocessio.run_command(cmd, env=os.environ.copy())
579 579 except Exception:
580 580 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
581 581 'refs/heads/%s' % push_ref['name']]
582 582 print("Setting default branch to %s" % push_ref['name'])
583 583 subprocessio.run_command(cmd, env=os.environ.copy())
584 584
585 585 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
586 586 '--format=%(refname)', 'refs/heads/*']
587 587 stdout, stderr = subprocessio.run_command(
588 588 cmd, env=os.environ.copy())
589 589 heads = stdout
590 590 heads = heads.replace(push_ref['ref'], '')
591 591 heads = ' '.join(head for head
592 592 in heads.splitlines() if head) or '.'
593 593 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
594 594 '--pretty=format:%H', '--', push_ref['new_rev'],
595 595 '--not', heads]
596 596 stdout, stderr = subprocessio.run_command(
597 597 cmd, env=os.environ.copy())
598 598 git_revs.extend(stdout.splitlines())
599 599 elif push_ref['new_rev'] == empty_commit_id:
600 600 # delete branch case
601 601 git_revs.append('delete_branch=>%s' % push_ref['name'])
602 602 else:
603 603 if push_ref['name'] not in branches:
604 604 branches.append(push_ref['name'])
605 605
606 606 cmd = [settings.GIT_EXECUTABLE, 'log',
607 607 '{old_rev}..{new_rev}'.format(**push_ref),
608 608 '--reverse', '--pretty=format:%H']
609 609 stdout, stderr = subprocessio.run_command(
610 610 cmd, env=os.environ.copy())
611 611 git_revs.extend(stdout.splitlines())
612 612 elif type_ == 'tags':
613 613 if push_ref['name'] not in tags:
614 614 tags.append(push_ref['name'])
615 615 git_revs.append('tag=>%s' % push_ref['name'])
616 616
617 617 extras['hook_type'] = 'post_receive'
618 618 extras['commit_ids'] = git_revs
619 619 extras['new_refs'] = {
620 620 'branches': branches,
621 621 'bookmarks': [],
622 622 'tags': tags,
623 623 }
624 624
625 625 if 'repo_size' in extras['hooks']:
626 626 try:
627 627 _call_hook('repo_size', extras, GitMessageWriter())
628 628 except:
629 629 pass
630 630
631 631 return _call_hook('post_push', extras, GitMessageWriter())
632 632
633 633
634 634 def _get_extras_from_txn_id(path, txn_id):
635 635 extras = {}
636 636 try:
637 637 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
638 638 '-t', txn_id,
639 639 '--revprop', path, 'rc-scm-extras']
640 640 stdout, stderr = subprocessio.run_command(
641 641 cmd, env=os.environ.copy())
642 642 extras = json.loads(base64.urlsafe_b64decode(stdout))
643 643 except Exception:
644 644 log.exception('Failed to extract extras info from txn_id')
645 645
646 646 return extras
647 647
648 648
649 649 def _get_extras_from_commit_id(commit_id, path):
650 650 extras = {}
651 651 try:
652 652 cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
653 653 '-r', commit_id,
654 654 '--revprop', path, 'rc-scm-extras']
655 655 stdout, stderr = subprocessio.run_command(
656 656 cmd, env=os.environ.copy())
657 657 extras = json.loads(base64.urlsafe_b64decode(stdout))
658 658 except Exception:
659 659 log.exception('Failed to extract extras info from commit_id')
660 660
661 661 return extras
662 662
663 663
664 664 def svn_pre_commit(repo_path, commit_data, env):
665 665 path, txn_id = commit_data
666 666 branches = []
667 667 tags = []
668 668
669 669 if env.get('RC_SCM_DATA'):
670 670 extras = json.loads(env['RC_SCM_DATA'])
671 671 else:
672 672 # fallback method to read from TXN-ID stored data
673 673 extras = _get_extras_from_txn_id(path, txn_id)
674 674 if not extras:
675 675 return 0
676 676
677 677 extras['hook_type'] = 'pre_commit'
678 678 extras['commit_ids'] = [txn_id]
679 679 extras['txn_id'] = txn_id
680 680 extras['new_refs'] = {
681 681 'total_commits': 1,
682 682 'branches': branches,
683 683 'bookmarks': [],
684 684 'tags': tags,
685 685 }
686 686
687 687 return _call_hook('pre_push', extras, SvnMessageWriter())
688 688
689 689
690 690 def svn_post_commit(repo_path, commit_data, env):
691 691 """
692 692 commit_data is path, rev, txn_id
693 693 """
694 694 if len(commit_data) == 3:
695 695 path, commit_id, txn_id = commit_data
696 696 elif len(commit_data) == 2:
697 697 log.error('Failed to extract txn_id from commit_data using legacy method. '
698 698 'Some functionality might be limited')
699 699 path, commit_id = commit_data
700 700 txn_id = None
701 701
702 702 branches = []
703 703 tags = []
704 704
705 705 if env.get('RC_SCM_DATA'):
706 706 extras = json.loads(env['RC_SCM_DATA'])
707 707 else:
708 708 # fallback method to read from TXN-ID stored data
709 709 extras = _get_extras_from_commit_id(commit_id, path)
710 710 if not extras:
711 711 return 0
712 712
713 713 extras['hook_type'] = 'post_commit'
714 714 extras['commit_ids'] = [commit_id]
715 715 extras['txn_id'] = txn_id
716 716 extras['new_refs'] = {
717 717 'branches': branches,
718 718 'bookmarks': [],
719 719 'tags': tags,
720 720 'total_commits': 1,
721 721 }
722 722
723 723 if 'repo_size' in extras['hooks']:
724 724 try:
725 725 _call_hook('repo_size', extras, SvnMessageWriter())
726 726 except Exception:
727 727 pass
728 728
729 729 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,675 +1,688 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 from itertools import chain
28 28 from cStringIO import StringIO
29 29
30 30 import simplejson as json
31 31 import msgpack
32 32 from pyramid.config import Configurator
33 33 from pyramid.settings import asbool, aslist
34 34 from pyramid.wsgi import wsgiapp
35 35 from pyramid.compat import configparser
36 36 from pyramid.response import Response
37 37
38 38 from vcsserver.utils import safe_int
39 39
40 40 log = logging.getLogger(__name__)
41 41
42 42 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
43 43 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
44 44
45 45 try:
46 46 locale.setlocale(locale.LC_ALL, '')
47 47 except locale.Error as e:
48 48 log.error(
49 49 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
50 50 os.environ['LC_ALL'] = 'C'
51 51
52 52 import vcsserver
53 53 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
54 54 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
55 55 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
56 56 from vcsserver.echo_stub.echo_app import EchoApp
57 57 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
58 58 from vcsserver.lib.exc_tracking import store_exception
59 59 from vcsserver.server import VcsServer
60 60
61 61 try:
62 62 from vcsserver.git import GitFactory, GitRemote
63 63 except ImportError:
64 64 GitFactory = None
65 65 GitRemote = None
66 66
67 67 try:
68 68 from vcsserver.hg import MercurialFactory, HgRemote
69 69 except ImportError:
70 70 MercurialFactory = None
71 71 HgRemote = None
72 72
73 73 try:
74 74 from vcsserver.svn import SubversionFactory, SvnRemote
75 75 except ImportError:
76 76 SubversionFactory = None
77 77 SvnRemote = None
78 78
79 79
80 80 def _is_request_chunked(environ):
81 81 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
82 82 return stream
83 83
84 84
85 85 def _int_setting(settings, name, default):
86 86 settings[name] = int(settings.get(name, default))
87 87 return settings[name]
88 88
89 89
90 90 def _bool_setting(settings, name, default):
91 91 input_val = settings.get(name, default)
92 92 if isinstance(input_val, unicode):
93 93 input_val = input_val.encode('utf8')
94 94 settings[name] = asbool(input_val)
95 95 return settings[name]
96 96
97 97
98 98 def _list_setting(settings, name, default):
99 99 raw_value = settings.get(name, default)
100 100
101 101 # Otherwise we assume it uses pyramids space/newline separation.
102 102 settings[name] = aslist(raw_value)
103 103 return settings[name]
104 104
105 105
106 106 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
107 107 value = settings.get(name, default)
108 108
109 109 if default_when_empty and not value:
110 110 # use default value when value is empty
111 111 value = default
112 112
113 113 if lower:
114 114 value = value.lower()
115 115 settings[name] = value
116 116 return settings[name]
117 117
118 118
119 119 class VCS(object):
120 120 def __init__(self, locale_conf=None, cache_config=None):
121 121 self.locale = locale_conf
122 122 self.cache_config = cache_config
123 123 self._configure_locale()
124 124
125 125 if GitFactory and GitRemote:
126 126 git_factory = GitFactory()
127 127 self._git_remote = GitRemote(git_factory)
128 128 else:
129 129 log.info("Git client import failed")
130 130
131 131 if MercurialFactory and HgRemote:
132 132 hg_factory = MercurialFactory()
133 133 self._hg_remote = HgRemote(hg_factory)
134 134 else:
135 135 log.info("Mercurial client import failed")
136 136
137 137 if SubversionFactory and SvnRemote:
138 138 svn_factory = SubversionFactory()
139 139
140 140 # hg factory is used for svn url validation
141 141 hg_factory = MercurialFactory()
142 142 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
143 143 else:
144 144 log.info("Subversion client import failed")
145 145
146 146 self._vcsserver = VcsServer()
147 147
148 148 def _configure_locale(self):
149 149 if self.locale:
150 150 log.info('Settings locale: `LC_ALL` to %s', self.locale)
151 151 else:
152 152 log.info(
153 153 'Configuring locale subsystem based on environment variables')
154 154 try:
155 155 # If self.locale is the empty string, then the locale
156 156 # module will use the environment variables. See the
157 157 # documentation of the package `locale`.
158 158 locale.setlocale(locale.LC_ALL, self.locale)
159 159
160 160 language_code, encoding = locale.getlocale()
161 161 log.info(
162 162 'Locale set to language code "%s" with encoding "%s".',
163 163 language_code, encoding)
164 164 except locale.Error:
165 165 log.exception(
166 166 'Cannot set locale, not configuring the locale system')
167 167
168 168
169 169 class WsgiProxy(object):
170 170 def __init__(self, wsgi):
171 171 self.wsgi = wsgi
172 172
173 173 def __call__(self, environ, start_response):
174 174 input_data = environ['wsgi.input'].read()
175 175 input_data = msgpack.unpackb(input_data)
176 176
177 177 error = None
178 178 try:
179 179 data, status, headers = self.wsgi.handle(
180 180 input_data['environment'], input_data['input_data'],
181 181 *input_data['args'], **input_data['kwargs'])
182 182 except Exception as e:
183 183 data, status, headers = [], None, None
184 184 error = {
185 185 'message': str(e),
186 186 '_vcs_kind': getattr(e, '_vcs_kind', None)
187 187 }
188 188
189 189 start_response(200, {})
190 190 return self._iterator(error, status, headers, data)
191 191
192 192 def _iterator(self, error, status, headers, data):
193 193 initial_data = [
194 194 error,
195 195 status,
196 196 headers,
197 197 ]
198 198
199 199 for d in chain(initial_data, data):
200 200 yield msgpack.packb(d)
201 201
202 202
203 203 def not_found(request):
204 204 return {'status': '404 NOT FOUND'}
205 205
206 206
207 207 class VCSViewPredicate(object):
208 208 def __init__(self, val, config):
209 209 self.remotes = val
210 210
211 211 def text(self):
212 212 return 'vcs view method = %s' % (self.remotes.keys(),)
213 213
214 214 phash = text
215 215
216 216 def __call__(self, context, request):
217 217 """
218 218 View predicate that returns true if given backend is supported by
219 219 defined remotes.
220 220 """
221 221 backend = request.matchdict.get('backend')
222 222 return backend in self.remotes
223 223
224 224
225 225 class HTTPApplication(object):
226 226 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
227 227
228 228 remote_wsgi = remote_wsgi
229 229 _use_echo_app = False
230 230
231 231 def __init__(self, settings=None, global_config=None):
232 232 self._sanitize_settings_and_apply_defaults(settings)
233 233
234 234 self.config = Configurator(settings=settings)
235 235 self.global_config = global_config
236 236 self.config.include('vcsserver.lib.rc_cache')
237 237
238 238 settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
239 239 vcs = VCS(locale_conf=settings_locale, cache_config=settings)
240 240 self._remotes = {
241 241 'hg': vcs._hg_remote,
242 242 'git': vcs._git_remote,
243 243 'svn': vcs._svn_remote,
244 244 'server': vcs._vcsserver,
245 245 }
246 246 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
247 247 self._use_echo_app = True
248 248 log.warning("Using EchoApp for VCS operations.")
249 249 self.remote_wsgi = remote_wsgi_stub
250 250
251 251 self._configure_settings(global_config, settings)
252 252 self._configure()
253 253
254 254 def _configure_settings(self, global_config, app_settings):
255 255 """
256 256 Configure the settings module.
257 257 """
258 258 settings_merged = global_config.copy()
259 259 settings_merged.update(app_settings)
260 260
261 261 git_path = app_settings.get('git_path', None)
262 262 if git_path:
263 263 settings.GIT_EXECUTABLE = git_path
264 264 binary_dir = app_settings.get('core.binary_dir', None)
265 265 if binary_dir:
266 266 settings.BINARY_DIR = binary_dir
267 267
268 268 # Store the settings to make them available to other modules.
269 269 vcsserver.PYRAMID_SETTINGS = settings_merged
270 270 vcsserver.CONFIG = settings_merged
271 271
272 272 def _sanitize_settings_and_apply_defaults(self, settings):
273 273 temp_store = tempfile.gettempdir()
274 274 default_cache_dir = os.path.join(temp_store, 'rc_cache')
275 275
276 276 # save default, cache dir, and use it for all backends later.
277 277 default_cache_dir = _string_setting(
278 278 settings,
279 279 'cache_dir',
280 280 default_cache_dir, lower=False, default_when_empty=True)
281 281
282 282 # ensure we have our dir created
283 283 if not os.path.isdir(default_cache_dir):
284 284 os.makedirs(default_cache_dir, mode=0o755)
285 285
286 286 # exception store cache
287 287 _string_setting(
288 288 settings,
289 289 'exception_tracker.store_path',
290 290 temp_store, lower=False, default_when_empty=True)
291 291
292 292 # repo_object cache
293 293 _string_setting(
294 294 settings,
295 295 'rc_cache.repo_object.backend',
296 296 'dogpile.cache.rc.file_namespace', lower=False)
297 297 _int_setting(
298 298 settings,
299 299 'rc_cache.repo_object.expiration_time',
300 300 30 * 24 * 60 * 60)
301 301 _string_setting(
302 302 settings,
303 303 'rc_cache.repo_object.arguments.filename',
304 304 os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)
305 305
306 306 def _configure(self):
307 307 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
308 308
309 309 self.config.add_route('service', '/_service')
310 310 self.config.add_route('status', '/status')
311 311 self.config.add_route('hg_proxy', '/proxy/hg')
312 312 self.config.add_route('git_proxy', '/proxy/git')
313 313
314 314 # rpc methods
315 315 self.config.add_route('vcs', '/{backend}')
316 316
317 317 # streaming rpc remote methods
318 318 self.config.add_route('vcs_stream', '/{backend}/stream')
319 319
320 320 # vcs operations clone/push as streaming
321 321 self.config.add_route('stream_git', '/stream/git/*repo_name')
322 322 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
323 323
324 324 self.config.add_view(self.status_view, route_name='status', renderer='json')
325 325 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
326 326
327 327 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
328 328 self.config.add_view(self.git_proxy(), route_name='git_proxy')
329 329 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
330 330 vcs_view=self._remotes)
331 331 self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
332 332 vcs_view=self._remotes)
333 333
334 334 self.config.add_view(self.hg_stream(), route_name='stream_hg')
335 335 self.config.add_view(self.git_stream(), route_name='stream_git')
336 336
337 337 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
338 338
339 339 self.config.add_notfound_view(not_found, renderer='json')
340 340
341 341 self.config.add_view(self.handle_vcs_exception, context=Exception)
342 342
343 343 self.config.add_tween(
344 344 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
345 345 )
346 346 self.config.add_request_method(
347 347 'vcsserver.lib.request_counter.get_request_counter',
348 348 'request_count')
349 349
350 350 def wsgi_app(self):
351 351 return self.config.make_wsgi_app()
352 352
353 353 def _vcs_view_params(self, request):
354 354 remote = self._remotes[request.matchdict['backend']]
355 355 payload = msgpack.unpackb(request.body, use_list=True)
356 356 method = payload.get('method')
357 357 params = payload['params']
358 358 wire = params.get('wire')
359 359 args = params.get('args')
360 360 kwargs = params.get('kwargs')
361 361 context_uid = None
362 362
363 363 if wire:
364 364 try:
365 365 wire['context'] = context_uid = uuid.UUID(wire['context'])
366 366 except KeyError:
367 367 pass
368 368 args.insert(0, wire)
369 369 repo_state_uid = wire.get('repo_state_uid') if wire else None
370 370
371 371 # NOTE(marcink): trading complexity for slight performance
372 372 if log.isEnabledFor(logging.DEBUG):
373 373 no_args_methods = [
374 374 'archive_repo'
375 375 ]
376 376 if method in no_args_methods:
377 377 call_args = ''
378 378 else:
379 379 call_args = args[1:]
380 380
381 381 log.debug('method requested:%s with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
382 382 method, call_args, kwargs, context_uid, repo_state_uid)
383 383
384 384 return payload, remote, method, args, kwargs
385 385
386 386 def vcs_view(self, request):
387 387
388 388 payload, remote, method, args, kwargs = self._vcs_view_params(request)
389 389 payload_id = payload.get('id')
390 390
391 391 try:
392 392 resp = getattr(remote, method)(*args, **kwargs)
393 393 except Exception as e:
394 394 exc_info = list(sys.exc_info())
395 395 exc_type, exc_value, exc_traceback = exc_info
396 396
397 397 org_exc = getattr(e, '_org_exc', None)
398 398 org_exc_name = None
399 399 org_exc_tb = ''
400 400 if org_exc:
401 401 org_exc_name = org_exc.__class__.__name__
402 402 org_exc_tb = getattr(e, '_org_exc_tb', '')
403 403 # replace our "faked" exception with our org
404 404 exc_info[0] = org_exc.__class__
405 405 exc_info[1] = org_exc
406 406
407 store_exception(id(exc_info), exc_info)
407 should_store_exc = True
408 if org_exc:
409 def get_exc_fqn(_exc_obj):
410 module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
411 return module_name + '.' + org_exc_name
412
413 exc_fqn = get_exc_fqn(org_exc)
414
415 if exc_fqn in ['mercurial.error.RepoLookupError',
416 'vcsserver.exceptions.RefNotFoundException']:
417 should_store_exc = False
418
419 if should_store_exc:
420 store_exception(id(exc_info), exc_info)
408 421
409 422 tb_info = ''.join(
410 423 traceback.format_exception(exc_type, exc_value, exc_traceback))
411 424
412 425 type_ = e.__class__.__name__
413 426 if type_ not in self.ALLOWED_EXCEPTIONS:
414 427 type_ = None
415 428
416 429 resp = {
417 430 'id': payload_id,
418 431 'error': {
419 432 'message': e.message,
420 433 'traceback': tb_info,
421 434 'org_exc': org_exc_name,
422 435 'org_exc_tb': org_exc_tb,
423 436 'type': type_
424 437 }
425 438 }
426 439 try:
427 440 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
428 441 except AttributeError:
429 442 pass
430 443 else:
431 444 resp = {
432 445 'id': payload_id,
433 446 'result': resp
434 447 }
435 448
436 449 return resp
437 450
438 451 def vcs_stream_view(self, request):
439 452 payload, remote, method, args, kwargs = self._vcs_view_params(request)
440 453 # this method has a stream: marker we remove it here
441 454 method = method.split('stream:')[-1]
442 455 chunk_size = safe_int(payload.get('chunk_size')) or 4096
443 456
444 457 try:
445 458 resp = getattr(remote, method)(*args, **kwargs)
446 459 except Exception as e:
447 460 raise
448 461
449 462 def get_chunked_data(method_resp):
450 463 stream = StringIO(method_resp)
451 464 while 1:
452 465 chunk = stream.read(chunk_size)
453 466 if not chunk:
454 467 break
455 468 yield chunk
456 469
457 470 response = Response(app_iter=get_chunked_data(resp))
458 471 response.content_type = 'application/octet-stream'
459 472
460 473 return response
461 474
462 475 def status_view(self, request):
463 476 import vcsserver
464 477 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
465 478 'pid': os.getpid()}
466 479
467 480 def service_view(self, request):
468 481 import vcsserver
469 482
470 483 payload = msgpack.unpackb(request.body, use_list=True)
471 484 server_config, app_config = {}, {}
472 485
473 486 try:
474 487 path = self.global_config['__file__']
475 488 config = configparser.RawConfigParser()
476 489
477 490 config.read(path)
478 491
479 492 if config.has_section('server:main'):
480 493 server_config = dict(config.items('server:main'))
481 494 if config.has_section('app:main'):
482 495 app_config = dict(config.items('app:main'))
483 496
484 497 except Exception:
485 498 log.exception('Failed to read .ini file for display')
486 499
487 500 environ = os.environ.items()
488 501
489 502 resp = {
490 503 'id': payload.get('id'),
491 504 'result': dict(
492 505 version=vcsserver.__version__,
493 506 config=server_config,
494 507 app_config=app_config,
495 508 environ=environ,
496 509 payload=payload,
497 510 )
498 511 }
499 512 return resp
500 513
501 514 def _msgpack_renderer_factory(self, info):
502 515 def _render(value, system):
503 516 request = system.get('request')
504 517 if request is not None:
505 518 response = request.response
506 519 ct = response.content_type
507 520 if ct == response.default_content_type:
508 521 response.content_type = 'application/x-msgpack'
509 522 return msgpack.packb(value)
510 523 return _render
511 524
512 525 def set_env_from_config(self, environ, config):
513 526 dict_conf = {}
514 527 try:
515 528 for elem in config:
516 529 if elem[0] == 'rhodecode':
517 530 dict_conf = json.loads(elem[2])
518 531 break
519 532 except Exception:
520 533 log.exception('Failed to fetch SCM CONFIG')
521 534 return
522 535
523 536 username = dict_conf.get('username')
524 537 if username:
525 538 environ['REMOTE_USER'] = username
526 539 # mercurial specific, some extension api rely on this
527 540 environ['HGUSER'] = username
528 541
529 542 ip = dict_conf.get('ip')
530 543 if ip:
531 544 environ['REMOTE_HOST'] = ip
532 545
533 546 if _is_request_chunked(environ):
534 547 # set the compatibility flag for webob
535 548 environ['wsgi.input_terminated'] = True
536 549
537 550 def hg_proxy(self):
538 551 @wsgiapp
539 552 def _hg_proxy(environ, start_response):
540 553 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
541 554 return app(environ, start_response)
542 555 return _hg_proxy
543 556
544 557 def git_proxy(self):
545 558 @wsgiapp
546 559 def _git_proxy(environ, start_response):
547 560 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
548 561 return app(environ, start_response)
549 562 return _git_proxy
550 563
551 564 def hg_stream(self):
552 565 if self._use_echo_app:
553 566 @wsgiapp
554 567 def _hg_stream(environ, start_response):
555 568 app = EchoApp('fake_path', 'fake_name', None)
556 569 return app(environ, start_response)
557 570 return _hg_stream
558 571 else:
559 572 @wsgiapp
560 573 def _hg_stream(environ, start_response):
561 574 log.debug('http-app: handling hg stream')
562 575 repo_path = environ['HTTP_X_RC_REPO_PATH']
563 576 repo_name = environ['HTTP_X_RC_REPO_NAME']
564 577 packed_config = base64.b64decode(
565 578 environ['HTTP_X_RC_REPO_CONFIG'])
566 579 config = msgpack.unpackb(packed_config)
567 580 app = scm_app.create_hg_wsgi_app(
568 581 repo_path, repo_name, config)
569 582
570 583 # Consistent path information for hgweb
571 584 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
572 585 environ['REPO_NAME'] = repo_name
573 586 self.set_env_from_config(environ, config)
574 587
575 588 log.debug('http-app: starting app handler '
576 589 'with %s and process request', app)
577 590 return app(environ, ResponseFilter(start_response))
578 591 return _hg_stream
579 592
580 593 def git_stream(self):
581 594 if self._use_echo_app:
582 595 @wsgiapp
583 596 def _git_stream(environ, start_response):
584 597 app = EchoApp('fake_path', 'fake_name', None)
585 598 return app(environ, start_response)
586 599 return _git_stream
587 600 else:
588 601 @wsgiapp
589 602 def _git_stream(environ, start_response):
590 603 log.debug('http-app: handling git stream')
591 604 repo_path = environ['HTTP_X_RC_REPO_PATH']
592 605 repo_name = environ['HTTP_X_RC_REPO_NAME']
593 606 packed_config = base64.b64decode(
594 607 environ['HTTP_X_RC_REPO_CONFIG'])
595 608 config = msgpack.unpackb(packed_config)
596 609
597 610 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
598 611 self.set_env_from_config(environ, config)
599 612
600 613 content_type = environ.get('CONTENT_TYPE', '')
601 614
602 615 path = environ['PATH_INFO']
603 616 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
604 617 log.debug(
605 618 'LFS: Detecting if request `%s` is LFS server path based '
606 619 'on content type:`%s`, is_lfs:%s',
607 620 path, content_type, is_lfs_request)
608 621
609 622 if not is_lfs_request:
610 623 # fallback detection by path
611 624 if GIT_LFS_PROTO_PAT.match(path):
612 625 is_lfs_request = True
613 626 log.debug(
614 627 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
615 628 path, is_lfs_request)
616 629
617 630 if is_lfs_request:
618 631 app = scm_app.create_git_lfs_wsgi_app(
619 632 repo_path, repo_name, config)
620 633 else:
621 634 app = scm_app.create_git_wsgi_app(
622 635 repo_path, repo_name, config)
623 636
624 637 log.debug('http-app: starting app handler '
625 638 'with %s and process request', app)
626 639
627 640 return app(environ, start_response)
628 641
629 642 return _git_stream
630 643
631 644 def handle_vcs_exception(self, exception, request):
632 645 _vcs_kind = getattr(exception, '_vcs_kind', '')
633 646 if _vcs_kind == 'repo_locked':
634 647 # Get custom repo-locked status code if present.
635 648 status_code = request.headers.get('X-RC-Locked-Status-Code')
636 649 return HTTPRepoLocked(
637 650 title=exception.message, status_code=status_code)
638 651
639 652 elif _vcs_kind == 'repo_branch_protected':
640 653 # Get custom repo-branch-protected status code if present.
641 654 return HTTPRepoBranchProtected(title=exception.message)
642 655
643 656 exc_info = request.exc_info
644 657 store_exception(id(exc_info), exc_info)
645 658
646 659 traceback_info = 'unavailable'
647 660 if request.exc_info:
648 661 exc_type, exc_value, exc_tb = request.exc_info
649 662 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
650 663
651 664 log.error(
652 665 'error occurred handling this request for path: %s, \n tb: %s',
653 666 request.path, traceback_info)
654 667 raise exception
655 668
656 669
657 670 class ResponseFilter(object):
658 671
659 672 def __init__(self, start_response):
660 673 self._start_response = start_response
661 674
662 675 def __call__(self, status, response_headers, exc_info=None):
663 676 headers = tuple(
664 677 (h, v) for h, v in response_headers
665 678 if not wsgiref.util.is_hop_by_hop(h))
666 679 return self._start_response(status, headers, exc_info)
667 680
668 681
669 682 def main(global_config, **settings):
670 683 if MercurialFactory:
671 684 hgpatches.patch_largefiles_capabilities()
672 685 hgpatches.patch_subrepo_type_mapping()
673 686
674 687 app = HTTPApplication(settings=settings, global_config=global_config)
675 688 return app.wsgi_app()
@@ -1,16 +1,16 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,169 +1,169 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import os
22 22 import time
23 23 import datetime
24 24 import msgpack
25 25 import logging
26 26 import traceback
27 27 import tempfile
28 28
29 29 from pyramid import compat
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33 # NOTE: Any changes should be synced with exc_tracking at rhodecode.lib.exc_tracking
34 34 global_prefix = 'vcsserver'
35 35 exc_store_dir_name = 'rc_exception_store_v1'
36 36
37 37
38 38 def exc_serialize(exc_id, tb, exc_type):
39 39
40 40 data = {
41 41 'version': 'v1',
42 42 'exc_id': exc_id,
43 43 'exc_utc_date': datetime.datetime.utcnow().isoformat(),
44 44 'exc_timestamp': repr(time.time()),
45 45 'exc_message': tb,
46 46 'exc_type': exc_type,
47 47 }
48 48 return msgpack.packb(data), data
49 49
50 50
51 51 def exc_unserialize(tb):
52 52 return msgpack.unpackb(tb)
53 53
54 54
55 55 def get_exc_store():
56 56 """
57 57 Get and create exception store if it's not existing
58 58 """
59 59 import vcsserver as app
60 60
61 61 exc_store_dir = app.CONFIG.get('exception_tracker.store_path', '') or tempfile.gettempdir()
62 62 _exc_store_path = os.path.join(exc_store_dir, exc_store_dir_name)
63 63
64 64 _exc_store_path = os.path.abspath(_exc_store_path)
65 65 if not os.path.isdir(_exc_store_path):
66 66 os.makedirs(_exc_store_path)
67 67 log.debug('Initializing exceptions store at %s', _exc_store_path)
68 68 return _exc_store_path
69 69
70 70
71 71 def _store_exception(exc_id, exc_info, prefix):
72 72 exc_type, exc_value, exc_traceback = exc_info
73 73
74 74 tb = ''.join(traceback.format_exception(
75 75 exc_type, exc_value, exc_traceback, None))
76 76
77 77 detailed_tb = getattr(exc_value, '_org_exc_tb', None)
78 78
79 79 if detailed_tb:
80 80 if isinstance(detailed_tb, compat.string_types):
81 81 remote_tb = [detailed_tb]
82 82
83 83 tb += (
84 84 '\n+++ BEG SOURCE EXCEPTION +++\n\n'
85 85 '{}\n'
86 86 '+++ END SOURCE EXCEPTION +++\n'
87 87 ''.format('\n'.join(remote_tb))
88 88 )
89 89
90 90 # Avoid that remote_tb also appears in the frame
91 91 del remote_tb
92 92
93 93 exc_type_name = exc_type.__name__
94 94 exc_store_path = get_exc_store()
95 95 exc_data, org_data = exc_serialize(exc_id, tb, exc_type_name)
96 96 exc_pref_id = '{}_{}_{}'.format(exc_id, prefix, org_data['exc_timestamp'])
97 97 if not os.path.isdir(exc_store_path):
98 98 os.makedirs(exc_store_path)
99 99 stored_exc_path = os.path.join(exc_store_path, exc_pref_id)
100 100 with open(stored_exc_path, 'wb') as f:
101 101 f.write(exc_data)
102 102 log.debug('Stored generated exception %s as: %s', exc_id, stored_exc_path)
103 103
104 104
105 105 def store_exception(exc_id, exc_info, prefix=global_prefix):
106 106 """
107 107 Example usage::
108 108
109 109 exc_info = sys.exc_info()
110 110 store_exception(id(exc_info), exc_info)
111 111 """
112 112
113 113 try:
114 114 _store_exception(exc_id=exc_id, exc_info=exc_info, prefix=prefix)
115 115 except Exception:
116 116 log.exception('Failed to store exception `%s` information', exc_id)
117 117 # there's no way this can fail, it will crash server badly if it does.
118 118 pass
119 119
120 120
121 121 def _find_exc_file(exc_id, prefix=global_prefix):
122 122 exc_store_path = get_exc_store()
123 123 if prefix:
124 124 exc_id = '{}_{}'.format(exc_id, prefix)
125 125 else:
126 126 # search without a prefix
127 127 exc_id = '{}'.format(exc_id)
128 128
129 129 # we need to search the store for such start pattern as above
130 130 for fname in os.listdir(exc_store_path):
131 131 if fname.startswith(exc_id):
132 132 exc_id = os.path.join(exc_store_path, fname)
133 133 break
134 134 continue
135 135 else:
136 136 exc_id = None
137 137
138 138 return exc_id
139 139
140 140
141 141 def _read_exception(exc_id, prefix):
142 142 exc_id_file_path = _find_exc_file(exc_id=exc_id, prefix=prefix)
143 143 if exc_id_file_path:
144 144 with open(exc_id_file_path, 'rb') as f:
145 145 return exc_unserialize(f.read())
146 146 else:
147 147 log.debug('Exception File `%s` not found', exc_id_file_path)
148 148 return None
149 149
150 150
151 151 def read_exception(exc_id, prefix=global_prefix):
152 152 try:
153 153 return _read_exception(exc_id=exc_id, prefix=prefix)
154 154 except Exception:
155 155 log.exception('Failed to read exception `%s` information', exc_id)
156 156 # there's no way this can fail, it will crash server badly if it does.
157 157 return None
158 158
159 159
160 160 def delete_exception(exc_id, prefix=global_prefix):
161 161 try:
162 162 exc_id_file_path = _find_exc_file(exc_id, prefix=prefix)
163 163 if exc_id_file_path:
164 164 os.remove(exc_id_file_path)
165 165
166 166 except Exception:
167 167 log.exception('Failed to remove exception `%s` information', exc_id)
168 168 # there's no way this can fail, it will crash server badly if it does.
169 169 pass
@@ -1,65 +1,65 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import logging
22 22
23 23 from repoze.lru import LRUCache
24 24
25 25 from vcsserver.utils import safe_str
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 class LRUDict(LRUCache):
31 31 """
32 32 Wrapper to provide partial dict access
33 33 """
34 34
35 35 def __setitem__(self, key, value):
36 36 return self.put(key, value)
37 37
38 38 def __getitem__(self, key):
39 39 return self.get(key)
40 40
41 41 def __contains__(self, key):
42 42 return bool(self.get(key))
43 43
44 44 def __delitem__(self, key):
45 45 del self.data[key]
46 46
47 47 def keys(self):
48 48 return self.data.keys()
49 49
50 50
51 51 class LRUDictDebug(LRUDict):
52 52 """
53 53 Wrapper to provide some debug options
54 54 """
55 55 def _report_keys(self):
56 56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
57 57 # trick for pformat print it more nicely
58 58 fmt = '\n'
59 59 for cnt, elem in enumerate(self.keys()):
60 60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
61 61 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
62 62
63 63 def __getitem__(self, key):
64 64 self._report_keys()
65 65 return self.get(key)
@@ -1,72 +1,72 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 from dogpile.cache import register_backend
20 20
# Register the RhodeCode-specific dogpile backends under dotted names so
# cache regions can reference them via ``backend = dogpile.cache.rc.*``.
register_backend(
    "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
    "LRUMemoryBackend")

register_backend(
    "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
    "FileNamespaceBackend")

register_backend(
    "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
    "RedisPickleBackend")

register_backend(
    "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
    "RedisMsgPackBackend")


log = logging.getLogger(__name__)

# NOTE(review): these imports sit below the register_backend() calls —
# presumably to avoid an import cycle during backend registration; confirm
# before reordering them to the top of the module.
from . import region_meta
from .utils import (get_default_cache_settings, backend_key_generator, make_region)
42 42
43 43
def configure_dogpile_cache(settings):
    """
    Discover all ``rc_cache.*`` options in ``settings`` and register one
    dogpile cache region per namespace in
    ``region_meta.dogpile_cache_regions``.
    """
    cache_dir = settings.get('cache_dir')
    if cache_dir:
        region_meta.dogpile_config_defaults['cache_dir'] = cache_dir

    cache_options = get_default_cache_settings(settings, prefixes=['rc_cache.'])

    # the namespace is everything before the first dot of each stripped key
    discovered_regions = set(
        option_key.split('.', 1)[0] for option_key in cache_options.keys())
    log.debug('dogpile: found following cache regions: %s', discovered_regions)

    for region_name in discovered_regions:
        region = make_region(name=region_name, function_key_generator=None)
        region.configure_from_config(settings, 'rc_cache.{}.'.format(region_name))
        # bind a key generator that knows about the concrete backend,
        # so keys carry the backend's prefix
        region.function_key_generator = backend_key_generator(region.actual_backend)
        log.debug('dogpile: registering a new region %s[%s]',
                  region_name, region.__dict__)
        region_meta.dogpile_cache_regions[region_name] = region
69 69
70 70
def includeme(config):
    """Pyramid plugin entry point: configure cache regions from app settings."""
    configure_dogpile_cache(config.registry.settings)
@@ -1,253 +1,253 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import errno
20 20 import logging
21 21
22 22 import msgpack
23 23 import redis
24 24
25 25 from dogpile.cache.api import CachedValue
26 26 from dogpile.cache.backends import memory as memory_backend
27 27 from dogpile.cache.backends import file as file_backend
28 28 from dogpile.cache.backends import redis as redis_backend
29 29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 30 from dogpile.cache.util import memoized_property
31 31
32 32 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
33 33
34 34
35 35 _default_max_size = 1024
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
class LRUMemoryBackend(memory_backend.MemoryBackend):
    """In-memory dogpile backend backed by an LRU-bounded dict."""
    key_prefix = 'lru_mem_backend'
    pickle_values = False

    def __init__(self, arguments):
        size_limit = arguments.pop('max_size', _default_max_size)

        # the debug variant logs every cached key on each read
        lru_class = LRUDictDebug if arguments.pop('log_key_count', None) else LRUDict

        arguments['cache_dict'] = lru_class(size_limit)
        super(LRUMemoryBackend, self).__init__(arguments)

    def delete(self, key):
        # deleting a missing key is deliberately a no-op
        try:
            del self._cache[key]
        except KeyError:
            pass

    def delete_multi(self, keys):
        for key in keys:
            self.delete(key)
64 64
65 65
class PickleSerializer(object):
    """Mixin (de)serializing cached values with pickle."""

    def _dumps(self, value, safe=False):
        # safe=True turns serialization failures into a cache miss
        try:
            return compat.pickle.dumps(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        # safe=True turns deserialization failures into a cache miss
        try:
            return compat.pickle.loads(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
85 85
86 86
class MsgPackSerializer(object):
    """Mixin (de)serializing cached values with msgpack."""

    def _dumps(self, value, safe=False):
        # safe=True turns serialization failures into a cache miss
        try:
            return msgpack.packb(value)
        except Exception:
            if not safe:
                raise
            return NO_VALUE

    def _loads(self, value, safe=True):
        """
        pickle maintained the `CachedValue` wrapper of the tuple
        msgpack does not, so it must be added back in.
        """
        try:
            unpacked = msgpack.unpackb(value, use_list=False)
            return CachedValue(*unpacked)
        except Exception:
            if not safe:
                raise
            return NO_VALUE
111 111
112 112
# keep a reference to the original flock implementation — presumably so it
# can be restored or monkey-patched around custom lock factories; confirm
# with callers before removing
import fcntl
flock_org = fcntl.flock
115 115
116 116
class CustomLockFactory(FileLock):
    # placeholder hook point for customizing dbm file locking;
    # currently identical to dogpile's FileLock

    pass
120 120
121 121
class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
    """DBM-file dogpile backend with pickle serialization."""
    key_prefix = 'file_backend'

    def __init__(self, arguments):
        # install our own (currently pass-through) lock factory
        arguments['lock_factory'] = CustomLockFactory
        super(FileNamespaceBackend, self).__init__(arguments)

    def __repr__(self):
        return '{} `{}`'.format(self.__class__, self.filename)

    def list_keys(self, prefix=''):
        full_prefix = '{}:{}'.format(self.key_prefix, prefix)

        def matches(stored_key):
            # an empty prefix matches every key
            return not full_prefix or stored_key.startswith(full_prefix)

        with self._dbm_file(True) as dbm:

            return filter(matches, dbm.keys())

    def get_store(self):
        return self.filename

    def get(self, key):
        with self._dbm_file(False) as dbm:
            if hasattr(dbm, 'get'):
                raw = dbm.get(key, NO_VALUE)
            else:
                # gdbm objects lack a .get method
                try:
                    raw = dbm[key]
                except KeyError:
                    raw = NO_VALUE
            return self._loads(raw) if raw is not NO_VALUE else raw

    def set(self, key, value):
        with self._dbm_file(True) as dbm:
            dbm[key] = self._dumps(value)

    def set_multi(self, mapping):
        with self._dbm_file(True) as dbm:
            for key, value in mapping.items():
                dbm[key] = self._dumps(value)
172 172
173 173
class BaseRedisBackend(redis_backend.RedisBackend):
    """
    Common Redis backend logic; concrete subclasses combine this with a
    serializer mixin (pickle or msgpack) and define their ``key_prefix``.
    """

    def _create_client(self):
        # build either a URL-based or discrete host/port connection pool
        args = {}

        if self.url is not None:
            args.update(url=self.url)

        else:
            args.update(
                host=self.host, password=self.password,
                port=self.port, db=self.db
            )

        connection_pool = redis.ConnectionPool(**args)

        return redis.StrictRedis(connection_pool=connection_pool)

    def list_keys(self, prefix=''):
        # server-side glob match over '<key_prefix>:<prefix>*'
        prefix = '{}:{}*'.format(self.key_prefix, prefix)
        return self.client.keys(prefix)

    def get_store(self):
        return self.client.connection_pool

    def get(self, key):
        value = self.client.get(key)
        if value is None:
            # dogpile expects the NO_VALUE sentinel, not None, on a miss
            return NO_VALUE
        return self._loads(value)

    def get_multi(self, keys):
        if not keys:
            return []
        values = self.client.mget(keys)
        loads = self._loads
        return [
            loads(v) if v is not None else NO_VALUE
            for v in values]

    def set(self, key, value):
        if self.redis_expiration_time:
            # SETEX stores the value together with a TTL in one round-trip
            self.client.setex(key, self.redis_expiration_time,
                              self._dumps(value))
        else:
            self.client.set(key, self._dumps(value))

    def set_multi(self, mapping):
        dumps = self._dumps
        mapping = dict(
            (k, dumps(v))
            for k, v in mapping.items()
        )

        if not self.redis_expiration_time:
            self.client.mset(mapping)
        else:
            # MSET cannot carry TTLs, so fall back to pipelined SETEX calls
            pipe = self.client.pipeline()
            for key, value in mapping.items():
                pipe.setex(key, self.redis_expiration_time, value)
            pipe.execute()

    def get_mutex(self, key):
        u = redis_backend.u
        if self.distributed_lock:
            # distributed dogpile mutex backed by a Redis lock object
            lock_key = u('_lock_{0}').format(key)
            log.debug('Trying to acquire Redis lock for key %s', lock_key)
            return self.client.lock(lock_key, self.lock_timeout, self.lock_sleep)
        else:
            return None
244 244
245 245
class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
    """Redis backend storing pickle-serialized values."""
    # NOTE: the trailing `pass` was redundant and has been removed
    key_prefix = 'redis_pickle_backend'
249 249
250 250
class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
    """Redis backend storing msgpack-serialized values."""
    # NOTE: the trailing `pass` was redundant and has been removed
    key_prefix = 'redis_msgpack_backend'
@@ -1,26 +1,26 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import tempfile
20 20
# defaults applied when configuring dogpile cache regions; ``cache_dir``
# can be overridden through the application's ``cache_dir`` setting
# (see configure_dogpile_cache)
dogpile_config_defaults = {
    'cache_dir': os.path.join(tempfile.gettempdir(), 'rc_cache')
}

# GLOBAL TO STORE ALL REGISTERED REGIONS
dogpile_cache_regions = {}
@@ -1,153 +1,153 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import functools
21 21 from decorator import decorate
22 22
23 23 from dogpile.cache import CacheRegion
24 24 from dogpile.cache.util import compat
25 25
26 26 from vcsserver.utils import safe_str, sha1
27 27
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
class RhodeCodeCacheRegion(CacheRegion):
    """
    CacheRegion subclass adding a conditional caching decorator that can
    bypass dogpile entirely when caching is disabled for a call site.
    """

    def conditional_cache_on_arguments(
            self, namespace=None,
            expiration_time=None,
            should_cache_fn=None,
            to_str=compat.string_type,
            function_key_generator=None,
            condition=True):
        """
        Custom conditional decorator, that will not touch any dogpile internals if
        condition isn't meet. This works a bit different than should_cache_fn
        And it's faster in cases we don't ever want to compute cached values

        :param condition: when falsy, the decorated function is called
            directly and the cache is never consulted
        """
        expiration_time_is_callable = compat.callable(expiration_time)

        if function_key_generator is None:
            function_key_generator = self.function_key_generator

        def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):

            if not condition:
                # caching disabled: call through without touching dogpile
                log.debug('Calling un-cached func:%s', user_func.func_name)
                return user_func(*arg, **kw)

            key = key_generator(*arg, **kw)

            # expiration_time may be a callable producing a per-call timeout
            timeout = expiration_time() if expiration_time_is_callable \
                else expiration_time

            log.debug('Calling cached fn:%s', user_func.func_name)
            return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))

        def cache_decorator(user_func):
            if to_str is compat.string_type:
                # backwards compatible
                key_generator = function_key_generator(namespace, user_func)
            else:
                key_generator = function_key_generator(namespace, user_func, to_str=to_str)

            def refresh(*arg, **kw):
                """
                Like invalidate, but regenerates the value instead
                """
                key = key_generator(*arg, **kw)
                value = user_func(*arg, **kw)
                self.set(key, value)
                return value

            def invalidate(*arg, **kw):
                # drop the cached value for these arguments
                key = key_generator(*arg, **kw)
                self.delete(key)

            def set_(value, *arg, **kw):
                # store an explicit value for these arguments
                key = key_generator(*arg, **kw)
                self.set(key, value)

            def get(*arg, **kw):
                # fetch the cached value without computing it
                key = key_generator(*arg, **kw)
                return self.get(key)

            # expose cache-management helpers on the decorated function
            user_func.set = set_
            user_func.invalidate = invalidate
            user_func.get = get
            user_func.refresh = refresh
            user_func.key_generator = key_generator
            user_func.original = user_func

            # Use `decorate` to preserve the signature of :param:`user_func`.

            return decorate(user_func, functools.partial(
                get_or_create_for_user_func, key_generator))

        return cache_decorator
106 106
107 107
def make_region(*arg, **kw):
    """Factory returning the customized RhodeCode cache region class."""
    return RhodeCodeCacheRegion(*arg, **kw)
110 110
111 111
def get_default_cache_settings(settings, prefixes=None):
    """
    Extract from ``settings`` every option whose key starts with one of
    ``prefixes`` and return it with that prefix stripped.

    :param settings: mapping of raw configuration options
    :param prefixes: list of key prefixes to match, e.g. ``['rc_cache.']``
    :return: dict of stripped-key -> value (string values are stripped too)
    """
    prefixes = prefixes or []
    cache_settings = {}
    for key in settings.keys():
        for prefix in prefixes:
            if key.startswith(prefix):
                # slice instead of key.split(prefix)[1]: split() returned the
                # wrong element when the prefix substring occurred again
                # later inside the key
                name = key[len(prefix):].strip()
                val = settings[key]
                if isinstance(val, compat.string_types):
                    val = val.strip()
                cache_settings[name] = val
    return cache_settings
124 124
125 125
def compute_key_from_params(*args):
    """
    Build a stable cache key by sha1-hashing all params joined with ``_``.
    """
    joined = "_".join(safe_str(arg) for arg in args)
    return sha1(joined)
131 131
132 132
def backend_key_generator(backend):
    """
    Key-generator factory that also forwards the cache backend, so that
    generated keys can carry the backend's ``key_prefix``.
    """
    def backend_aware_generator(namespace, fn):
        return key_generator(backend, namespace, fn)
    return backend_aware_generator
140 140
141 141
def key_generator(backend, namespace, fn):
    """
    Return a callable producing cache keys of the shape
    ``<backend_prefix>:<namespace>:<func_name>_<sha1-of-args>``.
    """
    func_name = fn.__name__

    def generate_key(*args):
        prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
        namespace_part = namespace or 'default_namespace'
        args_hash = compute_key_from_params(*args)
        return "{}:{}:{}_{}".format(prefix, namespace_part, func_name, args_hash)

    return generate_key
@@ -1,27 +1,27 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2019 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
# module-level request counter shared by all calls
counter = 0


def get_request_counter(request):
    """Return a monotonically increasing number per handled request."""
    global counter
    counter = counter + 1
    return counter
@@ -1,386 +1,386 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """Handles the Git smart protocol."""
19 19
20 20 import os
21 21 import socket
22 22 import logging
23 23
24 24 import simplejson as json
25 25 import dulwich.protocol
26 26 from webob import Request, Response, exc
27 27
28 28 from vcsserver import hooks, subprocessio
29 29
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
class FileWrapper(object):
    """File wrapper that ensures how much data is read from it."""

    def __init__(self, fd, content_length):
        self.fd = fd
        self.content_length = content_length
        # bytes still allowed to be read
        self.remain = content_length

    def read(self, size):
        if size <= self.remain:
            try:
                chunk = self.fd.read(size)
            except socket.error:
                raise IOError(self)
            self.remain -= size
            return chunk
        if self.remain:
            # cap the read at the remaining budget
            chunk = self.fd.read(self.remain)
            self.remain = 0
            return chunk
        return None

    def __repr__(self):
        return '<FileWrapper {} len: {}, read: {}>'.format(
            self.fd, self.content_length, self.content_length - self.remain)
60 60
61 61
class GitRepository(object):
    """WSGI app for handling Git smart protocol endpoints."""

    # lowercase entries a bare git dir must contain to be considered valid
    git_folder_signature = frozenset(
        ('config', 'head', 'info', 'objects', 'refs'))
    commands = frozenset(('git-upload-pack', 'git-receive-pack'))
    valid_accepts = frozenset(('application/x-%s-result' %
                               c for c in commands))

    # The last bytes are the SHA1 of the first 12 bytes.
    EMPTY_PACK = (
        'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
        '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
    )
    SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))

    def __init__(self, repo_name, content_path, git_path, update_server_info,
                 extras):
        # validate that content_path really looks like a bare git repository
        files = frozenset(f.lower() for f in os.listdir(content_path))
        valid_dir_signature = self.git_folder_signature.issubset(files)

        if not valid_dir_signature:
            raise OSError('%s missing git signature' % content_path)

        self.content_path = content_path
        self.repo_name = repo_name
        self.extras = extras
        self.git_path = git_path
        self.update_server_info = update_server_info

    def _get_fixedpath(self, path):
        """
        Strip the repo name (and a bare repo's ``.git`` prefix) from ``path``,
        returning the remaining service path without surrounding slashes.

        :param path: request path, e.g. ``/repo/.git/git-upload-pack``
        """
        path = path.split(self.repo_name, 1)[-1]
        if path.startswith('.git'):
            # for bare repos we still get the .git prefix inside, we skip it
            # here, and remove from the service command
            path = path[4:]

        return path.strip('/')

    def inforefs(self, request, unused_environ):
        """
        WSGI Response producer for HTTP GET Git Smart
        HTTP /info/refs request.
        """

        git_command = request.GET.get('service')
        if git_command not in self.commands:
            log.debug('command %s not allowed', git_command)
            return exc.HTTPForbidden()

        # please, resist the urge to add '\n' to git capture and increment
        # line count by 1.
        # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
        # a part of protocol.
        # The code in Git client not only does NOT need '\n', but actually
        # blows up if you sprinkle "flush" (0000) as "0001\n".
        # It reads binary, per number of bytes specified.
        # if you do add '\n' as part of data, count it.
        server_advert = '# service=%s\n' % git_command
        # pkt-line length header: 4 hex digits covering header + payload
        packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
        try:
            gitenv = dict(os.environ)
            # forget all configs
            gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
            command = [self.git_path, git_command[4:], '--stateless-rpc',
                       '--advertise-refs', self.content_path]
            out = subprocessio.SubprocessIOChunker(
                command,
                env=gitenv,
                starting_values=[packet_len + server_advert + '0000'],
                shell=False
            )
        except EnvironmentError:
            log.exception('Error processing command')
            raise exc.HTTPExpectationFailed()

        resp = Response()
        resp.content_type = 'application/x-%s-advertisement' % str(git_command)
        resp.charset = None
        resp.app_iter = out

        return resp

    def _get_want_capabilities(self, request):
        """Read the capabilities found in the first want line of the request."""
        # peek at the first line, then restore the stream position
        pos = request.body_file_seekable.tell()
        first_line = request.body_file_seekable.readline()
        request.body_file_seekable.seek(pos)

        return frozenset(
            dulwich.protocol.extract_want_line_capabilities(first_line)[1])

    def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
        """
        Construct a response with an empty PACK file.

        We use an empty PACK file, as that would trigger the failure of the pull
        or clone command.

        We also print in the error output a message explaining why the command
        was aborted.

        If additionally, the user is accepting messages we send them the output
        of the pre-pull hook.

        Note that for clients not supporting side-band we just send them the
        empty PACK file.
        """
        if self.SIDE_BAND_CAPS.intersection(capabilities):
            response = []
            proto = dulwich.protocol.Protocol(None, response.append)
            proto.write_pkt_line('NAK\n')
            self._write_sideband_to_proto(pre_pull_messages, proto,
                                          capabilities)
            # N.B.(skreft): Do not change the sideband channel to 3, as that
            # produces a fatal error in the client:
            #   fatal: error in sideband demultiplexer
            proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
            proto.write_sideband(1, self.EMPTY_PACK)

            # writes 0000
            proto.write_pkt_line(None)

            return response
        else:
            return [self.EMPTY_PACK]

    def _write_sideband_to_proto(self, data, proto, capabilities):
        """
        Write the data to the proto's sideband number 2.

        We do not use dulwich's write_sideband directly as it only supports
        side-band-64k.
        """
        if not data:
            return

        # N.B.(skreft): The values below are explained in the pack protocol
        # documentation, section Packfile Data.
        # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
        if 'side-band-64k' in capabilities:
            chunk_size = 65515
        elif 'side-band' in capabilities:
            chunk_size = 995
        else:
            return

        chunker = (
            data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))

        for chunk in chunker:
            proto.write_sideband(2, chunk)

    def _get_messages(self, data, capabilities):
        """Return a list with packets for sending data in sideband number 2."""
        response = []
        proto = dulwich.protocol.Protocol(None, response.append)

        self._write_sideband_to_proto(data, proto, capabilities)

        return response

    def _inject_messages_to_response(self, response, capabilities,
                                     start_messages, end_messages):
        """
        Given a list response we inject the pre/post-pull messages.

        We only inject the messages if the client supports sideband, and the
        response has the format:
        0008NAK\n...0000

        Note that we do not check the no-progress capability as by default, git
        sends it, which effectively would block all messages.
        """
        if not self.SIDE_BAND_CAPS.intersection(capabilities):
            return response

        if not start_messages and not end_messages:
            return response

        # make a list out of response if it's an iterator
        # so we can investigate it for message injection.
        if hasattr(response, '__iter__'):
            response = list(response)

        # only responses of the exact '0008NAK\n...0000' shape are rewritten
        if (not response[0].startswith('0008NAK\n') or
                not response[-1].endswith('0000')):
            return response

        new_response = ['0008NAK\n']
        new_response.extend(self._get_messages(start_messages, capabilities))
        if len(response) == 1:
            # single chunk: strip both the NAK header and the trailing flush
            new_response.append(response[0][8:-4])
        else:
            new_response.append(response[0][8:])
            new_response.extend(response[1:-1])
            new_response.append(response[-1][:-4])
        new_response.extend(self._get_messages(end_messages, capabilities))
        new_response.append('0000')

        return new_response

    def backend(self, request, environ):
        """
        WSGI Response producer for HTTP POST Git Smart HTTP requests.
        Reads commands and data from HTTP POST's body.
        returns an iterator obj with contents of git command's
        response to stdout
        """
        # TODO(skreft): think how we could detect an HTTPLockedException, as
        # we probably want to have the same mechanism used by mercurial and
        # simplevcs.
        # For that we would need to parse the output of the command looking for
        # some signs of the HTTPLockedError, parse the data and reraise it in
        # pygrack. However, that would interfere with the streaming.
        #
        # Now the output of a blocked push is:
        # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
        # POST git-receive-pack (1047 bytes)
        # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
        # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
        #  ! [remote rejected] master -> master (pre-receive hook declined)
        # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'

        git_command = self._get_fixedpath(request.path_info)
        if git_command not in self.commands:
            log.debug('command %s not allowed', git_command)
            return exc.HTTPForbidden()

        capabilities = None
        if git_command == 'git-upload-pack':
            capabilities = self._get_want_capabilities(request)

        if 'CONTENT_LENGTH' in environ:
            # cap reads at the declared request size
            inputstream = FileWrapper(request.body_file_seekable,
                                      request.content_length)
        else:
            inputstream = request.body_file_seekable

        resp = Response()
        resp.content_type = ('application/x-%s-result' %
                             git_command.encode('utf8'))
        resp.charset = None

        pre_pull_messages = ''
        if git_command == 'git-upload-pack':
            # a failing pre-pull hook aborts with an empty PACK + messages
            status, pre_pull_messages = hooks.git_pre_pull(self.extras)
            if status != 0:
                resp.app_iter = self._build_failed_pre_pull_response(
                    capabilities, pre_pull_messages)
                return resp

        gitenv = dict(os.environ)
        # forget all configs
        gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
        gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
        cmd = [self.git_path, git_command[4:], '--stateless-rpc',
               self.content_path]
        log.debug('handling cmd %s', cmd)

        out = subprocessio.SubprocessIOChunker(
            cmd,
            inputstream=inputstream,
            env=gitenv,
            cwd=self.content_path,
            shell=False,
            fail_on_stderr=False,
            fail_on_return_code=False
        )

        if self.update_server_info and git_command == 'git-receive-pack':
            # We need to fully consume the iterator here, as the
            # update-server-info command needs to be run after the push.
            out = list(out)

            # Updating refs manually after each push.
            # This is required as some clients are exposing Git repos internally
            # with the dumb protocol.
            cmd = [self.git_path, 'update-server-info']
            log.debug('handling cmd %s', cmd)
            output = subprocessio.SubprocessIOChunker(
                cmd,
                inputstream=inputstream,
                env=gitenv,
                cwd=self.content_path,
                shell=False,
                fail_on_stderr=False,
                fail_on_return_code=False
            )
            # Consume all the output so the subprocess finishes
            for _ in output:
                pass

        if git_command == 'git-upload-pack':
            unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
            resp.app_iter = self._inject_messages_to_response(
                out, capabilities, pre_pull_messages, post_pull_messages)
        else:
            resp.app_iter = out

        return resp

    def __call__(self, environ, start_response):
        # route info/refs GETs to inforefs, everything else to backend
        request = Request(environ)
        _path = self._get_fixedpath(request.path_info)
        if _path.startswith('info/refs'):
            app = self.inforefs
        else:
            app = self.backend

        try:
            resp = app(request, environ)
        except exc.HTTPException as error:
            log.exception('HTTP Error')
            resp = error
        except Exception:
            log.exception('Unknown error')
            resp = exc.HTTPInternalServerError()

        return resp(environ, start_response)
@@ -1,34 +1,34 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from vcsserver import scm_app, wsgi_app_caller
19 19
20 20
21 21 class GitRemoteWsgi(object):
22 22 def handle(self, environ, input_data, *args, **kwargs):
23 23 app = wsgi_app_caller.WSGIAppCaller(
24 24 scm_app.create_git_wsgi_app(*args, **kwargs))
25 25
26 26 return app.handle(environ, input_data)
27 27
28 28
29 29 class HgRemoteWsgi(object):
30 30 def handle(self, environ, input_data, *args, **kwargs):
31 31 app = wsgi_app_caller.WSGIAppCaller(
32 32 scm_app.create_hg_wsgi_app(*args, **kwargs))
33 33
34 34 return app.handle(environ, input_data)
@@ -1,235 +1,235 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import itertools
21 21
22 22 import mercurial
23 23 import mercurial.error
24 24 import mercurial.wireprotoserver
25 25 import mercurial.hgweb.common
26 26 import mercurial.hgweb.hgweb_mod
27 27 import webob.exc
28 28
29 29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 30
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 # propagated from mercurial documentation
36 36 HG_UI_SECTIONS = [
37 37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 40 ]
41 41
42 42
43 43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 44 """Extension of hgweb that simplifies some functions."""
45 45
46 46 def _get_view(self, repo):
47 47 """Views are not supported."""
48 48 return repo
49 49
50 50 def loadsubweb(self):
51 51 """The result is only used in the templater method which is not used."""
52 52 return None
53 53
54 54 def run(self):
55 55 """Unused function so raise an exception if accidentally called."""
56 56 raise NotImplementedError
57 57
58 58 def templater(self, req):
59 59 """Function used in an unreachable code path.
60 60
61 61 This code is unreachable because we guarantee that the HTTP request,
62 62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 63 never going to get a user-visible url.
64 64 """
65 65 raise NotImplementedError
66 66
67 67 def archivelist(self, nodeid):
68 68 """Unused function so raise an exception if accidentally called."""
69 69 raise NotImplementedError
70 70
71 71 def __call__(self, environ, start_response):
72 72 """Run the WSGI application.
73 73
74 74 This may be called by multiple threads.
75 75 """
76 76 from mercurial.hgweb import request as requestmod
77 77 req = requestmod.parserequestfromenv(environ)
78 78 res = requestmod.wsgiresponse(req, start_response)
79 79 gen = self.run_wsgi(req, res)
80 80
81 81 first_chunk = None
82 82
83 83 try:
84 84 data = gen.next()
85 85
86 86 def first_chunk():
87 87 yield data
88 88 except StopIteration:
89 89 pass
90 90
91 91 if first_chunk:
92 92 return itertools.chain(first_chunk(), gen)
93 93 return gen
94 94
95 95 def _runwsgi(self, req, res, repo):
96 96
97 97 cmd = req.qsparams.get('cmd', '')
98 98 if not mercurial.wireprotoserver.iscmd(cmd):
99 99 # NOTE(marcink): for unsupported commands, we return bad request
100 100 # internally from HG
101 101 from mercurial.hgweb.common import statusmessage
102 102 res.status = statusmessage(mercurial.hgweb.common.HTTP_BAD_REQUEST)
103 103 res.setbodybytes('')
104 104 return res.sendresponse()
105 105
106 106 return super(HgWeb, self)._runwsgi(req, res, repo)
107 107
108 108
109 109 def make_hg_ui_from_config(repo_config):
110 110 baseui = mercurial.ui.ui()
111 111
112 112 # clean the baseui object
113 113 baseui._ocfg = mercurial.config.config()
114 114 baseui._ucfg = mercurial.config.config()
115 115 baseui._tcfg = mercurial.config.config()
116 116
117 117 for section, option, value in repo_config:
118 118 baseui.setconfig(section, option, value)
119 119
120 120 # make our hgweb quiet so it doesn't print output
121 121 baseui.setconfig('ui', 'quiet', 'true')
122 122
123 123 return baseui
124 124
125 125
126 126 def update_hg_ui_from_hgrc(baseui, repo_path):
127 127 path = os.path.join(repo_path, '.hg', 'hgrc')
128 128
129 129 if not os.path.isfile(path):
130 130 log.debug('hgrc file is not present at %s, skipping...', path)
131 131 return
132 132 log.debug('reading hgrc from %s', path)
133 133 cfg = mercurial.config.config()
134 134 cfg.read(path)
135 135 for section in HG_UI_SECTIONS:
136 136 for k, v in cfg.items(section):
137 137 log.debug('settings ui from file: [%s] %s=%s', section, k, v)
138 138 baseui.setconfig(section, k, v)
139 139
140 140
141 141 def create_hg_wsgi_app(repo_path, repo_name, config):
142 142 """
143 143 Prepares a WSGI application to handle Mercurial requests.
144 144
145 145 :param config: is a list of 3-item tuples representing a ConfigObject
146 146 (it is the serialized version of the config object).
147 147 """
148 148 log.debug("Creating Mercurial WSGI application")
149 149
150 150 baseui = make_hg_ui_from_config(config)
151 151 update_hg_ui_from_hgrc(baseui, repo_path)
152 152
153 153 try:
154 154 return HgWeb(repo_path, name=repo_name, baseui=baseui)
155 155 except mercurial.error.RequirementError as e:
156 156 raise exceptions.RequirementException(e)(e)
157 157
158 158
159 159 class GitHandler(object):
160 160 """
161 161 Handler for Git operations like push/pull etc
162 162 """
163 163 def __init__(self, repo_location, repo_name, git_path, update_server_info,
164 164 extras):
165 165 if not os.path.isdir(repo_location):
166 166 raise OSError(repo_location)
167 167 self.content_path = repo_location
168 168 self.repo_name = repo_name
169 169 self.repo_location = repo_location
170 170 self.extras = extras
171 171 self.git_path = git_path
172 172 self.update_server_info = update_server_info
173 173
174 174 def __call__(self, environ, start_response):
175 175 app = webob.exc.HTTPNotFound()
176 176 candidate_paths = (
177 177 self.content_path, os.path.join(self.content_path, '.git'))
178 178
179 179 for content_path in candidate_paths:
180 180 try:
181 181 app = pygrack.GitRepository(
182 182 self.repo_name, content_path, self.git_path,
183 183 self.update_server_info, self.extras)
184 184 break
185 185 except OSError:
186 186 continue
187 187
188 188 return app(environ, start_response)
189 189
190 190
191 191 def create_git_wsgi_app(repo_path, repo_name, config):
192 192 """
193 193 Creates a WSGI application to handle Git requests.
194 194
195 195 :param config: is a dictionary holding the extras.
196 196 """
197 197 git_path = settings.GIT_EXECUTABLE
198 198 update_server_info = config.pop('git_update_server_info')
199 199 app = GitHandler(
200 200 repo_path, repo_name, git_path, update_server_info, config)
201 201
202 202 return app
203 203
204 204
205 205 class GitLFSHandler(object):
206 206 """
207 207 Handler for Git LFS operations
208 208 """
209 209
210 210 def __init__(self, repo_location, repo_name, git_path, update_server_info,
211 211 extras):
212 212 if not os.path.isdir(repo_location):
213 213 raise OSError(repo_location)
214 214 self.content_path = repo_location
215 215 self.repo_name = repo_name
216 216 self.repo_location = repo_location
217 217 self.extras = extras
218 218 self.git_path = git_path
219 219 self.update_server_info = update_server_info
220 220
221 221 def get_app(self, git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme):
222 222 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
223 223 return app
224 224
225 225
226 226 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
227 227 git_path = settings.GIT_EXECUTABLE
228 228 update_server_info = config.pop('git_update_server_info')
229 229 git_lfs_enabled = config.pop('git_lfs_enabled')
230 230 git_lfs_store_path = config.pop('git_lfs_store_path')
231 231 git_lfs_http_scheme = config.pop('git_lfs_http_scheme', 'http')
232 232 app = GitLFSHandler(
233 233 repo_path, repo_name, git_path, update_server_info, config)
234 234
235 235 return app.get_app(git_lfs_enabled, git_lfs_store_path, git_lfs_http_scheme)
@@ -1,78 +1,78 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import gc
19 19 import logging
20 20 import os
21 21 import time
22 22
23 23
24 24 log = logging.getLogger(__name__)
25 25
26 26
27 27 class VcsServer(object):
28 28 """
29 29 Exposed remote interface of the vcsserver itself.
30 30
31 31 This object can be used to manage the server remotely. Right now the main
32 32 use case is to allow to shut down the server.
33 33 """
34 34
35 35 _shutdown = False
36 36
37 37 def shutdown(self):
38 38 self._shutdown = True
39 39
40 40 def ping(self):
41 41 """
42 42 Utility to probe a server connection.
43 43 """
44 44 log.debug("Received server ping.")
45 45
46 46 def echo(self, data):
47 47 """
48 48 Utility for performance testing.
49 49
50 50 Allows to pass in arbitrary data and will return this data.
51 51 """
52 52 log.debug("Received server echo.")
53 53 return data
54 54
55 55 def sleep(self, seconds):
56 56 """
57 57 Utility to simulate long running server interaction.
58 58 """
59 59 log.debug("Sleeping %s seconds", seconds)
60 60 time.sleep(seconds)
61 61
62 62 def get_pid(self):
63 63 """
64 64 Allows to discover the PID based on a proxy object.
65 65 """
66 66 return os.getpid()
67 67
68 68 def run_gc(self):
69 69 """
70 70 Allows to trigger the garbage collector.
71 71
72 72 Main intention is to support statistics gathering during test runs.
73 73 """
74 74 freed_objects = gc.collect()
75 75 return {
76 76 'freed_objects': freed_objects,
77 77 'garbage': len(gc.garbage),
78 78 }
@@ -1,22 +1,22 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 WIRE_ENCODING = 'UTF-8'
19 19 GIT_EXECUTABLE = 'git'
20 20 SVN_EXECUTABLE = 'svn'
21 21 SVNLOOK_EXECUTABLE = 'svnlook'
22 22 BINARY_DIR = ''
@@ -1,799 +1,791 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 import subprocess
22 22 from urllib2 import URLError
23 23 import urlparse
24 24 import logging
25 25 import posixpath as vcspath
26 26 import StringIO
27 27 import urllib
28 28 import traceback
29 29
30 30 import svn.client
31 31 import svn.core
32 32 import svn.delta
33 33 import svn.diff
34 34 import svn.fs
35 35 import svn.repos
36 36
37 37 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 38 from vcsserver.base import RepoFactory, raise_from_original
39 39 from vcsserver.vcs_base import RemoteBase
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 # Set of svn compatible version flags.
45 # Compare with subversion/svnadmin/svnadmin.c
46 svn_compatible_versions = {
47 'pre-1.4-compatible',
48 'pre-1.5-compatible',
49 'pre-1.6-compatible',
50 'pre-1.8-compatible',
51 'pre-1.9-compatible'
52 }
53
54 44 svn_compatible_versions_map = {
55 45 'pre-1.4-compatible': '1.3',
56 46 'pre-1.5-compatible': '1.4',
57 47 'pre-1.6-compatible': '1.5',
58 48 'pre-1.8-compatible': '1.7',
59 49 'pre-1.9-compatible': '1.8',
60 50 }
61 51
52 current_compatible_version = '1.12'
53
62 54
63 55 def reraise_safe_exceptions(func):
64 56 """Decorator for converting svn exceptions to something neutral."""
65 57 def wrapper(*args, **kwargs):
66 58 try:
67 59 return func(*args, **kwargs)
68 60 except Exception as e:
69 61 if not hasattr(e, '_vcs_kind'):
70 62 log.exception("Unhandled exception in svn remote call")
71 63 raise_from_original(exceptions.UnhandledException(e))
72 64 raise
73 65 return wrapper
74 66
75 67
76 68 class SubversionFactory(RepoFactory):
77 69 repo_type = 'svn'
78 70
79 71 def _create_repo(self, wire, create, compatible_version):
80 72 path = svn.core.svn_path_canonicalize(wire['path'])
81 73 if create:
82 fs_config = {'compatible-version': '1.9'}
74 fs_config = {'compatible-version': current_compatible_version}
83 75 if compatible_version:
84 if compatible_version not in svn_compatible_versions:
85 raise Exception('Unknown SVN compatible version "{}"'
86 .format(compatible_version))
87 fs_config['compatible-version'] = \
88 svn_compatible_versions_map[compatible_version]
76
77 compatible_version_string = \
78 svn_compatible_versions_map.get(compatible_version) \
79 or compatible_version
80 fs_config['compatible-version'] = compatible_version_string
89 81
90 82 log.debug('Create SVN repo with config "%s"', fs_config)
91 83 repo = svn.repos.create(path, "", "", None, fs_config)
92 84 else:
93 85 repo = svn.repos.open(path)
94 86
95 87 log.debug('Got SVN object: %s', repo)
96 88 return repo
97 89
98 90 def repo(self, wire, create=False, compatible_version=None):
99 91 """
100 92 Get a repository instance for the given path.
101 93 """
102 94 return self._create_repo(wire, create, compatible_version)
103 95
104 96
105 97 NODE_TYPE_MAPPING = {
106 98 svn.core.svn_node_file: 'file',
107 99 svn.core.svn_node_dir: 'dir',
108 100 }
109 101
110 102
111 103 class SvnRemote(RemoteBase):
112 104
113 105 def __init__(self, factory, hg_factory=None):
114 106 self._factory = factory
115 107 # TODO: Remove once we do not use internal Mercurial objects anymore
116 108 # for subversion
117 109 self._hg_factory = hg_factory
118 110
119 111 @reraise_safe_exceptions
120 112 def discover_svn_version(self):
121 113 try:
122 114 import svn.core
123 115 svn_ver = svn.core.SVN_VERSION
124 116 except ImportError:
125 117 svn_ver = None
126 118 return svn_ver
127 119
128 120 @reraise_safe_exceptions
129 121 def is_empty(self, wire):
130 122
131 123 try:
132 124 return self.lookup(wire, -1) == 0
133 125 except Exception:
134 126 log.exception("failed to read object_store")
135 127 return False
136 128
137 129 def check_url(self, url, config_items):
138 130 # this can throw exception if not installed, but we detect this
139 131 from hgsubversion import svnrepo
140 132
141 133 baseui = self._hg_factory._create_config(config_items)
142 134 # uuid function get's only valid UUID from proper repo, else
143 135 # throws exception
144 136 try:
145 137 svnrepo.svnremoterepo(baseui, url).svn.uuid
146 138 except Exception:
147 139 tb = traceback.format_exc()
148 140 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
149 141 raise URLError(
150 142 '"%s" is not a valid Subversion source url.' % (url, ))
151 143 return True
152 144
153 145 def is_path_valid_repository(self, wire, path):
154 146
155 147 # NOTE(marcink): short circuit the check for SVN repo
156 148 # the repos.open might be expensive to check, but we have one cheap
157 149 # pre condition that we can use, to check for 'format' file
158 150
159 151 if not os.path.isfile(os.path.join(path, 'format')):
160 152 return False
161 153
162 154 try:
163 155 svn.repos.open(path)
164 156 except svn.core.SubversionException:
165 157 tb = traceback.format_exc()
166 158 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
167 159 return False
168 160 return True
169 161
170 162 @reraise_safe_exceptions
171 163 def verify(self, wire,):
172 164 repo_path = wire['path']
173 165 if not self.is_path_valid_repository(wire, repo_path):
174 166 raise Exception(
175 167 "Path %s is not a valid Subversion repository." % repo_path)
176 168
177 169 cmd = ['svnadmin', 'info', repo_path]
178 170 stdout, stderr = subprocessio.run_command(cmd)
179 171 return stdout
180 172
181 173 def lookup(self, wire, revision):
182 174 if revision not in [-1, None, 'HEAD']:
183 175 raise NotImplementedError
184 176 repo = self._factory.repo(wire)
185 177 fs_ptr = svn.repos.fs(repo)
186 178 head = svn.fs.youngest_rev(fs_ptr)
187 179 return head
188 180
189 181 def lookup_interval(self, wire, start_ts, end_ts):
190 182 repo = self._factory.repo(wire)
191 183 fsobj = svn.repos.fs(repo)
192 184 start_rev = None
193 185 end_rev = None
194 186 if start_ts:
195 187 start_ts_svn = apr_time_t(start_ts)
196 188 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
197 189 else:
198 190 start_rev = 1
199 191 if end_ts:
200 192 end_ts_svn = apr_time_t(end_ts)
201 193 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
202 194 else:
203 195 end_rev = svn.fs.youngest_rev(fsobj)
204 196 return start_rev, end_rev
205 197
206 198 def revision_properties(self, wire, revision):
207 199
208 200 cache_on, context_uid, repo_id = self._cache_on(wire)
209 201 @self.region.conditional_cache_on_arguments(condition=cache_on)
210 202 def _revision_properties(_repo_id, _revision):
211 203 repo = self._factory.repo(wire)
212 204 fs_ptr = svn.repos.fs(repo)
213 205 return svn.fs.revision_proplist(fs_ptr, revision)
214 206 return _revision_properties(repo_id, revision)
215 207
216 208 def revision_changes(self, wire, revision):
217 209
218 210 repo = self._factory.repo(wire)
219 211 fsobj = svn.repos.fs(repo)
220 212 rev_root = svn.fs.revision_root(fsobj, revision)
221 213
222 214 editor = svn.repos.ChangeCollector(fsobj, rev_root)
223 215 editor_ptr, editor_baton = svn.delta.make_editor(editor)
224 216 base_dir = ""
225 217 send_deltas = False
226 218 svn.repos.replay2(
227 219 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
228 220 editor_ptr, editor_baton, None)
229 221
230 222 added = []
231 223 changed = []
232 224 removed = []
233 225
234 226 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
235 227 for path, change in editor.changes.iteritems():
236 228 # TODO: Decide what to do with directory nodes. Subversion can add
237 229 # empty directories.
238 230
239 231 if change.item_kind == svn.core.svn_node_dir:
240 232 continue
241 233 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
242 234 added.append(path)
243 235 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
244 236 svn.repos.CHANGE_ACTION_REPLACE]:
245 237 changed.append(path)
246 238 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
247 239 removed.append(path)
248 240 else:
249 241 raise NotImplementedError(
250 242 "Action %s not supported on path %s" % (
251 243 change.action, path))
252 244
253 245 changes = {
254 246 'added': added,
255 247 'changed': changed,
256 248 'removed': removed,
257 249 }
258 250 return changes
259 251
260 252 @reraise_safe_exceptions
261 253 def node_history(self, wire, path, revision, limit):
262 254 cache_on, context_uid, repo_id = self._cache_on(wire)
263 255 @self.region.conditional_cache_on_arguments(condition=cache_on)
264 256 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
265 257 cross_copies = False
266 258 repo = self._factory.repo(wire)
267 259 fsobj = svn.repos.fs(repo)
268 260 rev_root = svn.fs.revision_root(fsobj, revision)
269 261
270 262 history_revisions = []
271 263 history = svn.fs.node_history(rev_root, path)
272 264 history = svn.fs.history_prev(history, cross_copies)
273 265 while history:
274 266 __, node_revision = svn.fs.history_location(history)
275 267 history_revisions.append(node_revision)
276 268 if limit and len(history_revisions) >= limit:
277 269 break
278 270 history = svn.fs.history_prev(history, cross_copies)
279 271 return history_revisions
280 272 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
281 273
282 274 def node_properties(self, wire, path, revision):
283 275 cache_on, context_uid, repo_id = self._cache_on(wire)
284 276 @self.region.conditional_cache_on_arguments(condition=cache_on)
285 277 def _node_properties(_repo_id, _path, _revision):
286 278 repo = self._factory.repo(wire)
287 279 fsobj = svn.repos.fs(repo)
288 280 rev_root = svn.fs.revision_root(fsobj, revision)
289 281 return svn.fs.node_proplist(rev_root, path)
290 282 return _node_properties(repo_id, path, revision)
291 283
292 284 def file_annotate(self, wire, path, revision):
293 285 abs_path = 'file://' + urllib.pathname2url(
294 286 vcspath.join(wire['path'], path))
295 287 file_uri = svn.core.svn_path_canonicalize(abs_path)
296 288
297 289 start_rev = svn_opt_revision_value_t(0)
298 290 peg_rev = svn_opt_revision_value_t(revision)
299 291 end_rev = peg_rev
300 292
301 293 annotations = []
302 294
303 295 def receiver(line_no, revision, author, date, line, pool):
304 296 annotations.append((line_no, revision, line))
305 297
306 298 # TODO: Cannot use blame5, missing typemap function in the swig code
307 299 try:
308 300 svn.client.blame2(
309 301 file_uri, peg_rev, start_rev, end_rev,
310 302 receiver, svn.client.create_context())
311 303 except svn.core.SubversionException as exc:
312 304 log.exception("Error during blame operation.")
313 305 raise Exception(
314 306 "Blame not supported or file does not exist at path %s. "
315 307 "Error %s." % (path, exc))
316 308
317 309 return annotations
318 310
319 311 def get_node_type(self, wire, path, revision=None):
320 312
321 313 cache_on, context_uid, repo_id = self._cache_on(wire)
322 314 @self.region.conditional_cache_on_arguments(condition=cache_on)
323 315 def _get_node_type(_repo_id, _path, _revision):
324 316 repo = self._factory.repo(wire)
325 317 fs_ptr = svn.repos.fs(repo)
326 318 if _revision is None:
327 319 _revision = svn.fs.youngest_rev(fs_ptr)
328 320 root = svn.fs.revision_root(fs_ptr, _revision)
329 321 node = svn.fs.check_path(root, path)
330 322 return NODE_TYPE_MAPPING.get(node, None)
331 323 return _get_node_type(repo_id, path, revision)
332 324
333 325 def get_nodes(self, wire, path, revision=None):
334 326
335 327 cache_on, context_uid, repo_id = self._cache_on(wire)
336 328 @self.region.conditional_cache_on_arguments(condition=cache_on)
337 329 def _get_nodes(_repo_id, _path, _revision):
338 330 repo = self._factory.repo(wire)
339 331 fsobj = svn.repos.fs(repo)
340 332 if _revision is None:
341 333 _revision = svn.fs.youngest_rev(fsobj)
342 334 root = svn.fs.revision_root(fsobj, _revision)
343 335 entries = svn.fs.dir_entries(root, path)
344 336 result = []
345 337 for entry_path, entry_info in entries.iteritems():
346 338 result.append(
347 339 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
348 340 return result
349 341 return _get_nodes(repo_id, path, revision)
350 342
351 343 def get_file_content(self, wire, path, rev=None):
352 344 repo = self._factory.repo(wire)
353 345 fsobj = svn.repos.fs(repo)
354 346 if rev is None:
355 347 rev = svn.fs.youngest_revision(fsobj)
356 348 root = svn.fs.revision_root(fsobj, rev)
357 349 content = svn.core.Stream(svn.fs.file_contents(root, path))
358 350 return content.read()
359 351
360 352 def get_file_size(self, wire, path, revision=None):
361 353
362 354 cache_on, context_uid, repo_id = self._cache_on(wire)
363 355 @self.region.conditional_cache_on_arguments(condition=cache_on)
364 356 def _get_file_size(_repo_id, _path, _revision):
365 357 repo = self._factory.repo(wire)
366 358 fsobj = svn.repos.fs(repo)
367 359 if _revision is None:
368 360 _revision = svn.fs.youngest_revision(fsobj)
369 361 root = svn.fs.revision_root(fsobj, _revision)
370 362 size = svn.fs.file_length(root, path)
371 363 return size
372 364 return _get_file_size(repo_id, path, revision)
373 365
374 366 def create_repository(self, wire, compatible_version=None):
375 367 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 368 self._factory.repo(wire, create=True,
377 369 compatible_version=compatible_version)
378 370
379 371 def get_url_and_credentials(self, src_url):
380 372 obj = urlparse.urlparse(src_url)
381 373 username = obj.username or None
382 374 password = obj.password or None
383 375 return username, password, src_url
384 376
385 377 def import_remote_repository(self, wire, src_url):
386 378 repo_path = wire['path']
387 379 if not self.is_path_valid_repository(wire, repo_path):
388 380 raise Exception(
389 381 "Path %s is not a valid Subversion repository." % repo_path)
390 382
391 383 username, password, src_url = self.get_url_and_credentials(src_url)
392 384 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
393 385 '--trust-server-cert-failures=unknown-ca']
394 386 if username and password:
395 387 rdump_cmd += ['--username', username, '--password', password]
396 388 rdump_cmd += [src_url]
397 389
398 390 rdump = subprocess.Popen(
399 391 rdump_cmd,
400 392 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
401 393 load = subprocess.Popen(
402 394 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
403 395
404 396 # TODO: johbo: This can be a very long operation, might be better
405 397 # to track some kind of status and provide an api to check if the
406 398 # import is done.
407 399 rdump.wait()
408 400 load.wait()
409 401
410 402 log.debug('Return process ended with code: %s', rdump.returncode)
411 403 if rdump.returncode != 0:
412 404 errors = rdump.stderr.read()
413 405 log.error('svnrdump dump failed: statuscode %s: message: %s',
414 406 rdump.returncode, errors)
415 407 reason = 'UNKNOWN'
416 408 if 'svnrdump: E230001:' in errors:
417 409 reason = 'INVALID_CERTIFICATE'
418 410
419 411 if reason == 'UNKNOWN':
420 412 reason = 'UNKNOWN:{}'.format(errors)
421 413 raise Exception(
422 414 'Failed to dump the remote repository from %s. Reason:%s' % (
423 415 src_url, reason))
424 416 if load.returncode != 0:
425 417 raise Exception(
426 418 'Failed to load the dump of remote repository from %s.' %
427 419 (src_url, ))
428 420
429 421 def commit(self, wire, message, author, timestamp, updated, removed):
430 422 assert isinstance(message, str)
431 423 assert isinstance(author, str)
432 424
433 425 repo = self._factory.repo(wire)
434 426 fsobj = svn.repos.fs(repo)
435 427
436 428 rev = svn.fs.youngest_rev(fsobj)
437 429 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
438 430 txn_root = svn.fs.txn_root(txn)
439 431
440 432 for node in updated:
441 433 TxnNodeProcessor(node, txn_root).update()
442 434 for node in removed:
443 435 TxnNodeProcessor(node, txn_root).remove()
444 436
445 437 commit_id = svn.repos.fs_commit_txn(repo, txn)
446 438
447 439 if timestamp:
448 440 apr_time = apr_time_t(timestamp)
449 441 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
450 442 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
451 443
452 444 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
453 445 return commit_id
454 446
455 447 def diff(self, wire, rev1, rev2, path1=None, path2=None,
456 448 ignore_whitespace=False, context=3):
457 449
458 450 wire.update(cache=False)
459 451 repo = self._factory.repo(wire)
460 452 diff_creator = SvnDiffer(
461 453 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
462 454 try:
463 455 return diff_creator.generate_diff()
464 456 except svn.core.SubversionException as e:
465 457 log.exception(
466 458 "Error during diff operation operation. "
467 459 "Path might not exist %s, %s" % (path1, path2))
468 460 return ""
469 461
470 462 @reraise_safe_exceptions
471 463 def is_large_file(self, wire, path):
472 464 return False
473 465
474 466 @reraise_safe_exceptions
475 467 def is_binary(self, wire, rev, path):
476 468 cache_on, context_uid, repo_id = self._cache_on(wire)
477 469
478 470 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 471 def _is_binary(_repo_id, _rev, _path):
480 472 raw_bytes = self.get_file_content(wire, path, rev)
481 473 return raw_bytes and '\0' in raw_bytes
482 474
483 475 return _is_binary(repo_id, rev, path)
484 476
485 477 @reraise_safe_exceptions
486 478 def run_svn_command(self, wire, cmd, **opts):
487 479 path = wire.get('path', None)
488 480
489 481 if path and os.path.isdir(path):
490 482 opts['cwd'] = path
491 483
492 484 safe_call = False
493 485 if '_safe' in opts:
494 486 safe_call = True
495 487
496 488 svnenv = os.environ.copy()
497 489 svnenv.update(opts.pop('extra_env', {}))
498 490
499 491 _opts = {'env': svnenv, 'shell': False}
500 492
501 493 try:
502 494 _opts.update(opts)
503 495 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
504 496
505 497 return ''.join(p), ''.join(p.error)
506 498 except (EnvironmentError, OSError) as err:
507 499 cmd = ' '.join(cmd) # human friendly CMD
508 500 tb_err = ("Couldn't run svn command (%s).\n"
509 501 "Original error was:%s\n"
510 502 "Call options:%s\n"
511 503 % (cmd, err, _opts))
512 504 log.exception(tb_err)
513 505 if safe_call:
514 506 return '', err
515 507 else:
516 508 raise exceptions.VcsException()(tb_err)
517 509
    @reraise_safe_exceptions
    def install_hooks(self, wire, force=False):
        """Install RhodeCode's svn hooks into the repo at ``wire['path']``."""
        from vcsserver.hook_utils import install_svn_hooks
        repo_path = wire['path']
        binary_dir = settings.BINARY_DIR
        executable = None
        if binary_dir:
            # prefer the python interpreter bundled with the vcsserver
            executable = os.path.join(binary_dir, 'python')
        return install_svn_hooks(
            repo_path, executable=executable, force_create=force)
528 520
    @reraise_safe_exceptions
    def get_hooks_info(self, wire):
        """Report installed pre/post hook versions for the repository."""
        from vcsserver.hook_utils import (
            get_svn_pre_hook_version, get_svn_post_hook_version)
        repo_path = wire['path']
        return {
            'pre_version': get_svn_pre_hook_version(repo_path),
            'post_version': get_svn_post_hook_version(repo_path),
        }
538 530
539 531
class SvnDiffer(object):
    """
    Utility to create diffs based on difflib and the Subversion api
    """

    # Set per-node in _generate_node_diff; when True the unified diff body
    # is suppressed and readlines short-circuit.
    binary_content = False

    def __init__(
            self, repo, src_rev, src_path, tgt_rev, tgt_path,
            ignore_whitespace, context):
        self.repo = repo
        self.ignore_whitespace = ignore_whitespace
        self.context = context

        fsobj = svn.repos.fs(repo)

        # Target side: empty path means the repository root.
        self.tgt_rev = tgt_rev
        self.tgt_path = tgt_path or ''
        self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
        self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)

        # Source side falls back to the target path when not given.
        self.src_rev = src_rev
        self.src_path = src_path or self.tgt_path
        self.src_root = svn.fs.revision_root(fsobj, src_rev)
        self.src_kind = svn.fs.check_path(self.src_root, self.src_path)

        self._validate()

    def _validate(self):
        # Both sides must be the same node kind (file vs dir) unless one
        # of them does not exist (add/delete cases).
        if (self.tgt_kind != svn.core.svn_node_none and
                self.src_kind != svn.core.svn_node_none and
                self.src_kind != self.tgt_kind):
            # TODO: johbo: proper error handling
            raise Exception(
                "Source and target are not compatible for diff generation. "
                "Source type: %s, target type: %s" %
                (self.src_kind, self.tgt_kind))

    def generate_diff(self):
        """Return the full diff text for the configured revisions/paths."""
        buf = StringIO.StringIO()
        if self.tgt_kind == svn.core.svn_node_dir:
            self._generate_dir_diff(buf)
        else:
            self._generate_file_diff(buf)
        return buf.getvalue()

    def _generate_dir_diff(self, buf):
        # Collect per-node changes by driving an editor through dir_delta2,
        # then emit one file diff per changed node in path order.
        editor = DiffChangeEditor()
        editor_ptr, editor_baton = svn.delta.make_editor(editor)
        svn.repos.dir_delta2(
            self.src_root,
            self.src_path,
            '', # src_entry
            self.tgt_root,
            self.tgt_path,
            editor_ptr, editor_baton,
            authorization_callback_allow_all,
            False, # text_deltas
            svn.core.svn_depth_infinity, # depth
            False, # entry_props
            False, # ignore_ancestry
        )

        for path, __, change in sorted(editor.changes):
            self._generate_node_diff(
                buf, change, path, self.tgt_path, path, self.src_path)

    def _generate_file_diff(self, buf):
        # Derive the change type from node existence on either side;
        # None means a content modification.
        change = None
        if self.src_kind == svn.core.svn_node_none:
            change = "add"
        elif self.tgt_kind == svn.core.svn_node_none:
            change = "delete"
        tgt_base, tgt_path = vcspath.split(self.tgt_path)
        src_base, src_path = vcspath.split(self.src_path)
        self._generate_node_diff(
            buf, change, tgt_path, tgt_base, src_path, src_base)

    def _generate_node_diff(
            self, buf, change, tgt_path, tgt_base, src_path, src_base):
        """Write a git-style diff header and unified diff for one node."""

        if self.src_rev == self.tgt_rev and tgt_base == src_base:
            # makes consistent behaviour with git/hg to return empty diff if
            # we compare same revisions
            return

        tgt_full_path = vcspath.join(tgt_base, tgt_path)
        src_full_path = vcspath.join(src_base, src_path)

        self.binary_content = False
        mime_type = self._get_mime_type(tgt_full_path)

        # Anything with a non-text svn:mime-type is treated as binary.
        if mime_type and not mime_type.startswith('text'):
            self.binary_content = True
            buf.write("=" * 67 + '\n')
            buf.write("Cannot display: file marked as a binary type.\n")
            buf.write("svn:mime-type = %s\n" % mime_type)
        buf.write("Index: %s\n" % (tgt_path, ))
        buf.write("=" * 67 + '\n')
        buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
            'tgt_path': tgt_path})

        if change == 'add':
            # TODO: johbo: SVN is missing a zero here compared to git
            buf.write("new file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- /dev/null\t(revision 0)\n")
            src_lines = []
        else:
            if change == 'delete':
                buf.write("deleted file mode 10644\n")

            #TODO(marcink): intro to binary detection of svn patches
            # if self.binary_content:
            #     buf.write('GIT binary patch\n')

            buf.write("--- a/%s\t(revision %s)\n" % (
                src_path, self.src_rev))
            src_lines = self._svn_readlines(self.src_root, src_full_path)

        if change == 'delete':
            buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
            tgt_lines = []
        else:
            buf.write("+++ b/%s\t(revision %s)\n" % (
                tgt_path, self.tgt_rev))
            tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)

        if not self.binary_content:
            udiff = svn_diff.unified_diff(
                src_lines, tgt_lines, context=self.context,
                ignore_blank_lines=self.ignore_whitespace,
                ignore_case=False,
                ignore_space_changes=self.ignore_whitespace)
            buf.writelines(udiff)

    def _get_mime_type(self, path):
        # Look up svn:mime-type on the target side first, falling back to
        # the source side (e.g. for deleted nodes).
        try:
            mime_type = svn.fs.node_prop(
                self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
        except svn.core.SubversionException:
            mime_type = svn.fs.node_prop(
                self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
        return mime_type

    def _svn_readlines(self, fs_root, node_path):
        """Return the file's lines (keeping line endings), or [] for
        binary content and non-file nodes."""
        if self.binary_content:
            return []
        node_kind = svn.fs.check_path(fs_root, node_path)
        if node_kind not in (
                svn.core.svn_node_file, svn.core.svn_node_symlink):
            return []
        content = svn.core.Stream(
            svn.fs.file_contents(fs_root, node_path)).read()
        return content.splitlines(True)
699 691
700 692
class DiffChangeEditor(svn.delta.Editor):
    """
    Records changes between two given revisions
    """

    def __init__(self):
        # List of (path, node_kind_or_None, change) tuples collected while
        # svn.repos.dir_delta2 drives this editor.
        self.changes = []

    def delete_entry(self, path, revision, parent_baton, pool=None):
        self.changes.append((path, None, 'delete'))

    def add_file(
            self, path, parent_baton, copyfrom_path, copyfrom_revision,
            file_pool=None):
        self.changes.append((path, 'file', 'add'))

    def open_file(self, path, parent_baton, base_revision, file_pool=None):
        # open_file is invoked for modified files.
        self.changes.append((path, 'file', 'change'))
719 711
720 712
def authorization_callback_allow_all(root, path, pool):
    """Authz callback for svn.repos APIs that grants access to every path."""
    # All three arguments are ignored on purpose; access is unconditional.
    return True
723 715
724 716
class TxnNodeProcessor(object):
    """
    Utility to process the change of one node within a transaction root.

    It encapsulates the knowledge of how to add, update or remove
    a node for a given transaction root. The purpose is to support the method
    `SvnRemote.commit`.
    """

    def __init__(self, node, txn_root):
        # node is a dict with at least 'path' and 'content'; path must be a
        # plain byte string for the svn bindings.
        assert isinstance(node['path'], str)

        self.node = node
        self.txn_root = txn_root

    def update(self):
        """Create/refresh the node: parents first, then file, content, props."""
        self._ensure_parent_dirs()
        self._add_file_if_node_does_not_exist()
        self._update_file_content()
        self._update_file_properties()

    def remove(self):
        svn.fs.delete(self.txn_root, self.node['path'])
        # TODO: Clean up directory if empty

    def _ensure_parent_dirs(self):
        # Walk up until an existing ancestor is found, then create the
        # missing directories top-down (hence the reversed iteration).
        curdir = vcspath.dirname(self.node['path'])
        dirs_to_create = []
        while not self._svn_path_exists(curdir):
            dirs_to_create.append(curdir)
            curdir = vcspath.dirname(curdir)

        for curdir in reversed(dirs_to_create):
            log.debug('Creating missing directory "%s"', curdir)
            svn.fs.make_dir(self.txn_root, curdir)

    def _svn_path_exists(self, path):
        path_status = svn.fs.check_path(self.txn_root, path)
        return path_status != svn.core.svn_node_none

    def _add_file_if_node_does_not_exist(self):
        kind = svn.fs.check_path(self.txn_root, self.node['path'])
        if kind == svn.core.svn_node_none:
            svn.fs.make_file(self.txn_root, self.node['path'])

    def _update_file_content(self):
        # Replace the whole file content via a text delta against None
        # (i.e. not an incremental delta).
        assert isinstance(self.node['content'], str)
        handler, baton = svn.fs.apply_textdelta(
            self.txn_root, self.node['path'], None, None)
        svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)

    def _update_file_properties(self):
        # Python 2 dict iteration; properties default to an empty mapping.
        properties = self.node.get('properties', {})
        for key, value in properties.iteritems():
            svn.fs.change_node_prop(
                self.txn_root, self.node['path'], key, value)
781 773
782 774
def apr_time_t(timestamp):
    """
    Convert a Python timestamp into APR timestamp type apr_time_t
    """
    # APR counts microseconds since the epoch; Python timestamps are in
    # seconds, so scale by 1e6 (result stays a float, as before).
    microseconds_per_second = 1E6
    return timestamp * microseconds_per_second
788 780
789 781
def svn_opt_revision_value_t(num):
    """
    Put `num` into a `svn_opt_revision_value_t` structure.
    """
    # Build a revision object that addresses revision number `num`
    # explicitly (kind = svn_opt_revision_number).
    value = svn.core.svn_opt_revision_value_t()
    value.number = num
    revision = svn.core.svn_opt_revision_t()
    revision.kind = svn.core.svn_opt_revision_number
    revision.value = value
    return revision
@@ -1,16 +1,16 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,57 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import socket
19 19
20 20 import pytest
21 21
22 22
def pytest_addoption(parser):
    """Register the custom ``--repeat`` command line option with pytest."""
    parser.addoption(
        '--repeat', type=int, default=100,
        help="Number of repetitions in performance tests.")
27 27
28 28
@pytest.fixture(scope='session')
def repeat(request):
    """
    The number of repetitions is based on this fixture.

    Slower calls may divide it by 10 or 100. It is chosen in a way so that the
    tests are not too slow in our default test suite.
    """
    # Value comes from the --repeat option registered in pytest_addoption.
    return request.config.getoption('--repeat')
38 38
39 39
@pytest.fixture(scope='session')
def vcsserver_port(request):
    """Session-wide free TCP port for spawning a test vcsserver."""
    port = get_available_port()
    print('Using vcsserver port %s' % (port, ))
    return port
45 45
46 46
def get_available_port():
    """Ask the OS for a currently-free TCP port on the IPv4 loopback."""
    # Binding to port 0 makes the kernel pick an unused ephemeral port;
    # we read it back and release the socket immediately.
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    probe.bind(('127.0.0.1', 0))
    port = probe.getsockname()[1]
    probe.close()
    del probe
    return port
@@ -1,86 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import tempfile
21 21
22 22 import configobj
23 23
24 24
class ContextINI(object):
    """
    Allows to create a new test.ini file as a copy of existing one with edited
    data. If existing file is not present, it creates a new one. Example usage::

        with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
            print 'vcsserver --config=%s' % new_test_ini
    """

    def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
                 destroy=True):
        # ini_params is a list of single-entry dicts:
        # [{'section': {'key': 'val'}}, ...]
        self.ini_file_path = ini_file_path
        self.ini_params = ini_params
        self.new_path = None
        self.new_path_prefix = new_file_prefix or 'test'
        self.destroy = destroy

    def __enter__(self):
        """Create the temporary ini file and return its path."""
        _, pref = tempfile.mkstemp()
        loc = tempfile.gettempdir()
        self.new_path = os.path.join(loc, '{}_{}_{}'.format(
            pref, self.new_path_prefix, self.ini_file_path))

        # copy ini file and modify according to the params, if we re-use a file
        if os.path.isfile(self.ini_file_path):
            shutil.copy(self.ini_file_path, self.new_path)
        else:
            # create new dump file for configObj to write to.
            with open(self.new_path, 'wb'):
                pass

        config = configobj.ConfigObj(
            self.new_path, file_error=True, write_empty_values=True)

        # Each entry contributes exactly one section/key pair.
        # NOTE: .items()[0] is Python 2 only (dict views in Python 3).
        for data in self.ini_params:
            section, ini_params = data.items()[0]
            key, val = ini_params.items()[0]
            if section not in config:
                config[section] = {}
            config[section][key] = val

        config.write()
        return self.new_path

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Remove the generated file unless destroy=False was requested.
        if self.destroy:
            os.remove(self.new_path)
73 73
def no_newline_id_generator(test_name):
    """
    Generates a test name without spaces or newlines characters. Used for
    nicer output of progress of test
    """
    # Map whitespace characters to short readable markers so pytest ids
    # stay on a single line. (Removed the unused `org_name` local.)
    test_name = (str(test_name)
                 .replace('\n', '_N')
                 .replace('\r', '_N')
                 .replace('\t', '_T')
                 .replace(' ', '_S'))

    # An empty name would produce an empty pytest id; use a placeholder.
    return test_name or 'test-with-empty-name'
@@ -1,160 +1,160 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
# Stub ref mapping (ref name -> commit sha) used to fake dulwich
# `fetch`/`determine_wants` interactions in the tests below.
SAMPLE_REFS = {
    'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
    'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
    'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
    'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
    'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
}
34 34
35 35
@pytest.fixture
def git_remote():
    """
    A GitRemote instance with a mock factory.
    """
    # The mocked factory avoids touching a real repository on disk.
    factory = Mock()
    remote = git.GitRemote(factory)
    return remote
44 44
45 45
def test_discover_git_version(git_remote):
    # Smoke test: the installed git binary must report some version string.
    version = git_remote.discover_git_version()
    assert version
49 49
50 50
class TestGitFetch(object):
    """Tests for GitRemote.pull / get_remote_refs with a mocked dulwich."""

    def setup(self):
        # Factory returns the same mock repo for every call.
        self.mock_repo = Mock()
        factory = Mock()
        factory.repo = Mock(return_value=self.mock_repo)
        self.remote_git = git.GitRemote(factory)

    def test_fetches_all_when_no_commit_ids_specified(self):
        # With no refs argument, pull must fall back to determine_wants_all.
        def side_effect(determine_wants, *args, **kwargs):
            determine_wants(SAMPLE_REFS)

        with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
            mock_fetch.side_effect = side_effect
            self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
            determine_wants = self.mock_repo.object_store.determine_wants_all
            determine_wants.assert_called_once_with(SAMPLE_REFS)

    def test_fetches_specified_commits(self):
        # When refs are given, only their shas should be requested and
        # determine_wants_all must not be used.
        selected_refs = {
            'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
            'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
        }

        def side_effect(determine_wants, *args, **kwargs):
            result = determine_wants(SAMPLE_REFS)
            assert sorted(result) == sorted(selected_refs.values())
            return result

        with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
            mock_fetch.side_effect = side_effect
            self.remote_git.pull(
                wire={}, url='/tmp/', apply_refs=False,
                refs=selected_refs.keys())
            determine_wants = self.mock_repo.object_store.determine_wants_all
            assert determine_wants.call_count == 0

    def test_get_remote_refs(self):
        # get_remote_refs should delegate to Repo.get_refs and return its
        # mapping unchanged.
        factory = Mock()
        remote_git = git.GitRemote(factory)
        url = 'http://example.com/test/test.git'
        sample_refs = {
            'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
            'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
        }

        with patch('vcsserver.git.Repo', create=False) as mock_repo:
            mock_repo().get_refs.return_value = sample_refs
            remote_refs = remote_git.get_remote_refs(wire={}, url=url)
            mock_repo().get_refs.assert_called_once_with()
            assert remote_refs == sample_refs
101 101
102 102
class TestReraiseSafeExceptions(object):
    """Tests for the git.reraise_safe_exceptions decorator."""

    def test_method_decorated_with_reraise_safe_exceptions(self):
        # Every public GitRemote method must carry the decorator; compare
        # code objects to prove the wrapper is applied.
        # NOTE: im_func is Python 2 only.
        factory = Mock()
        git_remote = git.GitRemote(factory)

        def fake_function():
            return None

        decorator = git.reraise_safe_exceptions(fake_function)

        methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
        for method_name, method in methods:
            if not method_name.startswith('_'):
                assert method.im_func.__code__ == decorator.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
        (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
        (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
        (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
        (dulwich.errors.HangupException(), 'error'),
        (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        # Each dulwich error must be translated into a plain Exception
        # tagged with the expected _vcs_kind marker.
        @git.reraise_safe_exceptions
        def fake_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            fake_method()
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type
136 136
137 137
class TestDulwichRepoWrapper(object):
    def test_calls_close_on_delete(self):
        # The Repo wrapper must close the underlying dulwich repo when it
        # is garbage collected; isdir is patched so no filesystem is needed.
        isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
        with isdir_patcher:
            repo = git.Repo('/tmp/abcde')
        with patch.object(git.DulwichRepo, 'close') as close_mock:
            del repo
        close_mock.assert_called_once_with()
146 146
147 147
class TestGitFactory(object):
    def test_create_repo_returns_dulwich_wrapper(self):
        """_create_repo must return the project's Repo wrapper type."""

        # Stub the dogpile cache regions so GitFactory can be constructed
        # without real cache configuration.
        with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
            mock.side_effect = {'repo_objects': ''}
            factory = git.GitFactory()
            wire = {
                'path': '/tmp/abcde'
            }
            isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
            with isdir_patcher:
                result = factory._create_repo(wire, True)
            assert isinstance(result, git.Repo)
@@ -1,108 +1,108 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
class TestDiff(object):
    def test_raising_safe_exception_when_lookup_failed(self):
        """A mercurial LookupError during diff must surface as a plain
        Exception tagged with _vcs_kind == 'lookup'."""
        factory = Mock()
        hg_remote = hg.HgRemote(factory)
        with patch('mercurial.patch.diff') as diff_mock:
            diff_mock.side_effect = LookupError(
                'deadbeef', 'index', 'message')
            with pytest.raises(Exception) as exc_info:
                hg_remote.diff(
                    wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
                    file_filter=None, opt_git=True, opt_ignorews=True,
                    context=3)
            assert type(exc_info.value) == Exception
            assert exc_info.value._vcs_kind == 'lookup'
44 44
45 45
class TestReraiseSafeExceptions(object):
    """Tests for the hg.reraise_safe_exceptions decorator."""

    def test_method_decorated_with_reraise_safe_exceptions(self):
        # Every public HgRemote method must carry the decorator.
        # NOTE: im_func is Python 2 only.
        factory = Mock()
        hg_remote = hg.HgRemote(factory)
        methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
        decorator = hg.reraise_safe_exceptions(None)
        for method_name, method in methods:
            if not method_name.startswith('_'):
                assert method.im_func.__code__ == decorator.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (hgcompat.Abort(), 'abort'),
        (hgcompat.InterventionRequired(), 'abort'),
        (hgcompat.RepoLookupError(), 'lookup'),
        (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
        (hgcompat.RepoError(), 'error'),
        (hgcompat.RequirementError(), 'requirement'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        # Each mercurial error class maps to a specific _vcs_kind tag.
        @hg.reraise_safe_exceptions
        def fake_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            fake_method()
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type

    def test_keeps_original_traceback(self):
        # The re-raised exception must preserve the original traceback tail
        # so the real failure site stays visible in logs.
        @hg.reraise_safe_exceptions
        def fake_method():
            try:
                raise hgcompat.Abort()
            except:
                self.original_traceback = traceback.format_tb(
                    sys.exc_info()[2])
                raise

        try:
            fake_method()
        except Exception:
            new_traceback = traceback.format_tb(sys.exc_info()[2])

        new_traceback_tail = new_traceback[-len(self.original_traceback):]
        assert new_traceback_tail == self.original_traceback

    def test_maps_unknow_exceptions_to_unhandled(self):
        # Unknown exception types fall through to the 'unhandled' bucket.
        @hg.reraise_safe_exceptions
        def stub_method():
            raise ValueError('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'unhandled'

    def test_does_not_map_known_exceptions(self):
        # Exceptions already carrying a _vcs_kind are passed through as-is.
        @hg.reraise_safe_exceptions
        def stub_method():
            raise exceptions.LookupException()('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'lookup'
@@ -1,124 +1,124 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 19 import pytest
20 20
21 21 from vcsserver import hgcompat, hgpatches
22 22
23 23
24 24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25 25
26 26
def test_patch_largefiles_capabilities_applies_patch(
        patched_capabilities):
    # After patching, the largefiles proto must expose the dynamic wrapper.
    lfproto = hgcompat.largefiles.proto
    hgpatches.patch_largefiles_capabilities()
    assert lfproto._capabilities.func_name == '_dynamic_capabilities'
32 32
33 33
def test_dynamic_capabilities_uses_original_function_if_not_enabled(
        stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
        orig_capabilities):
    # With the largefiles extension disabled, the wrapper must fall back to
    # the original capabilities and not advertise largefiles=serve.
    dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
        hgcompat.largefiles.proto, stub_extensions)

    caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)

    stub_extensions.assert_called_once_with(stub_ui)
    assert LARGEFILES_CAPABILITY not in caps
44 44
45 45
def test_dynamic_capabilities_ignores_updated_capabilities(
        stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
        orig_capabilities):
    # The wrapper must not call a _capabilities function swapped in later.
    stub_extensions.return_value = [('largefiles', mock.Mock())]
    dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
        hgcompat.largefiles.proto, stub_extensions)

    # This happens when the extension is loaded for the first time, important
    # to ensure that an updated function is correctly picked up.
    hgcompat.largefiles.proto._capabilities = mock.Mock(
        side_effect=Exception('Must not be called'))

    dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)
59 59
60 60
def test_dynamic_capabilities_uses_largefiles_if_enabled(
        stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities,
        orig_capabilities):
    # With largefiles enabled, the capability string must be advertised.
    stub_extensions.return_value = [('largefiles', mock.Mock())]

    dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
        hgcompat.largefiles.proto, stub_extensions)

    caps = dynamic_capabilities(orig_capabilities, stub_repo, stub_proto)

    stub_extensions.assert_called_once_with(stub_ui)
    assert LARGEFILES_CAPABILITY in caps
73 73
74 74
def test_hgsubversion_import():
    # Import smoke test: hgsubversion must be installed and importable.
    from hgsubversion import svnrepo
    assert svnrepo
78 78
79 79
@pytest.fixture
def patched_capabilities(request):
    """
    Patch in `capabilitiesorig` and restore both capability functions.
    """
    lfproto = hgcompat.largefiles.proto
    orig_capabilities = lfproto._capabilities

    # Undo any monkey patching done by a test once it finishes.
    @request.addfinalizer
    def restore():
        lfproto._capabilities = orig_capabilities
91 91
92 92
@pytest.fixture
def stub_repo(stub_ui):
    """Mock repository object wired to the shared stub ui."""
    repo = mock.Mock()
    repo.ui = stub_ui
    return repo
98 98
99 99
@pytest.fixture
def stub_proto(stub_ui):
    """Mock wire protocol object wired to the shared stub ui."""
    proto = mock.Mock()
    proto.ui = stub_ui
    return proto
105 105
106 106
@pytest.fixture
def orig_capabilities():
    """A plain capabilities function returning mercurial's stock caps."""
    from mercurial.wireprotov1server import wireprotocaps

    def _capabilities(repo, proto):
        return wireprotocaps
    return _capabilities
114 114
115 115
@pytest.fixture
def stub_ui():
    # A real mercurial ui instance; cheap to create and state-free here.
    return hgcompat.ui.ui()
119 119
120 120
@pytest.fixture
def stub_extensions():
    # Mimics mercurial's extensions.extensions(ui): returns no extensions
    # by default; tests override return_value to simulate enabled ones.
    extensions = mock.Mock(return_value=tuple())
    return extensions
@@ -1,241 +1,241 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import contextlib
19 19 import io
20 20 import threading
21 21 from BaseHTTPServer import BaseHTTPRequestHandler
22 22 from SocketServer import TCPServer
23 23
24 24 import mercurial.ui
25 25 import mock
26 26 import pytest
27 27 import simplejson as json
28 28
29 29 from vcsserver import hooks
30 30
31 31
32 32 def get_hg_ui(extras=None):
33 33 """Create a Config object with a valid RC_SCM_DATA entry."""
34 34 extras = extras or {}
35 35 required_extras = {
36 36 'username': '',
37 37 'repository': '',
38 38 'locked_by': '',
39 39 'scm': '',
40 40 'make_lock': '',
41 41 'action': '',
42 42 'ip': '',
43 43 'hooks_uri': 'fake_hooks_uri',
44 44 }
45 45 required_extras.update(extras)
46 46 hg_ui = mercurial.ui.ui()
47 47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48 48
49 49 return hg_ui
50 50
51 51
52 52 def test_git_pre_receive_is_disabled():
53 53 extras = {'hooks': ['pull']}
54 54 response = hooks.git_pre_receive(None, None,
55 55 {'RC_SCM_DATA': json.dumps(extras)})
56 56
57 57 assert response == 0
58 58
59 59
60 60 def test_git_post_receive_is_disabled():
61 61 extras = {'hooks': ['pull']}
62 62 response = hooks.git_post_receive(None, '',
63 63 {'RC_SCM_DATA': json.dumps(extras)})
64 64
65 65 assert response == 0
66 66
67 67
68 68 def test_git_post_receive_calls_repo_size():
69 69 extras = {'hooks': ['push', 'repo_size']}
70 70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 71 hooks.git_post_receive(
72 72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
73 73 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
74 74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
75 75 expected_calls = [
76 76 mock.call('repo_size', extras, mock.ANY),
77 77 mock.call('post_push', extras, mock.ANY),
78 78 ]
79 79 assert call_hook_mock.call_args_list == expected_calls
80 80
81 81
82 82 def test_git_post_receive_does_not_call_disabled_repo_size():
83 83 extras = {'hooks': ['push']}
84 84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 85 hooks.git_post_receive(
86 86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
87 87 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
88 88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
89 89 expected_calls = [
90 90 mock.call('post_push', extras, mock.ANY)
91 91 ]
92 92 assert call_hook_mock.call_args_list == expected_calls
93 93
94 94
95 95 def test_repo_size_exception_does_not_affect_git_post_receive():
96 96 extras = {'hooks': ['push', 'repo_size']}
97 97 status = 0
98 98
99 99 def side_effect(name, *args, **kwargs):
100 100 if name == 'repo_size':
101 101 raise Exception('Fake exception')
102 102 else:
103 103 return status
104 104
105 105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
106 106 call_hook_mock.side_effect = side_effect
107 107 result = hooks.git_post_receive(
108 108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
109 109 assert result == status
110 110
111 111
112 112 def test_git_pre_pull_is_disabled():
113 113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
114 114
115 115
116 116 def test_git_post_pull_is_disabled():
117 117 assert (
118 118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
119 119
120 120
121 121 class TestGetHooksClient(object):
122 122
123 123 def test_returns_http_client_when_protocol_matches(self):
124 124 hooks_uri = 'localhost:8000'
125 125 result = hooks._get_hooks_client({
126 126 'hooks_uri': hooks_uri,
127 127 'hooks_protocol': 'http'
128 128 })
129 129 assert isinstance(result, hooks.HooksHttpClient)
130 130 assert result.hooks_uri == hooks_uri
131 131
132 132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
133 133 fake_module = mock.Mock()
134 134 import_patcher = mock.patch.object(
135 135 hooks.importlib, 'import_module', return_value=fake_module)
136 136 fake_module_name = 'fake.module'
137 137 with import_patcher as import_mock:
138 138 result = hooks._get_hooks_client(
139 139 {'hooks_module': fake_module_name})
140 140
141 141 import_mock.assert_called_once_with(fake_module_name)
142 142 assert isinstance(result, hooks.HooksDummyClient)
143 143 assert result._hooks_module == fake_module
144 144
145 145
146 146 class TestHooksHttpClient(object):
147 147 def test_init_sets_hooks_uri(self):
148 148 uri = 'localhost:3000'
149 149 client = hooks.HooksHttpClient(uri)
150 150 assert client.hooks_uri == uri
151 151
152 152 def test_serialize_returns_json_string(self):
153 153 client = hooks.HooksHttpClient('localhost:3000')
154 154 hook_name = 'test'
155 155 extras = {
156 156 'first': 1,
157 157 'second': 'two'
158 158 }
159 159 result = client._serialize(hook_name, extras)
160 160 expected_result = json.dumps({
161 161 'method': hook_name,
162 162 'extras': extras
163 163 })
164 164 assert result == expected_result
165 165
166 166 def test_call_queries_http_server(self, http_mirror):
167 167 client = hooks.HooksHttpClient(http_mirror.uri)
168 168 hook_name = 'test'
169 169 extras = {
170 170 'first': 1,
171 171 'second': 'two'
172 172 }
173 173 result = client(hook_name, extras)
174 174 expected_result = {
175 175 'method': hook_name,
176 176 'extras': extras
177 177 }
178 178 assert result == expected_result
179 179
180 180
181 181 class TestHooksDummyClient(object):
182 182 def test_init_imports_hooks_module(self):
183 183 hooks_module_name = 'rhodecode.fake.module'
184 184 hooks_module = mock.MagicMock()
185 185
186 186 import_patcher = mock.patch.object(
187 187 hooks.importlib, 'import_module', return_value=hooks_module)
188 188 with import_patcher as import_mock:
189 189 client = hooks.HooksDummyClient(hooks_module_name)
190 190 import_mock.assert_called_once_with(hooks_module_name)
191 191 assert client._hooks_module == hooks_module
192 192
193 193 def test_call_returns_hook_result(self):
194 194 hooks_module_name = 'rhodecode.fake.module'
195 195 hooks_module = mock.MagicMock()
196 196 import_patcher = mock.patch.object(
197 197 hooks.importlib, 'import_module', return_value=hooks_module)
198 198 with import_patcher:
199 199 client = hooks.HooksDummyClient(hooks_module_name)
200 200
201 201 result = client('post_push', {})
202 202 hooks_module.Hooks.assert_called_once_with()
203 203 assert result == hooks_module.Hooks().__enter__().post_push()
204 204
205 205
206 206 @pytest.fixture
207 207 def http_mirror(request):
208 208 server = MirrorHttpServer()
209 209 request.addfinalizer(server.stop)
210 210 return server
211 211
212 212
213 213 class MirrorHttpHandler(BaseHTTPRequestHandler):
214 214 def do_POST(self):
215 215 length = int(self.headers['Content-Length'])
216 216 body = self.rfile.read(length).decode('utf-8')
217 217 self.send_response(200)
218 218 self.end_headers()
219 219 self.wfile.write(body)
220 220
221 221
222 222 class MirrorHttpServer(object):
223 223 ip_address = '127.0.0.1'
224 224 port = 0
225 225
226 226 def __init__(self):
227 227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
228 228 _, self.port = self._daemon.server_address
229 229 self._thread = threading.Thread(target=self._daemon.serve_forever)
230 230 self._thread.daemon = True
231 231 self._thread.start()
232 232
233 233 def stop(self):
234 234 self._daemon.shutdown()
235 235 self._thread.join()
236 236 self._daemon = None
237 237 self._thread = None
238 238
239 239 @property
240 240 def uri(self):
241 241 return '{}:{}'.format(self.ip_address, self.port)
@@ -1,206 +1,206 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import stat
21 21 import pytest
22 22 import vcsserver
23 23 import tempfile
24 24 from vcsserver import hook_utils
25 25 from vcsserver.tests.fixture import no_newline_id_generator
26 26 from vcsserver.utils import AttributeDict
27 27
28 28
29 29 class TestCheckRhodecodeHook(object):
30 30
31 31 def test_returns_false_when_hook_file_is_wrong_found(self, tmpdir):
32 32 hook = os.path.join(str(tmpdir), 'fake_hook_file.py')
33 33 with open(hook, 'wb') as f:
34 34 f.write('dummy test')
35 35 result = hook_utils.check_rhodecode_hook(hook)
36 36 assert result is False
37 37
38 38 def test_returns_true_when_no_hook_file_found(self, tmpdir):
39 39 hook = os.path.join(str(tmpdir), 'fake_hook_file_not_existing.py')
40 40 result = hook_utils.check_rhodecode_hook(hook)
41 41 assert result
42 42
43 43 @pytest.mark.parametrize("file_content, expected_result", [
44 44 ("RC_HOOK_VER = '3.3.3'\n", True),
45 45 ("RC_HOOK = '3.3.3'\n", False),
46 46 ], ids=no_newline_id_generator)
47 47 def test_signatures(self, file_content, expected_result, tmpdir):
48 48 hook = os.path.join(str(tmpdir), 'fake_hook_file_1.py')
49 49 with open(hook, 'wb') as f:
50 50 f.write(file_content)
51 51
52 52 result = hook_utils.check_rhodecode_hook(hook)
53 53
54 54 assert result is expected_result
55 55
56 56
57 57 class BaseInstallHooks(object):
58 58 HOOK_FILES = ()
59 59
60 60 def _check_hook_file_mode(self, file_path):
61 61 assert os.path.exists(file_path), 'path %s missing' % file_path
62 62 stat_info = os.stat(file_path)
63 63
64 64 file_mode = stat.S_IMODE(stat_info.st_mode)
65 65 expected_mode = int('755', 8)
66 66 assert expected_mode == file_mode
67 67
68 68 def _check_hook_file_content(self, file_path, executable):
69 69 executable = executable or sys.executable
70 70 with open(file_path, 'rt') as hook_file:
71 71 content = hook_file.read()
72 72
73 73 expected_env = '#!{}'.format(executable)
74 74 expected_rc_version = "\nRC_HOOK_VER = '{}'\n".format(
75 75 vcsserver.__version__)
76 76 assert content.strip().startswith(expected_env)
77 77 assert expected_rc_version in content
78 78
79 79 def _create_fake_hook(self, file_path, content):
80 80 with open(file_path, 'w') as hook_file:
81 81 hook_file.write(content)
82 82
83 83 def create_dummy_repo(self, repo_type):
84 84 tmpdir = tempfile.mkdtemp()
85 85 repo = AttributeDict()
86 86 if repo_type == 'git':
87 87 repo.path = os.path.join(tmpdir, 'test_git_hooks_installation_repo')
88 88 os.makedirs(repo.path)
89 89 os.makedirs(os.path.join(repo.path, 'hooks'))
90 90 repo.bare = True
91 91
92 92 elif repo_type == 'svn':
93 93 repo.path = os.path.join(tmpdir, 'test_svn_hooks_installation_repo')
94 94 os.makedirs(repo.path)
95 95 os.makedirs(os.path.join(repo.path, 'hooks'))
96 96
97 97 return repo
98 98
99 99 def check_hooks(self, repo_path, repo_bare=True):
100 100 for file_name in self.HOOK_FILES:
101 101 if repo_bare:
102 102 file_path = os.path.join(repo_path, 'hooks', file_name)
103 103 else:
104 104 file_path = os.path.join(repo_path, '.git', 'hooks', file_name)
105 105 self._check_hook_file_mode(file_path)
106 106 self._check_hook_file_content(file_path, sys.executable)
107 107
108 108
109 109 class TestInstallGitHooks(BaseInstallHooks):
110 110 HOOK_FILES = ('pre-receive', 'post-receive')
111 111
112 112 def test_hooks_are_installed(self):
113 113 repo = self.create_dummy_repo('git')
114 114 result = hook_utils.install_git_hooks(repo.path, repo.bare)
115 115 assert result
116 116 self.check_hooks(repo.path, repo.bare)
117 117
118 118 def test_hooks_are_replaced(self):
119 119 repo = self.create_dummy_repo('git')
120 120 hooks_path = os.path.join(repo.path, 'hooks')
121 121 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
122 122 self._create_fake_hook(
123 123 file_path, content="RC_HOOK_VER = 'abcde'\n")
124 124
125 125 result = hook_utils.install_git_hooks(repo.path, repo.bare)
126 126 assert result
127 127 self.check_hooks(repo.path, repo.bare)
128 128
129 129 def test_non_rc_hooks_are_not_replaced(self):
130 130 repo = self.create_dummy_repo('git')
131 131 hooks_path = os.path.join(repo.path, 'hooks')
132 132 non_rc_content = 'echo "non rc hook"\n'
133 133 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
134 134 self._create_fake_hook(
135 135 file_path, content=non_rc_content)
136 136
137 137 result = hook_utils.install_git_hooks(repo.path, repo.bare)
138 138 assert result
139 139
140 140 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
141 141 with open(file_path, 'rt') as hook_file:
142 142 content = hook_file.read()
143 143 assert content == non_rc_content
144 144
145 145 def test_non_rc_hooks_are_replaced_with_force_flag(self):
146 146 repo = self.create_dummy_repo('git')
147 147 hooks_path = os.path.join(repo.path, 'hooks')
148 148 non_rc_content = 'echo "non rc hook"\n'
149 149 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
150 150 self._create_fake_hook(
151 151 file_path, content=non_rc_content)
152 152
153 153 result = hook_utils.install_git_hooks(
154 154 repo.path, repo.bare, force_create=True)
155 155 assert result
156 156 self.check_hooks(repo.path, repo.bare)
157 157
158 158
159 159 class TestInstallSvnHooks(BaseInstallHooks):
160 160 HOOK_FILES = ('pre-commit', 'post-commit')
161 161
162 162 def test_hooks_are_installed(self):
163 163 repo = self.create_dummy_repo('svn')
164 164 result = hook_utils.install_svn_hooks(repo.path)
165 165 assert result
166 166 self.check_hooks(repo.path)
167 167
168 168 def test_hooks_are_replaced(self):
169 169 repo = self.create_dummy_repo('svn')
170 170 hooks_path = os.path.join(repo.path, 'hooks')
171 171 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
172 172 self._create_fake_hook(
173 173 file_path, content="RC_HOOK_VER = 'abcde'\n")
174 174
175 175 result = hook_utils.install_svn_hooks(repo.path)
176 176 assert result
177 177 self.check_hooks(repo.path)
178 178
179 179 def test_non_rc_hooks_are_not_replaced(self):
180 180 repo = self.create_dummy_repo('svn')
181 181 hooks_path = os.path.join(repo.path, 'hooks')
182 182 non_rc_content = 'echo "non rc hook"\n'
183 183 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
184 184 self._create_fake_hook(
185 185 file_path, content=non_rc_content)
186 186
187 187 result = hook_utils.install_svn_hooks(repo.path)
188 188 assert result
189 189
190 190 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
191 191 with open(file_path, 'rt') as hook_file:
192 192 content = hook_file.read()
193 193 assert content == non_rc_content
194 194
195 195 def test_non_rc_hooks_are_replaced_with_force_flag(self):
196 196 repo = self.create_dummy_repo('svn')
197 197 hooks_path = os.path.join(repo.path, 'hooks')
198 198 non_rc_content = 'echo "non rc hook"\n'
199 199 for file_path in [os.path.join(hooks_path, f) for f in self.HOOK_FILES]:
200 200 self._create_fake_hook(
201 201 file_path, content=non_rc_content)
202 202
203 203 result = hook_utils.install_svn_hooks(
204 204 repo.path, force_create=True)
205 205 assert result
206 206 self.check_hooks(repo.path, )
@@ -1,57 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 19 import pytest
20 20
21 21 from vcsserver import http_main
22 22 from vcsserver.base import obfuscate_qs
23 23
24 24
25 25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 28 http_main.main({})
29 29 patch_largefiles_capabilities.assert_called_once_with()
30 30
31 31
32 32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 34 @mock.patch(
35 35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 36 mock.Mock(side_effect=Exception("Must not be called")))
37 37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 38 http_main.main({})
39 39
40 40
41 41 @pytest.mark.parametrize('given, expected', [
42 42 ('bad', 'bad'),
43 43 ('query&foo=bar', 'query&foo=bar'),
44 44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 45 ('a;b;c;query&foo=bar&auth_token=secret',
46 46 'a&b&c&query&foo=bar&auth_token=*****'),
47 47 ('', ''),
48 48 (None, None),
49 49 ('foo=bar', 'foo=bar'),
50 50 ('auth_token=secret', 'auth_token=*****'),
51 51 ('auth_token=secret&api_key=secret2',
52 52 'auth_token=*****&api_key=*****'),
53 53 ('auth_token=secret&api_key=secret2&param=value',
54 54 'auth_token=*****&api_key=*****&param=value'),
55 55 ])
56 56 def test_obfuscate_qs(given, expected):
57 57 assert expected == obfuscate_qs(given)
@@ -1,249 +1,249 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19
20 20 import dulwich.protocol
21 21 import mock
22 22 import pytest
23 23 import webob
24 24 import webtest
25 25
26 26 from vcsserver import hooks, pygrack
27 27
28 28 # pylint: disable=redefined-outer-name,protected-access
29 29
30 30
31 31 @pytest.fixture()
32 32 def pygrack_instance(tmpdir):
33 33 """
34 34 Creates a pygrack app instance.
35 35
36 36 Right now, it does not much helpful regarding the passed directory.
37 37 It just contains the required folders to pass the signature test.
38 38 """
39 39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 40 tmpdir.mkdir(dir_name)
41 41
42 42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43 43
44 44
45 45 @pytest.fixture()
46 46 def pygrack_app(pygrack_instance):
47 47 """
48 48 Creates a pygrack app wrapped in webtest.TestApp.
49 49 """
50 50 return webtest.TestApp(pygrack_instance)
51 51
52 52
53 53 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 55 expect_errors=True)
56 56
57 57 assert response.status_int == 403
58 58
59 59
60 60 def test_invalid_endpoint_returns_403(pygrack_app):
61 61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62 62
63 63 assert response.status_int == 403
64 64
65 65
66 66 @pytest.mark.parametrize('sideband', [
67 67 'side-band-64k',
68 68 'side-band',
69 69 'side-band no-progress',
70 70 ])
71 71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 72 request = ''.join([
73 73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 74 'multi_ack %s ofs-delta\n' % sideband,
75 75 '0000',
76 76 '0009done\n',
77 77 ])
78 78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 79 return_value=hooks.HookResponse(1, 'foo')):
80 80 response = pygrack_app.post(
81 81 '/git-upload-pack', params=request,
82 82 content_type='application/x-git-upload-pack')
83 83
84 84 data = io.BytesIO(response.body)
85 85 proto = dulwich.protocol.Protocol(data.read, None)
86 86 packets = list(proto.read_pkt_seq())
87 87
88 88 expected_packets = [
89 89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
90 90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
91 91 ]
92 92 assert packets == expected_packets
93 93
94 94
95 95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 96 request = ''.join([
97 97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 98 'multi_ack ofs-delta\n'
99 99 '0000',
100 100 '0009done\n',
101 101 ])
102 102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 103 return_value=hooks.HookResponse(1, 'foo')):
104 104 response = pygrack_app.post(
105 105 '/git-upload-pack', params=request,
106 106 content_type='application/x-git-upload-pack')
107 107
108 108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109 109
110 110
111 111 def test_pull_has_hook_messages(pygrack_app):
112 112 request = ''.join([
113 113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 114 'multi_ack side-band-64k ofs-delta\n'
115 115 '0000',
116 116 '0009done\n',
117 117 ])
118 118 with mock.patch('vcsserver.hooks.git_pre_pull',
119 119 return_value=hooks.HookResponse(0, 'foo')):
120 120 with mock.patch('vcsserver.hooks.git_post_pull',
121 121 return_value=hooks.HookResponse(1, 'bar')):
122 122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 123 return_value=['0008NAK\n0009subp\n0000']):
124 124 response = pygrack_app.post(
125 125 '/git-upload-pack', params=request,
126 126 content_type='application/x-git-upload-pack')
127 127
128 128 data = io.BytesIO(response.body)
129 129 proto = dulwich.protocol.Protocol(data.read, None)
130 130 packets = list(proto.read_pkt_seq())
131 131
132 132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
133 133
134 134
135 135 def test_get_want_capabilities(pygrack_instance):
136 136 data = io.BytesIO(
137 137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
139 139
140 140 request = webob.Request({
141 141 'wsgi.input': data,
142 142 'REQUEST_METHOD': 'POST',
143 143 'webob.is_body_seekable': True
144 144 })
145 145
146 146 capabilities = pygrack_instance._get_want_capabilities(request)
147 147
148 148 assert capabilities == frozenset(
149 149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
150 150 assert data.tell() == 0
151 151
152 152
153 153 @pytest.mark.parametrize('data,capabilities,expected', [
154 154 ('foo', [], []),
155 155 ('', ['side-band-64k'], []),
156 156 ('', ['side-band'], []),
157 157 ('foo', ['side-band-64k'], ['0008\x02foo']),
158 158 ('foo', ['side-band'], ['0008\x02foo']),
159 159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
160 160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
161 161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
162 162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
163 163 ], ids=[
164 164 'foo-empty',
165 165 'empty-64k', 'empty',
166 166 'foo-64k', 'foo',
167 167 'f-1000-64k', 'f-1000',
168 168 'f-65520-64k', 'f-65520'])
169 169 def test_get_messages(pygrack_instance, data, capabilities, expected):
170 170 messages = pygrack_instance._get_messages(data, capabilities)
171 171
172 172 assert messages == expected
173 173
174 174
175 175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 176 # Unexpected response
177 177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
178 178 # No sideband
179 179 ('no-sideband', [], 'foo', 'bar'),
180 180 # No messages
181 181 ('no-messages', ['side-band-64k'], '', ''),
182 182 ])
183 183 def test_inject_messages_to_response_nothing_to_do(
184 184 pygrack_instance, response, capabilities, pre_pull_messages,
185 185 post_pull_messages):
186 186 new_response = pygrack_instance._inject_messages_to_response(
187 187 response, capabilities, pre_pull_messages, post_pull_messages)
188 188
189 189 assert new_response == response
190 190
191 191
192 192 @pytest.mark.parametrize('capabilities', [
193 193 ['side-band'],
194 194 ['side-band-64k'],
195 195 ])
196 196 def test_inject_messages_to_response_single_element(pygrack_instance,
197 197 capabilities):
198 198 response = ['0008NAK\n0009subp\n0000']
199 199 new_response = pygrack_instance._inject_messages_to_response(
200 200 response, capabilities, 'foo', 'bar')
201 201
202 202 expected_response = [
203 203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
204 204
205 205 assert new_response == expected_response
206 206
207 207
208 208 @pytest.mark.parametrize('capabilities', [
209 209 ['side-band'],
210 210 ['side-band-64k'],
211 211 ])
212 212 def test_inject_messages_to_response_multi_element(pygrack_instance,
213 213 capabilities):
214 214 response = [
215 215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 216 new_response = pygrack_instance._inject_messages_to_response(
217 217 response, capabilities, 'foo', 'bar')
218 218
219 219 expected_response = [
220 220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
221 221 '000asubp4\n', '0008\x02bar', '0000'
222 222 ]
223 223
224 224 assert new_response == expected_response
225 225
226 226
227 227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
228 228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
229 229
230 230 assert response == [pygrack.GitRepository.EMPTY_PACK]
231 231
232 232
233 233 @pytest.mark.parametrize('capabilities', [
234 234 ['side-band'],
235 235 ['side-band-64k'],
236 236 ['side-band-64k', 'no-progress'],
237 237 ])
238 238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 239 response = pygrack_instance._build_failed_pre_pull_response(
240 240 capabilities, 'foo')
241 241
242 242 expected_response = [
243 243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
244 244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
245 245 pygrack.GitRepository.EMPTY_PACK),
246 246 '0000',
247 247 ]
248 248
249 249 assert response == expected_response
@@ -1,86 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19
20 20 import mercurial.hg
21 21 import mercurial.ui
22 22 import mercurial.error
23 23 import mock
24 24 import pytest
25 25 import webtest
26 26
27 27 from vcsserver import scm_app
28 28
29 29
30 30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 32 app = webtest.TestApp(scm_app.HgWeb(repo))
33 33
34 34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35 35
36 36 assert response.status_int == 400
37 37
38 38
39 39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 41 config = (
42 42 ('paths', 'default', ''),
43 43 )
44 44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 45 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 46 with pytest.raises(Exception):
47 47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48 48
49 49
50 50 def test_git_returns_not_found(tmpdir):
51 51 app = webtest.TestApp(
52 52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53 53
54 54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 55 expect_errors=True)
56 56
57 57 assert response.status_int == 404
58 58
59 59
60 60 def test_git(tmpdir):
61 61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 62 tmpdir.mkdir(dir_name)
63 63
64 64 app = webtest.TestApp(
65 65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66 66
67 67 # We set service to git-upload-packs to trigger a 403
68 68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 69 expect_errors=True)
70 70
71 71 assert response.status_int == 403
72 72
73 73
74 74 def test_git_fallbacks_to_git_folder(tmpdir):
75 75 tmpdir.mkdir('.git')
76 76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 77 tmpdir.mkdir(os.path.join('.git', dir_name))
78 78
79 79 app = webtest.TestApp(
80 80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81 81
82 82 # We set service to git-upload-packs to trigger a 403
83 83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 84 expect_errors=True)
85 85
86 86 assert response.status_int == 403
@@ -1,39 +1,39 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 from vcsserver.server import VcsServer
24 24
25 25
26 26 def test_provides_the_pid(server):
27 27 pid = server.get_pid()
28 28 assert pid == os.getpid()
29 29
30 30
31 31 def test_allows_to_trigger_the_garbage_collector(server):
32 32 with mock.patch('gc.collect') as collect:
33 33 server.run_gc()
34 34 assert collect.called
35 35
36 36
37 37 @pytest.fixture
38 38 def server():
39 39 return VcsServer()
@@ -1,155 +1,155 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import os
20 20 import sys
21 21
22 22 import pytest
23 23
24 24 from vcsserver import subprocessio
25 25
26 26
27 27 class KindaFilelike(object): # pragma: no cover
28 28
29 29 def __init__(self, data, size):
30 30 chunks = size / len(data)
31 31
32 32 self.stream = self._get_stream(data, chunks)
33 33
34 34 def _get_stream(self, data, chunks):
35 35 for x in xrange(chunks):
36 36 yield data
37 37
38 38 def read(self, n):
39 39
40 40 buffer_stream = ''
41 41 for chunk in self.stream:
42 42 buffer_stream += chunk
43 43 if len(buffer_stream) >= n:
44 44 break
45 45
46 46 # self.stream = self.bytes[n:]
47 47 return buffer_stream
48 48
49 49
50 50 @pytest.fixture(scope='module')
51 51 def environ():
52 52 """Delete coverage variables, as they make the tests fail."""
53 53 env = dict(os.environ)
54 54 for key in env.keys():
55 55 if key.startswith('COV_CORE_'):
56 56 del env[key]
57 57
58 58 return env
59 59
60 60
61 61 def _get_python_args(script):
62 62 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
63 63
64 64
65 65 def test_raise_exception_on_non_zero_return_code(environ):
66 66 args = _get_python_args('sys.exit(1)')
67 67 with pytest.raises(EnvironmentError):
68 68 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
69 69
70 70
71 71 def test_does_not_fail_on_non_zero_return_code(environ):
72 72 args = _get_python_args('sys.exit(1)')
73 73 output = ''.join(
74 74 subprocessio.SubprocessIOChunker(
75 75 args, shell=False, fail_on_return_code=False, env=environ
76 76 )
77 77 )
78 78
79 79 assert output == ''
80 80
81 81
82 82 def test_raise_exception_on_stderr(environ):
83 83 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
84 84 with pytest.raises(EnvironmentError) as excinfo:
85 85 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
86 86
87 87 assert 'exited due to an error:\nX' in str(excinfo.value)
88 88
89 89
90 90 def test_does_not_fail_on_stderr(environ):
91 91 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
92 92 output = ''.join(
93 93 subprocessio.SubprocessIOChunker(
94 94 args, shell=False, fail_on_stderr=False, env=environ
95 95 )
96 96 )
97 97
98 98 assert output == ''
99 99
100 100
101 101 @pytest.mark.parametrize('size', [1, 10 ** 5])
102 102 def test_output_with_no_input(size, environ):
103 103 print(type(environ))
104 104 data = 'X'
105 105 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
106 106 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
107 107
108 108 assert output == data * size
109 109
110 110
111 111 @pytest.mark.parametrize('size', [1, 10 ** 5])
112 112 def test_output_with_no_input_does_not_fail(size, environ):
113 113 data = 'X'
114 114 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
115 115 output = ''.join(
116 116 subprocessio.SubprocessIOChunker(
117 117 args, shell=False, fail_on_return_code=False, env=environ
118 118 )
119 119 )
120 120
121 121 print("{} {}".format(len(data * size), len(output)))
122 122 assert output == data * size
123 123
124 124
125 125 @pytest.mark.parametrize('size', [1, 10 ** 5])
126 126 def test_output_with_input(size, environ):
127 127 data_len = size
128 128 inputstream = KindaFilelike('X', size)
129 129
130 130 # This acts like the cat command.
131 131 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
132 132 output = ''.join(
133 133 subprocessio.SubprocessIOChunker(
134 134 args, shell=False, inputstream=inputstream, env=environ
135 135 )
136 136 )
137 137
138 138 assert len(output) == data_len
139 139
140 140
141 141 @pytest.mark.parametrize('size', [1, 10 ** 5])
142 142 def test_output_with_input_skipping_iterator(size, environ):
143 143 data_len = size
144 144 inputstream = KindaFilelike('X', size)
145 145
146 146 # This acts like the cat command.
147 147 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
148 148
149 149 # Note: assigning the chunker makes sure that it is not deleted too early
150 150 chunker = subprocessio.SubprocessIOChunker(
151 151 args, shell=False, inputstream=inputstream, env=environ
152 152 )
153 153 output = ''.join(chunker.output)
154 154
155 155 assert len(output) == data_len
@@ -1,87 +1,87 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import mock
20 20 import pytest
21 21 import sys
22 22
23 23
24 24 class MockPopen(object):
25 25 def __init__(self, stderr):
26 26 self.stdout = io.BytesIO('')
27 27 self.stderr = io.BytesIO(stderr)
28 28 self.returncode = 1
29 29
30 30 def wait(self):
31 31 pass
32 32
33 33
34 34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 37 ])
38 38
39 39
40 40 @pytest.mark.parametrize('stderr,expected_reason', [
41 41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 42 ('svnrdump: E123456', 'UNKNOWN:svnrdump: E123456'),
43 43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 44 @pytest.mark.xfail(sys.platform == "cygwin",
45 45 reason="SVN not packaged for Cygwin")
46 46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 47 from vcsserver import svn
48 48 factory = mock.Mock()
49 49 factory.repo = mock.Mock(return_value=mock.Mock())
50 50
51 51 remote = svn.SvnRemote(factory)
52 52 remote.is_path_valid_repository = lambda wire, path: True
53 53
54 54 with mock.patch('subprocess.Popen',
55 55 return_value=MockPopen(stderr)):
56 56 with pytest.raises(Exception) as excinfo:
57 57 remote.import_remote_repository({'path': 'path'}, 'url')
58 58
59 59 expected_error_args = (
60 60 'Failed to dump the remote repository from url. Reason:{}'.format(expected_reason),)
61 61
62 62 assert excinfo.value.args == expected_error_args
63 63
64 64
65 65 def test_svn_libraries_can_be_imported():
66 66 import svn
67 67 import svn.client
68 68 assert svn.client is not None
69 69
70 70
71 71 @pytest.mark.parametrize('example_url, parts', [
72 72 ('http://server.com', (None, None, 'http://server.com')),
73 73 ('http://user@server.com', ('user', None, 'http://user@server.com')),
74 74 ('http://user:pass@server.com', ('user', 'pass', 'http://user:pass@server.com')),
75 75 ('<script>', (None, None, '<script>')),
76 76 ('http://', (None, None, 'http://')),
77 77 ])
78 78 def test_username_password_extraction_from_url(example_url, parts):
79 79 from vcsserver import svn
80 80
81 81 factory = mock.Mock()
82 82 factory.repo = mock.Mock(return_value=mock.Mock())
83 83
84 84 remote = svn.SvnRemote(factory)
85 85 remote.is_path_valid_repository = lambda wire, path: True
86 86
87 87 assert remote.get_url_and_credentials(example_url) == parts
@@ -1,96 +1,96 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import wsgiref.simple_server
19 19 import wsgiref.validate
20 20
21 21 from vcsserver import wsgi_app_caller
22 22
23 23
24 24 # pylint: disable=protected-access,too-many-public-methods
25 25
26 26
27 27 @wsgiref.validate.validator
28 28 def demo_app(environ, start_response):
29 29 """WSGI app used for testing."""
30 30 data = [
31 31 'Hello World!\n',
32 32 'input_data=%s\n' % environ['wsgi.input'].read(),
33 33 ]
34 34 for key, value in sorted(environ.items()):
35 35 data.append('%s=%s\n' % (key, value))
36 36
37 37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 38 write('Old school write method\n')
39 39 write('***********************\n')
40 40 return data
41 41
42 42
43 43 BASE_ENVIRON = {
44 44 'REQUEST_METHOD': 'GET',
45 45 'SERVER_NAME': 'localhost',
46 46 'SERVER_PORT': '80',
47 47 'SCRIPT_NAME': '',
48 48 'PATH_INFO': '/',
49 49 'QUERY_STRING': '',
50 50 'foo.var': 'bla',
51 51 }
52 52
53 53
54 54 def test_complete_environ():
55 55 environ = dict(BASE_ENVIRON)
56 56 data = "data"
57 57 wsgi_app_caller._complete_environ(environ, data)
58 58 wsgiref.validate.check_environ(environ)
59 59
60 60 assert data == environ['wsgi.input'].read()
61 61
62 62
63 63 def test_start_response():
64 64 start_response = wsgi_app_caller._StartResponse()
65 65 status = '200 OK'
66 66 headers = [('Content-Type', 'text/plain')]
67 67 start_response(status, headers)
68 68
69 69 assert status == start_response.status
70 70 assert headers == start_response.headers
71 71
72 72
73 73 def test_start_response_with_error():
74 74 start_response = wsgi_app_caller._StartResponse()
75 75 status = '500 Internal Server Error'
76 76 headers = [('Content-Type', 'text/plain')]
77 77 start_response(status, headers, (None, None, None))
78 78
79 79 assert status == start_response.status
80 80 assert headers == start_response.headers
81 81
82 82
83 83 def test_wsgi_app_caller():
84 84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 85 environ = dict(BASE_ENVIRON)
86 86 input_data = 'some text'
87 87 responses, status, headers = caller.handle(environ, input_data)
88 88 response = ''.join(responses)
89 89
90 90 assert status == '200 OK'
91 91 assert headers == [('Content-Type', 'text/plain')]
92 92 assert response.startswith(
93 93 'Old school write method\n***********************\n')
94 94 assert 'Hello World!\n' in response
95 95 assert 'foo.var=bla\n' in response
96 96 assert 'input_data=%s\n' % input_data in response
@@ -1,19 +1,19 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 # Copyright (C) 2016-2019 RhodeCode GmbH
3 # Copyright (C) 2016-2020 RhodeCode GmbH
4 4 #
5 5 # This program is free software: you can redistribute it and/or modify
6 6 # it under the terms of the GNU Affero General Public License, version 3
7 7 # (only), as published by the Free Software Foundation.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU Affero General Public License
15 15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16 16 #
17 17 # This program is dual-licensed. If you wish to learn more about the
18 18 # RhodeCode Enterprise Edition, including its added features, Support services,
19 19 # and proprietary license terms, please see https://rhodecode.com/licenses/
@@ -1,64 +1,64 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import time
19 19 import logging
20 20
21 21 import vcsserver
22 22 from vcsserver.utils import safe_str
23 23
24 24
25 25 log = logging.getLogger(__name__)
26 26
27 27
28 28 def get_access_path(request):
29 29 environ = request.environ
30 30 return environ.get('PATH_INFO')
31 31
32 32
33 33 def get_user_agent(environ):
34 34 return environ.get('HTTP_USER_AGENT')
35 35
36 36
37 37 class RequestWrapperTween(object):
38 38 def __init__(self, handler, registry):
39 39 self.handler = handler
40 40 self.registry = registry
41 41
42 42 # one-time configuration code goes here
43 43
44 44 def __call__(self, request):
45 45 start = time.time()
46 46 try:
47 47 response = self.handler(request)
48 48 finally:
49 49 end = time.time()
50 50 total = end - start
51 51 count = request.request_count()
52 52 _ver_ = vcsserver.__version__
53 53 log.info(
54 54 'Req[%4s] IP: %s %s Request to %s time: %.4fs [%s], VCSServer %s',
55 55 count, '127.0.0.1', request.environ.get('REQUEST_METHOD'),
56 56 safe_str(get_access_path(request)), total, get_user_agent(request.environ), _ver_)
57 57
58 58 return response
59 59
60 60
61 61 def includeme(config):
62 62 config.add_tween(
63 63 'vcsserver.tweens.request_wrapper.RequestWrapperTween',
64 64 )
@@ -1,110 +1,110 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import logging
18 18 import hashlib
19 19
20 20 log = logging.getLogger(__name__)
21 21
22 22
23 23 def safe_int(val, default=None):
24 24 """
25 25 Returns int() of val if val is not convertable to int use default
26 26 instead
27 27
28 28 :param val:
29 29 :param default:
30 30 """
31 31
32 32 try:
33 33 val = int(val)
34 34 except (ValueError, TypeError):
35 35 val = default
36 36
37 37 return val
38 38
39 39
40 40 def safe_str(unicode_, to_encoding=None):
41 41 """
42 42 safe str function. Does few trick to turn unicode_ into string
43 43
44 44 :param unicode_: unicode to encode
45 45 :param to_encoding: encode to this type UTF8 default
46 46 :rtype: str
47 47 :returns: str object
48 48 """
49 49 to_encoding = to_encoding or ['utf8']
50 50 # if it's not basestr cast to str
51 51 if not isinstance(unicode_, basestring):
52 52 return str(unicode_)
53 53
54 54 if isinstance(unicode_, str):
55 55 return unicode_
56 56
57 57 if not isinstance(to_encoding, (list, tuple)):
58 58 to_encoding = [to_encoding]
59 59
60 60 for enc in to_encoding:
61 61 try:
62 62 return unicode_.encode(enc)
63 63 except UnicodeEncodeError:
64 64 pass
65 65
66 66 return unicode_.encode(to_encoding[0], 'replace')
67 67
68 68
69 69 def safe_unicode(str_, from_encoding=None):
70 70 """
71 71 safe unicode function. Does few trick to turn str_ into unicode
72 72
73 73 :param str_: string to decode
74 74 :param from_encoding: encode from this type UTF8 default
75 75 :rtype: unicode
76 76 :returns: unicode object
77 77 """
78 78 from_encoding = from_encoding or ['utf8']
79 79
80 80 if isinstance(str_, unicode):
81 81 return str_
82 82
83 83 if not isinstance(from_encoding, (list, tuple)):
84 84 from_encoding = [from_encoding]
85 85
86 86 try:
87 87 return unicode(str_)
88 88 except UnicodeDecodeError:
89 89 pass
90 90
91 91 for enc in from_encoding:
92 92 try:
93 93 return unicode(str_, enc)
94 94 except UnicodeDecodeError:
95 95 pass
96 96
97 97 return unicode(str_, from_encoding[0], 'replace')
98 98
99 99
100 100 class AttributeDict(dict):
101 101 def __getattr__(self, attr):
102 102 return self.get(attr, None)
103 103 __setattr__ = dict.__setitem__
104 104 __delattr__ = dict.__delitem__
105 105
106 106
107 107 def sha1(val):
108 108 return hashlib.sha1(val).hexdigest()
109 109
110 110
@@ -1,32 +1,32 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19 class RemoteBase(object):
20 20 EMPTY_COMMIT = '0' * 40
21 21
22 22 @property
23 23 def region(self):
24 24 return self._factory._cache_region
25 25
26 26 def _cache_on(self, wire):
27 27 context = wire.get('context', '')
28 28 context_uid = '{}'.format(context)
29 29 repo_id = wire.get('repo_id', '')
30 30 cache = wire.get('cache', True)
31 31 cache_on = context and cache
32 32 return cache_on, context_uid, repo_id
@@ -1,116 +1,116 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2019 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """Extract the responses of a WSGI app."""
19 19
20 20 __all__ = ('WSGIAppCaller',)
21 21
22 22 import io
23 23 import logging
24 24 import os
25 25
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29 DEV_NULL = open(os.devnull)
30 30
31 31
32 32 def _complete_environ(environ, input_data):
33 33 """Update the missing wsgi.* variables of a WSGI environment.
34 34
35 35 :param environ: WSGI environment to update
36 36 :type environ: dict
37 37 :param input_data: data to be read by the app
38 38 :type input_data: str
39 39 """
40 40 environ.update({
41 41 'wsgi.version': (1, 0),
42 42 'wsgi.url_scheme': 'http',
43 43 'wsgi.multithread': True,
44 44 'wsgi.multiprocess': True,
45 45 'wsgi.run_once': False,
46 46 'wsgi.input': io.BytesIO(input_data),
47 47 'wsgi.errors': DEV_NULL,
48 48 })
49 49
50 50
51 51 # pylint: disable=too-few-public-methods
52 52 class _StartResponse(object):
53 53 """Save the arguments of a start_response call."""
54 54
55 55 __slots__ = ['status', 'headers', 'content']
56 56
57 57 def __init__(self):
58 58 self.status = None
59 59 self.headers = None
60 60 self.content = []
61 61
62 62 def __call__(self, status, headers, exc_info=None):
63 63 # TODO(skreft): do something meaningful with the exc_info
64 64 exc_info = None # avoid dangling circular reference
65 65 self.status = status
66 66 self.headers = headers
67 67
68 68 return self.write
69 69
70 70 def write(self, content):
71 71 """Write method returning when calling this object.
72 72
73 73 All the data written is then available in content.
74 74 """
75 75 self.content.append(content)
76 76
77 77
78 78 class WSGIAppCaller(object):
79 79 """Calls a WSGI app."""
80 80
81 81 def __init__(self, app):
82 82 """
83 83 :param app: WSGI app to call
84 84 """
85 85 self.app = app
86 86
87 87 def handle(self, environ, input_data):
88 88 """Process a request with the WSGI app.
89 89
90 90 The returned data of the app is fully consumed into a list.
91 91
92 92 :param environ: WSGI environment to update
93 93 :type environ: dict
94 94 :param input_data: data to be read by the app
95 95 :type input_data: str
96 96
97 97 :returns: a tuple with the contents, status and headers
98 98 :rtype: (list<str>, str, list<(str, str)>)
99 99 """
100 100 _complete_environ(environ, input_data)
101 101 start_response = _StartResponse()
102 102 log.debug("Calling wrapped WSGI application")
103 103 responses = self.app(environ, start_response)
104 104 responses_list = list(responses)
105 105 existing_responses = start_response.content
106 106 if existing_responses:
107 107 log.debug(
108 108 "Adding returned response to response written via write()")
109 109 existing_responses.extend(responses_list)
110 110 responses_list = existing_responses
111 111 if hasattr(responses, 'close'):
112 112 log.debug("Closing iterator from WSGI application")
113 113 responses.close()
114 114
115 115 log.debug("Handling of WSGI request done, returning response")
116 116 return responses_list, start_response.status, start_response.headers
General Comments 0
You need to be logged in to leave comments. Login now