##// END OF EJS Templates
release: Merge default into stable for release preparation
super-admin -
r992:2a5ae811 merge stable
parent child Browse files
Show More
@@ -1,5 +1,5 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.26.0
2 current_version = 4.27.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
@@ -1,16 +1,14 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.26.0
13 version = 4.27.0
16
14
@@ -1,1103 +1,1103 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "atomicwrites" = super.buildPythonPackage {
7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.3.0";
8 name = "atomicwrites-1.3.0";
9 doCheck = false;
9 doCheck = false;
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
11 url = "https://files.pythonhosted.org/packages/ec/0f/cd484ac8820fed363b374af30049adc8fd13065720fd4f4c6be8a2309da7/atomicwrites-1.3.0.tar.gz";
12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
12 sha256 = "19ngcscdf3jsqmpcxn6zl5b6anmsajb6izp1smcd1n02midl9abm";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.mit ];
15 license = [ pkgs.lib.licenses.mit ];
16 };
16 };
17 };
17 };
18 "attrs" = super.buildPythonPackage {
18 "attrs" = super.buildPythonPackage {
19 name = "attrs-19.3.0";
19 name = "attrs-19.3.0";
20 doCheck = false;
20 doCheck = false;
21 src = fetchurl {
21 src = fetchurl {
22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
22 url = "https://files.pythonhosted.org/packages/98/c3/2c227e66b5e896e15ccdae2e00bbc69aa46e9a8ce8869cc5fa96310bf612/attrs-19.3.0.tar.gz";
23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
23 sha256 = "0wky4h28n7xnr6xv69p9z6kv8bzn50d10c3drmd9ds8gawbcxdzp";
24 };
24 };
25 meta = {
25 meta = {
26 license = [ pkgs.lib.licenses.mit ];
26 license = [ pkgs.lib.licenses.mit ];
27 };
27 };
28 };
28 };
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 name = "backports.shutil-get-terminal-size-1.0.0";
30 name = "backports.shutil-get-terminal-size-1.0.0";
31 doCheck = false;
31 doCheck = false;
32 src = fetchurl {
32 src = fetchurl {
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 };
35 };
36 meta = {
36 meta = {
37 license = [ pkgs.lib.licenses.mit ];
37 license = [ pkgs.lib.licenses.mit ];
38 };
38 };
39 };
39 };
40 "beautifulsoup4" = super.buildPythonPackage {
40 "beautifulsoup4" = super.buildPythonPackage {
41 name = "beautifulsoup4-4.6.3";
41 name = "beautifulsoup4-4.6.3";
42 doCheck = false;
42 doCheck = false;
43 src = fetchurl {
43 src = fetchurl {
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 };
46 };
47 meta = {
47 meta = {
48 license = [ pkgs.lib.licenses.mit ];
48 license = [ pkgs.lib.licenses.mit ];
49 };
49 };
50 };
50 };
51 "cffi" = super.buildPythonPackage {
51 "cffi" = super.buildPythonPackage {
52 name = "cffi-1.12.3";
52 name = "cffi-1.12.3";
53 doCheck = false;
53 doCheck = false;
54 propagatedBuildInputs = [
54 propagatedBuildInputs = [
55 self."pycparser"
55 self."pycparser"
56 ];
56 ];
57 src = fetchurl {
57 src = fetchurl {
58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
58 url = "https://files.pythonhosted.org/packages/93/1a/ab8c62b5838722f29f3daffcc8d4bd61844aa9b5f437341cc890ceee483b/cffi-1.12.3.tar.gz";
59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
59 sha256 = "0x075521fxwv0mfp4cqzk7lvmw4n94bjw601qkcv314z5s182704";
60 };
60 };
61 meta = {
61 meta = {
62 license = [ pkgs.lib.licenses.mit ];
62 license = [ pkgs.lib.licenses.mit ];
63 };
63 };
64 };
64 };
65 "configobj" = super.buildPythonPackage {
65 "configobj" = super.buildPythonPackage {
66 name = "configobj-5.0.6";
66 name = "configobj-5.0.6";
67 doCheck = false;
67 doCheck = false;
68 propagatedBuildInputs = [
68 propagatedBuildInputs = [
69 self."six"
69 self."six"
70 ];
70 ];
71 src = fetchurl {
71 src = fetchurl {
72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
72 url = "https://code.rhodecode.com/upstream/configobj/artifacts/download/0-012de99a-b1e1-4f64-a5c0-07a98a41b324.tar.gz?md5=6a513f51fe04b2c18cf84c1395a7c626";
73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
73 sha256 = "0kqfrdfr14mw8yd8qwq14dv2xghpkjmd3yjsy8dfcbvpcc17xnxp";
74 };
74 };
75 meta = {
75 meta = {
76 license = [ pkgs.lib.licenses.bsdOriginal ];
76 license = [ pkgs.lib.licenses.bsdOriginal ];
77 };
77 };
78 };
78 };
79 "configparser" = super.buildPythonPackage {
79 "configparser" = super.buildPythonPackage {
80 name = "configparser-4.0.2";
80 name = "configparser-4.0.2";
81 doCheck = false;
81 doCheck = false;
82 src = fetchurl {
82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
83 url = "https://files.pythonhosted.org/packages/16/4f/48975536bd488d3a272549eb795ac4a13a5f7fcdc8995def77fbef3532ee/configparser-4.0.2.tar.gz";
84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
84 sha256 = "1priacxym85yjcf68hh38w55nqswaxp71ryjyfdk222kg9l85ln7";
85 };
85 };
86 meta = {
86 meta = {
87 license = [ pkgs.lib.licenses.mit ];
87 license = [ pkgs.lib.licenses.mit ];
88 };
88 };
89 };
89 };
90 "contextlib2" = super.buildPythonPackage {
90 "contextlib2" = super.buildPythonPackage {
91 name = "contextlib2-0.6.0.post1";
91 name = "contextlib2-0.6.0.post1";
92 doCheck = false;
92 doCheck = false;
93 src = fetchurl {
93 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
94 url = "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz";
95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
95 sha256 = "0bhnr2ac7wy5l85ji909gyljyk85n92w8pdvslmrvc8qih4r1x01";
96 };
96 };
97 meta = {
97 meta = {
98 license = [ pkgs.lib.licenses.psfl ];
98 license = [ pkgs.lib.licenses.psfl ];
99 };
99 };
100 };
100 };
101 "cov-core" = super.buildPythonPackage {
101 "cov-core" = super.buildPythonPackage {
102 name = "cov-core-1.15.0";
102 name = "cov-core-1.15.0";
103 doCheck = false;
103 doCheck = false;
104 propagatedBuildInputs = [
104 propagatedBuildInputs = [
105 self."coverage"
105 self."coverage"
106 ];
106 ];
107 src = fetchurl {
107 src = fetchurl {
108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
108 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
109 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
110 };
110 };
111 meta = {
111 meta = {
112 license = [ pkgs.lib.licenses.mit ];
112 license = [ pkgs.lib.licenses.mit ];
113 };
113 };
114 };
114 };
115 "coverage" = super.buildPythonPackage {
115 "coverage" = super.buildPythonPackage {
116 name = "coverage-4.5.4";
116 name = "coverage-4.5.4";
117 doCheck = false;
117 doCheck = false;
118 src = fetchurl {
118 src = fetchurl {
119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
119 url = "https://files.pythonhosted.org/packages/85/d5/818d0e603685c4a613d56f065a721013e942088047ff1027a632948bdae6/coverage-4.5.4.tar.gz";
120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
120 sha256 = "0p0j4di6h8k6ica7jwwj09azdcg4ycxq60i9qsskmsg94cd9yzg0";
121 };
121 };
122 meta = {
122 meta = {
123 license = [ pkgs.lib.licenses.asl20 ];
123 license = [ pkgs.lib.licenses.asl20 ];
124 };
124 };
125 };
125 };
126 "decorator" = super.buildPythonPackage {
126 "decorator" = super.buildPythonPackage {
127 name = "decorator-4.1.2";
127 name = "decorator-4.1.2";
128 doCheck = false;
128 doCheck = false;
129 src = fetchurl {
129 src = fetchurl {
130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
130 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
131 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
132 };
132 };
133 meta = {
133 meta = {
134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
134 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
135 };
135 };
136 };
136 };
137 "dogpile.cache" = super.buildPythonPackage {
137 "dogpile.cache" = super.buildPythonPackage {
138 name = "dogpile.cache-0.9.0";
138 name = "dogpile.cache-0.9.0";
139 doCheck = false;
139 doCheck = false;
140 propagatedBuildInputs = [
140 propagatedBuildInputs = [
141 self."decorator"
141 self."decorator"
142 ];
142 ];
143 src = fetchurl {
143 src = fetchurl {
144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
144 url = "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz";
145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
145 sha256 = "0sr1fn6b4k5bh0cscd9yi8csqxvj4ngzildav58x5p694mc86j5k";
146 };
146 };
147 meta = {
147 meta = {
148 license = [ pkgs.lib.licenses.bsdOriginal ];
148 license = [ pkgs.lib.licenses.bsdOriginal ];
149 };
149 };
150 };
150 };
151 "dogpile.core" = super.buildPythonPackage {
151 "dogpile.core" = super.buildPythonPackage {
152 name = "dogpile.core-0.4.1";
152 name = "dogpile.core-0.4.1";
153 doCheck = false;
153 doCheck = false;
154 src = fetchurl {
154 src = fetchurl {
155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
155 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
156 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
157 };
157 };
158 meta = {
158 meta = {
159 license = [ pkgs.lib.licenses.bsdOriginal ];
159 license = [ pkgs.lib.licenses.bsdOriginal ];
160 };
160 };
161 };
161 };
162 "dulwich" = super.buildPythonPackage {
162 "dulwich" = super.buildPythonPackage {
163 name = "dulwich-0.13.0";
163 name = "dulwich-0.13.0";
164 doCheck = false;
164 doCheck = false;
165 src = fetchurl {
165 src = fetchurl {
166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
166 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
167 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
168 };
168 };
169 meta = {
169 meta = {
170 license = [ pkgs.lib.licenses.gpl2Plus ];
170 license = [ pkgs.lib.licenses.gpl2Plus ];
171 };
171 };
172 };
172 };
173 "enum34" = super.buildPythonPackage {
173 "enum34" = super.buildPythonPackage {
174 name = "enum34-1.1.10";
174 name = "enum34-1.1.10";
175 doCheck = false;
175 doCheck = false;
176 src = fetchurl {
176 src = fetchurl {
177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
177 url = "https://files.pythonhosted.org/packages/11/c4/2da1f4952ba476677a42f25cd32ab8aaf0e1c0d0e00b89822b835c7e654c/enum34-1.1.10.tar.gz";
178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
178 sha256 = "0j7ji699fwswm4vg6w1v07fkbf8dkzdm6gfh88jvs5nqgr3sgrnc";
179 };
179 };
180 meta = {
180 meta = {
181 license = [ pkgs.lib.licenses.bsdOriginal ];
181 license = [ pkgs.lib.licenses.bsdOriginal ];
182 };
182 };
183 };
183 };
184 "funcsigs" = super.buildPythonPackage {
184 "funcsigs" = super.buildPythonPackage {
185 name = "funcsigs-1.0.2";
185 name = "funcsigs-1.0.2";
186 doCheck = false;
186 doCheck = false;
187 src = fetchurl {
187 src = fetchurl {
188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
188 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
189 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
190 };
190 };
191 meta = {
191 meta = {
192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
192 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
193 };
193 };
194 };
194 };
195 "gevent" = super.buildPythonPackage {
195 "gevent" = super.buildPythonPackage {
196 name = "gevent-1.5.0";
196 name = "gevent-1.5.0";
197 doCheck = false;
197 doCheck = false;
198 propagatedBuildInputs = [
198 propagatedBuildInputs = [
199 self."greenlet"
199 self."greenlet"
200 ];
200 ];
201 src = fetchurl {
201 src = fetchurl {
202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
202 url = "https://files.pythonhosted.org/packages/5a/79/2c63d385d017b5dd7d70983a463dfd25befae70c824fedb857df6e72eff2/gevent-1.5.0.tar.gz";
203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
203 sha256 = "0aac3d4vhv5n4rsb6cqzq0d1xx9immqz4fmpddw35yxkwdc450dj";
204 };
204 };
205 meta = {
205 meta = {
206 license = [ pkgs.lib.licenses.mit ];
206 license = [ pkgs.lib.licenses.mit ];
207 };
207 };
208 };
208 };
209 "gprof2dot" = super.buildPythonPackage {
209 "gprof2dot" = super.buildPythonPackage {
210 name = "gprof2dot-2017.9.19";
210 name = "gprof2dot-2017.9.19";
211 doCheck = false;
211 doCheck = false;
212 src = fetchurl {
212 src = fetchurl {
213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
213 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
214 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
215 };
215 };
216 meta = {
216 meta = {
217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
217 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
218 };
218 };
219 };
219 };
220 "greenlet" = super.buildPythonPackage {
220 "greenlet" = super.buildPythonPackage {
221 name = "greenlet-0.4.15";
221 name = "greenlet-0.4.15";
222 doCheck = false;
222 doCheck = false;
223 src = fetchurl {
223 src = fetchurl {
224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
224 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
225 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
226 };
226 };
227 meta = {
227 meta = {
228 license = [ pkgs.lib.licenses.mit ];
228 license = [ pkgs.lib.licenses.mit ];
229 };
229 };
230 };
230 };
231 "gunicorn" = super.buildPythonPackage {
231 "gunicorn" = super.buildPythonPackage {
232 name = "gunicorn-19.9.0";
232 name = "gunicorn-19.9.0";
233 doCheck = false;
233 doCheck = false;
234 src = fetchurl {
234 src = fetchurl {
235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
235 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
236 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
237 };
237 };
238 meta = {
238 meta = {
239 license = [ pkgs.lib.licenses.mit ];
239 license = [ pkgs.lib.licenses.mit ];
240 };
240 };
241 };
241 };
242 "hg-evolve" = super.buildPythonPackage {
242 "hg-evolve" = super.buildPythonPackage {
243 name = "hg-evolve-9.1.0";
243 name = "hg-evolve-9.1.0";
244 doCheck = false;
244 doCheck = false;
245 src = fetchurl {
245 src = fetchurl {
246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
246 url = "https://files.pythonhosted.org/packages/20/36/5a6655975aa0c663be91098d31a0b24841acad44fe896aa2bdee77c6b883/hg-evolve-9.1.0.tar.gz";
247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
247 sha256 = "1mna81cmzxxn7s2nwz3g1xgdjlcc1axkvfmwg7gjqghwn3pdraps";
248 };
248 };
249 meta = {
249 meta = {
250 license = [ { fullName = "GPLv2+"; } ];
250 license = [ { fullName = "GPLv2+"; } ];
251 };
251 };
252 };
252 };
253 "hgsubversion" = super.buildPythonPackage {
253 "hgsubversion" = super.buildPythonPackage {
254 name = "hgsubversion-1.9.3";
254 name = "hgsubversion-1.9.3";
255 doCheck = false;
255 doCheck = false;
256 propagatedBuildInputs = [
256 propagatedBuildInputs = [
257 self."mercurial"
257 self."mercurial"
258 self."subvertpy"
258 self."subvertpy"
259 ];
259 ];
260 src = fetchurl {
260 src = fetchurl {
261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
261 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
262 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
263 };
263 };
264 meta = {
264 meta = {
265 license = [ pkgs.lib.licenses.gpl1 ];
265 license = [ pkgs.lib.licenses.gpl1 ];
266 };
266 };
267 };
267 };
268 "hupper" = super.buildPythonPackage {
268 "hupper" = super.buildPythonPackage {
269 name = "hupper-1.10.2";
269 name = "hupper-1.10.2";
270 doCheck = false;
270 doCheck = false;
271 src = fetchurl {
271 src = fetchurl {
272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
272 url = "https://files.pythonhosted.org/packages/41/24/ea90fef04706e54bd1635c05c50dc9cf87cda543c59303a03e7aa7dda0ce/hupper-1.10.2.tar.gz";
273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
273 sha256 = "0am0p6g5cz6xmcaf04xq8q6dzdd9qz0phj6gcmpsckf2mcyza61q";
274 };
274 };
275 meta = {
275 meta = {
276 license = [ pkgs.lib.licenses.mit ];
276 license = [ pkgs.lib.licenses.mit ];
277 };
277 };
278 };
278 };
279 "importlib-metadata" = super.buildPythonPackage {
279 "importlib-metadata" = super.buildPythonPackage {
280 name = "importlib-metadata-1.6.0";
280 name = "importlib-metadata-1.6.0";
281 doCheck = false;
281 doCheck = false;
282 propagatedBuildInputs = [
282 propagatedBuildInputs = [
283 self."zipp"
283 self."zipp"
284 self."pathlib2"
284 self."pathlib2"
285 self."contextlib2"
285 self."contextlib2"
286 self."configparser"
286 self."configparser"
287 ];
287 ];
288 src = fetchurl {
288 src = fetchurl {
289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
289 url = "https://files.pythonhosted.org/packages/b4/1b/baab42e3cd64c9d5caac25a9d6c054f8324cdc38975a44d600569f1f7158/importlib_metadata-1.6.0.tar.gz";
290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
290 sha256 = "07icyggasn38yv2swdrd8z6i0plazmc9adavsdkbqqj91j53ll9l";
291 };
291 };
292 meta = {
292 meta = {
293 license = [ pkgs.lib.licenses.asl20 ];
293 license = [ pkgs.lib.licenses.asl20 ];
294 };
294 };
295 };
295 };
296 "ipdb" = super.buildPythonPackage {
296 "ipdb" = super.buildPythonPackage {
297 name = "ipdb-0.13.2";
297 name = "ipdb-0.13.2";
298 doCheck = false;
298 doCheck = false;
299 propagatedBuildInputs = [
299 propagatedBuildInputs = [
300 self."setuptools"
300 self."setuptools"
301 self."ipython"
301 self."ipython"
302 ];
302 ];
303 src = fetchurl {
303 src = fetchurl {
304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
304 url = "https://files.pythonhosted.org/packages/2c/bb/a3e1a441719ebd75c6dac8170d3ddba884b7ee8a5c0f9aefa7297386627a/ipdb-0.13.2.tar.gz";
305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
305 sha256 = "0jcd849rx30y3wcgzsqbn06v0yjlzvb9x3076q0yxpycdwm1ryvp";
306 };
306 };
307 meta = {
307 meta = {
308 license = [ pkgs.lib.licenses.bsdOriginal ];
308 license = [ pkgs.lib.licenses.bsdOriginal ];
309 };
309 };
310 };
310 };
311 "ipython" = super.buildPythonPackage {
311 "ipython" = super.buildPythonPackage {
312 name = "ipython-5.1.0";
312 name = "ipython-5.1.0";
313 doCheck = false;
313 doCheck = false;
314 propagatedBuildInputs = [
314 propagatedBuildInputs = [
315 self."setuptools"
315 self."setuptools"
316 self."decorator"
316 self."decorator"
317 self."pickleshare"
317 self."pickleshare"
318 self."simplegeneric"
318 self."simplegeneric"
319 self."traitlets"
319 self."traitlets"
320 self."prompt-toolkit"
320 self."prompt-toolkit"
321 self."pygments"
321 self."pygments"
322 self."pexpect"
322 self."pexpect"
323 self."backports.shutil-get-terminal-size"
323 self."backports.shutil-get-terminal-size"
324 self."pathlib2"
324 self."pathlib2"
325 self."pexpect"
325 self."pexpect"
326 ];
326 ];
327 src = fetchurl {
327 src = fetchurl {
328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
328 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
329 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
330 };
330 };
331 meta = {
331 meta = {
332 license = [ pkgs.lib.licenses.bsdOriginal ];
332 license = [ pkgs.lib.licenses.bsdOriginal ];
333 };
333 };
334 };
334 };
335 "ipython-genutils" = super.buildPythonPackage {
335 "ipython-genutils" = super.buildPythonPackage {
336 name = "ipython-genutils-0.2.0";
336 name = "ipython-genutils-0.2.0";
337 doCheck = false;
337 doCheck = false;
338 src = fetchurl {
338 src = fetchurl {
339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
339 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
340 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
341 };
341 };
342 meta = {
342 meta = {
343 license = [ pkgs.lib.licenses.bsdOriginal ];
343 license = [ pkgs.lib.licenses.bsdOriginal ];
344 };
344 };
345 };
345 };
346 "mako" = super.buildPythonPackage {
346 "mako" = super.buildPythonPackage {
347 name = "mako-1.1.0";
347 name = "mako-1.1.0";
348 doCheck = false;
348 doCheck = false;
349 propagatedBuildInputs = [
349 propagatedBuildInputs = [
350 self."markupsafe"
350 self."markupsafe"
351 ];
351 ];
352 src = fetchurl {
352 src = fetchurl {
353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
353 url = "https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz";
354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
354 sha256 = "0jqa3qfpykyn4fmkn0kh6043sfls7br8i2bsdbccazcvk9cijsd3";
355 };
355 };
356 meta = {
356 meta = {
357 license = [ pkgs.lib.licenses.mit ];
357 license = [ pkgs.lib.licenses.mit ];
358 };
358 };
359 };
359 };
360 "markupsafe" = super.buildPythonPackage {
360 "markupsafe" = super.buildPythonPackage {
361 name = "markupsafe-1.1.1";
361 name = "markupsafe-1.1.1";
362 doCheck = false;
362 doCheck = false;
363 src = fetchurl {
363 src = fetchurl {
364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
364 url = "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz";
365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
365 sha256 = "0sqipg4fk7xbixqd8kq6rlkxj664d157bdwbh93farcphf92x1r9";
366 };
366 };
367 meta = {
367 meta = {
368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
368 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.bsd3 ];
369 };
369 };
370 };
370 };
371 "mercurial" = super.buildPythonPackage {
371 "mercurial" = super.buildPythonPackage {
372 name = "mercurial-5.1.1";
372 name = "mercurial-5.1.1";
373 doCheck = false;
373 doCheck = false;
374 src = fetchurl {
374 src = fetchurl {
375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
375 url = "https://files.pythonhosted.org/packages/22/39/e1a95f6048aa0785b82f5faad8281ae7320894a635cb4a57e19479639c92/mercurial-5.1.1.tar.gz";
376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
376 sha256 = "17z42rfjdkrks4grzgac66nfh285zf1pwxd2zwx1p71pw2jqpz1m";
377 };
377 };
378 meta = {
378 meta = {
379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
379 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
380 };
380 };
381 };
381 };
382 "mock" = super.buildPythonPackage {
382 "mock" = super.buildPythonPackage {
383 name = "mock-3.0.5";
383 name = "mock-3.0.5";
384 doCheck = false;
384 doCheck = false;
385 propagatedBuildInputs = [
385 propagatedBuildInputs = [
386 self."six"
386 self."six"
387 self."funcsigs"
387 self."funcsigs"
388 ];
388 ];
389 src = fetchurl {
389 src = fetchurl {
390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
390 url = "https://files.pythonhosted.org/packages/2e/ab/4fe657d78b270aa6a32f027849513b829b41b0f28d9d8d7f8c3d29ea559a/mock-3.0.5.tar.gz";
391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
391 sha256 = "1hrp6j0yrx2xzylfv02qa8kph661m6yq4p0mc8fnimch9j4psrc3";
392 };
392 };
393 meta = {
393 meta = {
394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
394 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "OSI Approved :: BSD License"; } ];
395 };
395 };
396 };
396 };
397 "more-itertools" = super.buildPythonPackage {
397 "more-itertools" = super.buildPythonPackage {
398 name = "more-itertools-5.0.0";
398 name = "more-itertools-5.0.0";
399 doCheck = false;
399 doCheck = false;
400 propagatedBuildInputs = [
400 propagatedBuildInputs = [
401 self."six"
401 self."six"
402 ];
402 ];
403 src = fetchurl {
403 src = fetchurl {
404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
404 url = "https://files.pythonhosted.org/packages/dd/26/30fc0d541d9fdf55faf5ba4b0fd68f81d5bd2447579224820ad525934178/more-itertools-5.0.0.tar.gz";
405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
405 sha256 = "1r12cm6mcdwdzz7d47a6g4l437xsvapdlgyhqay3i2nrlv03da9q";
406 };
406 };
407 meta = {
407 meta = {
408 license = [ pkgs.lib.licenses.mit ];
408 license = [ pkgs.lib.licenses.mit ];
409 };
409 };
410 };
410 };
411 "msgpack-python" = super.buildPythonPackage {
411 "msgpack-python" = super.buildPythonPackage {
412 name = "msgpack-python-0.5.6";
412 name = "msgpack-python-0.5.6";
413 doCheck = false;
413 doCheck = false;
414 src = fetchurl {
414 src = fetchurl {
415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
415 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
416 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
417 };
417 };
418 meta = {
418 meta = {
419 license = [ pkgs.lib.licenses.asl20 ];
419 license = [ pkgs.lib.licenses.asl20 ];
420 };
420 };
421 };
421 };
422 "packaging" = super.buildPythonPackage {
422 "packaging" = super.buildPythonPackage {
423 name = "packaging-20.3";
423 name = "packaging-20.3";
424 doCheck = false;
424 doCheck = false;
425 propagatedBuildInputs = [
425 propagatedBuildInputs = [
426 self."pyparsing"
426 self."pyparsing"
427 self."six"
427 self."six"
428 ];
428 ];
429 src = fetchurl {
429 src = fetchurl {
430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
430 url = "https://files.pythonhosted.org/packages/65/37/83e3f492eb52d771e2820e88105f605335553fe10422cba9d256faeb1702/packaging-20.3.tar.gz";
431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
431 sha256 = "18xpablq278janh03bai9xd4kz9b0yfp6vflazn725ns9x3jna9w";
432 };
432 };
433 meta = {
433 meta = {
434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
434 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "BSD or Apache License, Version 2.0"; } pkgs.lib.licenses.asl20 ];
435 };
435 };
436 };
436 };
437 "pastedeploy" = super.buildPythonPackage {
437 "pastedeploy" = super.buildPythonPackage {
438 name = "pastedeploy-2.1.0";
438 name = "pastedeploy-2.1.0";
439 doCheck = false;
439 doCheck = false;
440 src = fetchurl {
440 src = fetchurl {
441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
441 url = "https://files.pythonhosted.org/packages/c4/e9/972a1c20318b3ae9edcab11a6cef64308fbae5d0d45ab52c6f8b2b8f35b8/PasteDeploy-2.1.0.tar.gz";
442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
442 sha256 = "16qsq5y6mryslmbp5pn35x4z8z3ndp5rpgl42h226879nrw9hmg7";
443 };
443 };
444 meta = {
444 meta = {
445 license = [ pkgs.lib.licenses.mit ];
445 license = [ pkgs.lib.licenses.mit ];
446 };
446 };
447 };
447 };
448 "pathlib2" = super.buildPythonPackage {
448 "pathlib2" = super.buildPythonPackage {
449 name = "pathlib2-2.3.5";
449 name = "pathlib2-2.3.5";
450 doCheck = false;
450 doCheck = false;
451 propagatedBuildInputs = [
451 propagatedBuildInputs = [
452 self."six"
452 self."six"
453 self."scandir"
453 self."scandir"
454 ];
454 ];
455 src = fetchurl {
455 src = fetchurl {
456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
456 url = "https://files.pythonhosted.org/packages/94/d8/65c86584e7e97ef824a1845c72bbe95d79f5b306364fa778a3c3e401b309/pathlib2-2.3.5.tar.gz";
457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
457 sha256 = "0s4qa8c082fdkb17izh4mfgwrjd1n5pya18wvrbwqdvvb5xs9nbc";
458 };
458 };
459 meta = {
459 meta = {
460 license = [ pkgs.lib.licenses.mit ];
460 license = [ pkgs.lib.licenses.mit ];
461 };
461 };
462 };
462 };
463 "pexpect" = super.buildPythonPackage {
463 "pexpect" = super.buildPythonPackage {
464 name = "pexpect-4.8.0";
464 name = "pexpect-4.8.0";
465 doCheck = false;
465 doCheck = false;
466 propagatedBuildInputs = [
466 propagatedBuildInputs = [
467 self."ptyprocess"
467 self."ptyprocess"
468 ];
468 ];
469 src = fetchurl {
469 src = fetchurl {
470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
470 url = "https://files.pythonhosted.org/packages/e5/9b/ff402e0e930e70467a7178abb7c128709a30dfb22d8777c043e501bc1b10/pexpect-4.8.0.tar.gz";
471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
471 sha256 = "032cg337h8awydgypz6f4wx848lw8dyrj4zy988x0lyib4ws8rgw";
472 };
472 };
473 meta = {
473 meta = {
474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
474 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
475 };
475 };
476 };
476 };
477 "pickleshare" = super.buildPythonPackage {
477 "pickleshare" = super.buildPythonPackage {
478 name = "pickleshare-0.7.5";
478 name = "pickleshare-0.7.5";
479 doCheck = false;
479 doCheck = false;
480 propagatedBuildInputs = [
480 propagatedBuildInputs = [
481 self."pathlib2"
481 self."pathlib2"
482 ];
482 ];
483 src = fetchurl {
483 src = fetchurl {
484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
484 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
485 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
486 };
486 };
487 meta = {
487 meta = {
488 license = [ pkgs.lib.licenses.mit ];
488 license = [ pkgs.lib.licenses.mit ];
489 };
489 };
490 };
490 };
491 "plaster" = super.buildPythonPackage {
491 "plaster" = super.buildPythonPackage {
492 name = "plaster-1.0";
492 name = "plaster-1.0";
493 doCheck = false;
493 doCheck = false;
494 propagatedBuildInputs = [
494 propagatedBuildInputs = [
495 self."setuptools"
495 self."setuptools"
496 ];
496 ];
497 src = fetchurl {
497 src = fetchurl {
498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
498 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
499 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
500 };
500 };
501 meta = {
501 meta = {
502 license = [ pkgs.lib.licenses.mit ];
502 license = [ pkgs.lib.licenses.mit ];
503 };
503 };
504 };
504 };
505 "plaster-pastedeploy" = super.buildPythonPackage {
505 "plaster-pastedeploy" = super.buildPythonPackage {
506 name = "plaster-pastedeploy-0.7";
506 name = "plaster-pastedeploy-0.7";
507 doCheck = false;
507 doCheck = false;
508 propagatedBuildInputs = [
508 propagatedBuildInputs = [
509 self."pastedeploy"
509 self."pastedeploy"
510 self."plaster"
510 self."plaster"
511 ];
511 ];
512 src = fetchurl {
512 src = fetchurl {
513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
513 url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
514 sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
515 };
515 };
516 meta = {
516 meta = {
517 license = [ pkgs.lib.licenses.mit ];
517 license = [ pkgs.lib.licenses.mit ];
518 };
518 };
519 };
519 };
520 "pluggy" = super.buildPythonPackage {
520 "pluggy" = super.buildPythonPackage {
521 name = "pluggy-0.13.1";
521 name = "pluggy-0.13.1";
522 doCheck = false;
522 doCheck = false;
523 propagatedBuildInputs = [
523 propagatedBuildInputs = [
524 self."importlib-metadata"
524 self."importlib-metadata"
525 ];
525 ];
526 src = fetchurl {
526 src = fetchurl {
527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
527 url = "https://files.pythonhosted.org/packages/f8/04/7a8542bed4b16a65c2714bf76cf5a0b026157da7f75e87cc88774aa10b14/pluggy-0.13.1.tar.gz";
528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
528 sha256 = "1c35qyhvy27q9ih9n899f3h4sdnpgq027dbiilly2qb5cvgarchm";
529 };
529 };
530 meta = {
530 meta = {
531 license = [ pkgs.lib.licenses.mit ];
531 license = [ pkgs.lib.licenses.mit ];
532 };
532 };
533 };
533 };
534 "prompt-toolkit" = super.buildPythonPackage {
534 "prompt-toolkit" = super.buildPythonPackage {
535 name = "prompt-toolkit-1.0.18";
535 name = "prompt-toolkit-1.0.18";
536 doCheck = false;
536 doCheck = false;
537 propagatedBuildInputs = [
537 propagatedBuildInputs = [
538 self."six"
538 self."six"
539 self."wcwidth"
539 self."wcwidth"
540 ];
540 ];
541 src = fetchurl {
541 src = fetchurl {
542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
542 url = "https://files.pythonhosted.org/packages/c5/64/c170e5b1913b540bf0c8ab7676b21fdd1d25b65ddeb10025c6ca43cccd4c/prompt_toolkit-1.0.18.tar.gz";
543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
543 sha256 = "09h1153wgr5x2ny7ds0w2m81n3bb9j8hjb8sjfnrg506r01clkyx";
544 };
544 };
545 meta = {
545 meta = {
546 license = [ pkgs.lib.licenses.bsdOriginal ];
546 license = [ pkgs.lib.licenses.bsdOriginal ];
547 };
547 };
548 };
548 };
549 "psutil" = super.buildPythonPackage {
549 "psutil" = super.buildPythonPackage {
550 name = "psutil-5.7.0";
550 name = "psutil-5.7.0";
551 doCheck = false;
551 doCheck = false;
552 src = fetchurl {
552 src = fetchurl {
553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
553 url = "https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz";
554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
554 sha256 = "03jykdi3dgf1cdal9bv4fq9zjvzj9l9bs99gi5ar81sdl5nc2pk8";
555 };
555 };
556 meta = {
556 meta = {
557 license = [ pkgs.lib.licenses.bsdOriginal ];
557 license = [ pkgs.lib.licenses.bsdOriginal ];
558 };
558 };
559 };
559 };
560 "ptyprocess" = super.buildPythonPackage {
560 "ptyprocess" = super.buildPythonPackage {
561 name = "ptyprocess-0.6.0";
561 name = "ptyprocess-0.6.0";
562 doCheck = false;
562 doCheck = false;
563 src = fetchurl {
563 src = fetchurl {
564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
564 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
565 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
566 };
566 };
567 meta = {
567 meta = {
568 license = [ ];
568 license = [ ];
569 };
569 };
570 };
570 };
571 "py" = super.buildPythonPackage {
571 "py" = super.buildPythonPackage {
572 name = "py-1.8.0";
572 name = "py-1.8.0";
573 doCheck = false;
573 doCheck = false;
574 src = fetchurl {
574 src = fetchurl {
575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
575 url = "https://files.pythonhosted.org/packages/f1/5a/87ca5909f400a2de1561f1648883af74345fe96349f34f737cdfc94eba8c/py-1.8.0.tar.gz";
576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
576 sha256 = "0lsy1gajva083pzc7csj1cvbmminb7b4l6a0prdzyb3fd829nqyw";
577 };
577 };
578 meta = {
578 meta = {
579 license = [ pkgs.lib.licenses.mit ];
579 license = [ pkgs.lib.licenses.mit ];
580 };
580 };
581 };
581 };
582 "pycparser" = super.buildPythonPackage {
582 "pycparser" = super.buildPythonPackage {
583 name = "pycparser-2.20";
583 name = "pycparser-2.20";
584 doCheck = false;
584 doCheck = false;
585 src = fetchurl {
585 src = fetchurl {
586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
586 url = "https://files.pythonhosted.org/packages/0f/86/e19659527668d70be91d0369aeaa055b4eb396b0f387a4f92293a20035bd/pycparser-2.20.tar.gz";
587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
587 sha256 = "1w0m3xvlrzq4lkbvd1ngfm8mdw64r1yxy6n7djlw6qj5d0km6ird";
588 };
588 };
589 meta = {
589 meta = {
590 license = [ pkgs.lib.licenses.bsdOriginal ];
590 license = [ pkgs.lib.licenses.bsdOriginal ];
591 };
591 };
592 };
592 };
593 "pygit2" = super.buildPythonPackage {
593 "pygit2" = super.buildPythonPackage {
594 name = "pygit2-0.28.2";
594 name = "pygit2-0.28.2";
595 doCheck = false;
595 doCheck = false;
596 propagatedBuildInputs = [
596 propagatedBuildInputs = [
597 self."cffi"
597 self."cffi"
598 self."six"
598 self."six"
599 ];
599 ];
600 src = fetchurl {
600 src = fetchurl {
601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
601 url = "https://files.pythonhosted.org/packages/4c/64/88c2a4eb2d22ca1982b364f41ff5da42d61de791d7eb68140e7f8f7eb721/pygit2-0.28.2.tar.gz";
602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
602 sha256 = "11kzj5mjkspvplnpdb6bj8dcj6rgmkk986k8hjcklyg5yaxkz32d";
603 };
603 };
604 meta = {
604 meta = {
605 license = [ { fullName = "GPLv2 with linking exception"; } ];
605 license = [ { fullName = "GPLv2 with linking exception"; } ];
606 };
606 };
607 };
607 };
608 "pygments" = super.buildPythonPackage {
608 "pygments" = super.buildPythonPackage {
609 name = "pygments-2.4.2";
609 name = "pygments-2.4.2";
610 doCheck = false;
610 doCheck = false;
611 src = fetchurl {
611 src = fetchurl {
612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
612 url = "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz";
613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
613 sha256 = "15v2sqm5g12bqa0c7wikfh9ck2nl97ayizy1hpqhmws5gqalq748";
614 };
614 };
615 meta = {
615 meta = {
616 license = [ pkgs.lib.licenses.bsdOriginal ];
616 license = [ pkgs.lib.licenses.bsdOriginal ];
617 };
617 };
618 };
618 };
619 "pyparsing" = super.buildPythonPackage {
619 "pyparsing" = super.buildPythonPackage {
620 name = "pyparsing-2.4.7";
620 name = "pyparsing-2.4.7";
621 doCheck = false;
621 doCheck = false;
622 src = fetchurl {
622 src = fetchurl {
623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
623 url = "https://files.pythonhosted.org/packages/c1/47/dfc9c342c9842bbe0036c7f763d2d6686bcf5eb1808ba3e170afdb282210/pyparsing-2.4.7.tar.gz";
624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
624 sha256 = "1hgc8qrbq1ymxbwfbjghv01fm3fbpjwpjwi0bcailxxzhf3yq0y2";
625 };
625 };
626 meta = {
626 meta = {
627 license = [ pkgs.lib.licenses.mit ];
627 license = [ pkgs.lib.licenses.mit ];
628 };
628 };
629 };
629 };
630 "pyramid" = super.buildPythonPackage {
630 "pyramid" = super.buildPythonPackage {
631 name = "pyramid-1.10.4";
631 name = "pyramid-1.10.4";
632 doCheck = false;
632 doCheck = false;
633 propagatedBuildInputs = [
633 propagatedBuildInputs = [
634 self."hupper"
634 self."hupper"
635 self."plaster"
635 self."plaster"
636 self."plaster-pastedeploy"
636 self."plaster-pastedeploy"
637 self."setuptools"
637 self."setuptools"
638 self."translationstring"
638 self."translationstring"
639 self."venusian"
639 self."venusian"
640 self."webob"
640 self."webob"
641 self."zope.deprecation"
641 self."zope.deprecation"
642 self."zope.interface"
642 self."zope.interface"
643 self."repoze.lru"
643 self."repoze.lru"
644 ];
644 ];
645 src = fetchurl {
645 src = fetchurl {
646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
646 url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
647 sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
648 };
648 };
649 meta = {
649 meta = {
650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
650 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
651 };
651 };
652 };
652 };
653 "pyramid-mako" = super.buildPythonPackage {
653 "pyramid-mako" = super.buildPythonPackage {
654 name = "pyramid-mako-1.1.0";
654 name = "pyramid-mako-1.1.0";
655 doCheck = false;
655 doCheck = false;
656 propagatedBuildInputs = [
656 propagatedBuildInputs = [
657 self."pyramid"
657 self."pyramid"
658 self."mako"
658 self."mako"
659 ];
659 ];
660 src = fetchurl {
660 src = fetchurl {
661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
661 url = "https://files.pythonhosted.org/packages/63/7b/5e2af68f675071a6bad148c1c393928f0ef5fcd94e95cbf53b89d6471a83/pyramid_mako-1.1.0.tar.gz";
662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
662 sha256 = "1qj0m091mnii86j2q1d82yir22nha361rvhclvg3s70z8iiwhrh0";
663 };
663 };
664 meta = {
664 meta = {
665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
665 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
666 };
666 };
667 };
667 };
668 "pytest" = super.buildPythonPackage {
668 "pytest" = super.buildPythonPackage {
669 name = "pytest-4.6.5";
669 name = "pytest-4.6.5";
670 doCheck = false;
670 doCheck = false;
671 propagatedBuildInputs = [
671 propagatedBuildInputs = [
672 self."py"
672 self."py"
673 self."six"
673 self."six"
674 self."packaging"
674 self."packaging"
675 self."attrs"
675 self."attrs"
676 self."atomicwrites"
676 self."atomicwrites"
677 self."pluggy"
677 self."pluggy"
678 self."importlib-metadata"
678 self."importlib-metadata"
679 self."wcwidth"
679 self."wcwidth"
680 self."funcsigs"
680 self."funcsigs"
681 self."pathlib2"
681 self."pathlib2"
682 self."more-itertools"
682 self."more-itertools"
683 ];
683 ];
684 src = fetchurl {
684 src = fetchurl {
685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
685 url = "https://files.pythonhosted.org/packages/2a/c6/1d1f32f6a5009900521b12e6560fb6b7245b0d4bc3fb771acd63d10e30e1/pytest-4.6.5.tar.gz";
686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
686 sha256 = "0iykwwfp4h181nd7rsihh2120b0rkawlw7rvbl19sgfspncr3hwg";
687 };
687 };
688 meta = {
688 meta = {
689 license = [ pkgs.lib.licenses.mit ];
689 license = [ pkgs.lib.licenses.mit ];
690 };
690 };
691 };
691 };
692 "pytest-cov" = super.buildPythonPackage {
692 "pytest-cov" = super.buildPythonPackage {
693 name = "pytest-cov-2.7.1";
693 name = "pytest-cov-2.7.1";
694 doCheck = false;
694 doCheck = false;
695 propagatedBuildInputs = [
695 propagatedBuildInputs = [
696 self."pytest"
696 self."pytest"
697 self."coverage"
697 self."coverage"
698 ];
698 ];
699 src = fetchurl {
699 src = fetchurl {
700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
700 url = "https://files.pythonhosted.org/packages/bb/0f/3db7ff86801883b21d5353b258c994b1b8e2abbc804e2273b8d0fd19004b/pytest-cov-2.7.1.tar.gz";
701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
701 sha256 = "0filvmmyqm715azsl09ql8hy2x7h286n6d8z5x42a1wpvvys83p0";
702 };
702 };
703 meta = {
703 meta = {
704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
704 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
705 };
705 };
706 };
706 };
707 "pytest-profiling" = super.buildPythonPackage {
707 "pytest-profiling" = super.buildPythonPackage {
708 name = "pytest-profiling-1.7.0";
708 name = "pytest-profiling-1.7.0";
709 doCheck = false;
709 doCheck = false;
710 propagatedBuildInputs = [
710 propagatedBuildInputs = [
711 self."six"
711 self."six"
712 self."pytest"
712 self."pytest"
713 self."gprof2dot"
713 self."gprof2dot"
714 ];
714 ];
715 src = fetchurl {
715 src = fetchurl {
716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
716 url = "https://files.pythonhosted.org/packages/39/70/22a4b33739f07f1732a63e33bbfbf68e0fa58cfba9d200e76d01921eddbf/pytest-profiling-1.7.0.tar.gz";
717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
717 sha256 = "0abz9gi26jpcfdzgsvwad91555lpgdc8kbymicmms8k2fqa8z4wk";
718 };
718 };
719 meta = {
719 meta = {
720 license = [ pkgs.lib.licenses.mit ];
720 license = [ pkgs.lib.licenses.mit ];
721 };
721 };
722 };
722 };
723 "pytest-runner" = super.buildPythonPackage {
723 "pytest-runner" = super.buildPythonPackage {
724 name = "pytest-runner-5.1";
724 name = "pytest-runner-5.1";
725 doCheck = false;
725 doCheck = false;
726 src = fetchurl {
726 src = fetchurl {
727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
727 url = "https://files.pythonhosted.org/packages/d9/6d/4b41a74b31720e25abd4799be72d54811da4b4d0233e38b75864dcc1f7ad/pytest-runner-5.1.tar.gz";
728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
728 sha256 = "0ykfcnpp8c22winj63qzc07l5axwlc9ikl8vn05sc32gv3417815";
729 };
729 };
730 meta = {
730 meta = {
731 license = [ pkgs.lib.licenses.mit ];
731 license = [ pkgs.lib.licenses.mit ];
732 };
732 };
733 };
733 };
734 "pytest-sugar" = super.buildPythonPackage {
734 "pytest-sugar" = super.buildPythonPackage {
735 name = "pytest-sugar-0.9.2";
735 name = "pytest-sugar-0.9.2";
736 doCheck = false;
736 doCheck = false;
737 propagatedBuildInputs = [
737 propagatedBuildInputs = [
738 self."pytest"
738 self."pytest"
739 self."termcolor"
739 self."termcolor"
740 self."packaging"
740 self."packaging"
741 ];
741 ];
742 src = fetchurl {
742 src = fetchurl {
743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
743 url = "https://files.pythonhosted.org/packages/55/59/f02f78d1c80f7e03e23177f60624c8106d4f23d124c921df103f65692464/pytest-sugar-0.9.2.tar.gz";
744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
744 sha256 = "1asq7yc4g8bx2sn7yy974mhc9ywvaihasjab4inkirdwn9s7mn7w";
745 };
745 };
746 meta = {
746 meta = {
747 license = [ pkgs.lib.licenses.bsdOriginal ];
747 license = [ pkgs.lib.licenses.bsdOriginal ];
748 };
748 };
749 };
749 };
750 "pytest-timeout" = super.buildPythonPackage {
750 "pytest-timeout" = super.buildPythonPackage {
751 name = "pytest-timeout-1.3.3";
751 name = "pytest-timeout-1.3.3";
752 doCheck = false;
752 doCheck = false;
753 propagatedBuildInputs = [
753 propagatedBuildInputs = [
754 self."pytest"
754 self."pytest"
755 ];
755 ];
756 src = fetchurl {
756 src = fetchurl {
757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
757 url = "https://files.pythonhosted.org/packages/13/48/7a166eaa29c1dca6cc253e3ba5773ff2e4aa4f567c1ea3905808e95ac5c1/pytest-timeout-1.3.3.tar.gz";
758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
758 sha256 = "1cczcjhw4xx5sjkhxlhc5c1bkr7x6fcyx12wrnvwfckshdvblc2a";
759 };
759 };
760 meta = {
760 meta = {
761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
761 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
762 };
762 };
763 };
763 };
764 "redis" = super.buildPythonPackage {
764 "redis" = super.buildPythonPackage {
765 name = "redis-3.5.3";
765 name = "redis-3.5.3";
766 doCheck = false;
766 doCheck = false;
767 src = fetchurl {
767 src = fetchurl {
768 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
768 url = "https://files.pythonhosted.org/packages/b3/17/1e567ff78c83854e16b98694411fe6e08c3426af866ad11397cddceb80d3/redis-3.5.3.tar.gz";
769 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
769 sha256 = "0e7e0cfca8660dea8b7d5cd8c4f6c5e29e11f31158c0b0ae91a397f00e5a05a2";
770 };
770 };
771 meta = {
771 meta = {
772 license = [ pkgs.lib.licenses.mit ];
772 license = [ pkgs.lib.licenses.mit ];
773 };
773 };
774 };
774 };
775 "repoze.lru" = super.buildPythonPackage {
775 "repoze.lru" = super.buildPythonPackage {
776 name = "repoze.lru-0.7";
776 name = "repoze.lru-0.7";
777 doCheck = false;
777 doCheck = false;
778 src = fetchurl {
778 src = fetchurl {
779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
779 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
780 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
781 };
781 };
782 meta = {
782 meta = {
783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
783 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
784 };
784 };
785 };
785 };
786 "rhodecode-vcsserver" = super.buildPythonPackage {
786 "rhodecode-vcsserver" = super.buildPythonPackage {
787 name = "rhodecode-vcsserver-4.26.0";
787 name = "rhodecode-vcsserver-4.27.0";
788 buildInputs = [
788 buildInputs = [
789 self."pytest"
789 self."pytest"
790 self."py"
790 self."py"
791 self."pytest-cov"
791 self."pytest-cov"
792 self."pytest-sugar"
792 self."pytest-sugar"
793 self."pytest-runner"
793 self."pytest-runner"
794 self."pytest-profiling"
794 self."pytest-profiling"
795 self."pytest-timeout"
795 self."pytest-timeout"
796 self."gprof2dot"
796 self."gprof2dot"
797 self."mock"
797 self."mock"
798 self."cov-core"
798 self."cov-core"
799 self."coverage"
799 self."coverage"
800 self."webtest"
800 self."webtest"
801 self."beautifulsoup4"
801 self."beautifulsoup4"
802 self."configobj"
802 self."configobj"
803 ];
803 ];
804 doCheck = true;
804 doCheck = true;
805 propagatedBuildInputs = [
805 propagatedBuildInputs = [
806 self."configobj"
806 self."configobj"
807 self."dogpile.cache"
807 self."dogpile.cache"
808 self."dogpile.core"
808 self."dogpile.core"
809 self."decorator"
809 self."decorator"
810 self."dulwich"
810 self."dulwich"
811 self."hgsubversion"
811 self."hgsubversion"
812 self."hg-evolve"
812 self."hg-evolve"
813 self."mako"
813 self."mako"
814 self."markupsafe"
814 self."markupsafe"
815 self."mercurial"
815 self."mercurial"
816 self."msgpack-python"
816 self."msgpack-python"
817 self."pastedeploy"
817 self."pastedeploy"
818 self."pyramid"
818 self."pyramid"
819 self."pyramid-mako"
819 self."pyramid-mako"
820 self."pygit2"
820 self."pygit2"
821 self."repoze.lru"
821 self."repoze.lru"
822 self."redis"
822 self."redis"
823 self."simplejson"
823 self."simplejson"
824 self."subprocess32"
824 self."subprocess32"
825 self."subvertpy"
825 self."subvertpy"
826 self."six"
826 self."six"
827 self."translationstring"
827 self."translationstring"
828 self."webob"
828 self."webob"
829 self."zope.deprecation"
829 self."zope.deprecation"
830 self."zope.interface"
830 self."zope.interface"
831 self."gevent"
831 self."gevent"
832 self."greenlet"
832 self."greenlet"
833 self."gunicorn"
833 self."gunicorn"
834 self."waitress"
834 self."waitress"
835 self."ipdb"
835 self."ipdb"
836 self."ipython"
836 self."ipython"
837 self."pytest"
837 self."pytest"
838 self."py"
838 self."py"
839 self."pytest-cov"
839 self."pytest-cov"
840 self."pytest-sugar"
840 self."pytest-sugar"
841 self."pytest-runner"
841 self."pytest-runner"
842 self."pytest-profiling"
842 self."pytest-profiling"
843 self."pytest-timeout"
843 self."pytest-timeout"
844 self."gprof2dot"
844 self."gprof2dot"
845 self."mock"
845 self."mock"
846 self."cov-core"
846 self."cov-core"
847 self."coverage"
847 self."coverage"
848 self."webtest"
848 self."webtest"
849 self."beautifulsoup4"
849 self."beautifulsoup4"
850 ];
850 ];
851 src = ./.;
851 src = ./.;
852 meta = {
852 meta = {
853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
853 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
854 };
854 };
855 };
855 };
856 "scandir" = super.buildPythonPackage {
856 "scandir" = super.buildPythonPackage {
857 name = "scandir-1.10.0";
857 name = "scandir-1.10.0";
858 doCheck = false;
858 doCheck = false;
859 src = fetchurl {
859 src = fetchurl {
860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
860 url = "https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz";
861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
861 sha256 = "1bkqwmf056pkchf05ywbnf659wqlp6lljcdb0y88wr9f0vv32ijd";
862 };
862 };
863 meta = {
863 meta = {
864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
864 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
865 };
865 };
866 };
866 };
867 "setproctitle" = super.buildPythonPackage {
867 "setproctitle" = super.buildPythonPackage {
868 name = "setproctitle-1.1.10";
868 name = "setproctitle-1.1.10";
869 doCheck = false;
869 doCheck = false;
870 src = fetchurl {
870 src = fetchurl {
871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
871 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
872 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
873 };
873 };
874 meta = {
874 meta = {
875 license = [ pkgs.lib.licenses.bsdOriginal ];
875 license = [ pkgs.lib.licenses.bsdOriginal ];
876 };
876 };
877 };
877 };
878 "setuptools" = super.buildPythonPackage {
878 "setuptools" = super.buildPythonPackage {
879 name = "setuptools-44.1.0";
879 name = "setuptools-44.1.0";
880 doCheck = false;
880 doCheck = false;
881 src = fetchurl {
881 src = fetchurl {
882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
882 url = "https://files.pythonhosted.org/packages/ed/7b/bbf89ca71e722b7f9464ebffe4b5ee20a9e5c9a555a56e2d3914bb9119a6/setuptools-44.1.0.zip";
883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
883 sha256 = "1jja896zvd1ppccnjbhkgagxbwchgq6vfamp6qn1hvywq6q9cjkr";
884 };
884 };
885 meta = {
885 meta = {
886 license = [ pkgs.lib.licenses.mit ];
886 license = [ pkgs.lib.licenses.mit ];
887 };
887 };
888 };
888 };
889
889
890 "setuptools-scm" = super.buildPythonPackage {
890 "setuptools-scm" = super.buildPythonPackage {
891 name = "setuptools-scm-3.5.0";
891 name = "setuptools-scm-3.5.0";
892 doCheck = false;
892 doCheck = false;
893 src = fetchurl {
893 src = fetchurl {
894 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
894 url = "https://files.pythonhosted.org/packages/b2/f7/60a645aae001a2e06cf4b8db2fba9d9f36b8fd378f10647e3e218b61b74b/setuptools_scm-3.5.0.tar.gz";
895 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
895 sha256 = "5bdf21a05792903cafe7ae0c9501182ab52497614fa6b1750d9dbae7b60c1a87";
896 };
896 };
897 meta = {
897 meta = {
898 license = [ pkgs.lib.licenses.psfl ];
898 license = [ pkgs.lib.licenses.psfl ];
899 };
899 };
900 };
900 };
901
901
902 "simplegeneric" = super.buildPythonPackage {
902 "simplegeneric" = super.buildPythonPackage {
903 name = "simplegeneric-0.8.1";
903 name = "simplegeneric-0.8.1";
904 doCheck = false;
904 doCheck = false;
905 src = fetchurl {
905 src = fetchurl {
906 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
906 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
907 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
907 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
908 };
908 };
909 meta = {
909 meta = {
910 license = [ pkgs.lib.licenses.zpl21 ];
910 license = [ pkgs.lib.licenses.zpl21 ];
911 };
911 };
912 };
912 };
913 "simplejson" = super.buildPythonPackage {
913 "simplejson" = super.buildPythonPackage {
914 name = "simplejson-3.16.0";
914 name = "simplejson-3.16.0";
915 doCheck = false;
915 doCheck = false;
916 src = fetchurl {
916 src = fetchurl {
917 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
917 url = "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz";
918 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
918 sha256 = "19cws1syk8jzq2pw43878dv6fjkb0ifvjpx0i9aajix6kc9jkwxi";
919 };
919 };
920 meta = {
920 meta = {
921 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
921 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
922 };
922 };
923 };
923 };
924 "six" = super.buildPythonPackage {
924 "six" = super.buildPythonPackage {
925 name = "six-1.11.0";
925 name = "six-1.11.0";
926 doCheck = false;
926 doCheck = false;
927 src = fetchurl {
927 src = fetchurl {
928 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
928 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
929 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
929 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
930 };
930 };
931 meta = {
931 meta = {
932 license = [ pkgs.lib.licenses.mit ];
932 license = [ pkgs.lib.licenses.mit ];
933 };
933 };
934 };
934 };
935 "subprocess32" = super.buildPythonPackage {
935 "subprocess32" = super.buildPythonPackage {
936 name = "subprocess32-3.5.4";
936 name = "subprocess32-3.5.4";
937 doCheck = false;
937 doCheck = false;
938 src = fetchurl {
938 src = fetchurl {
939 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
939 url = "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz";
940 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
940 sha256 = "17f7mvwx2271s1wrl0qac3wjqqnrqag866zs3qc8v5wp0k43fagb";
941 };
941 };
942 meta = {
942 meta = {
943 license = [ pkgs.lib.licenses.psfl ];
943 license = [ pkgs.lib.licenses.psfl ];
944 };
944 };
945 };
945 };
946 "subvertpy" = super.buildPythonPackage {
946 "subvertpy" = super.buildPythonPackage {
947 name = "subvertpy-0.10.1";
947 name = "subvertpy-0.10.1";
948 doCheck = false;
948 doCheck = false;
949 src = fetchurl {
949 src = fetchurl {
950 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
950 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
951 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
951 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
952 };
952 };
953 meta = {
953 meta = {
954 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
954 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
955 };
955 };
956 };
956 };
957 "termcolor" = super.buildPythonPackage {
957 "termcolor" = super.buildPythonPackage {
958 name = "termcolor-1.1.0";
958 name = "termcolor-1.1.0";
959 doCheck = false;
959 doCheck = false;
960 src = fetchurl {
960 src = fetchurl {
961 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
961 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
962 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
962 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
963 };
963 };
964 meta = {
964 meta = {
965 license = [ pkgs.lib.licenses.mit ];
965 license = [ pkgs.lib.licenses.mit ];
966 };
966 };
967 };
967 };
968 "traitlets" = super.buildPythonPackage {
968 "traitlets" = super.buildPythonPackage {
969 name = "traitlets-4.3.3";
969 name = "traitlets-4.3.3";
970 doCheck = false;
970 doCheck = false;
971 propagatedBuildInputs = [
971 propagatedBuildInputs = [
972 self."ipython-genutils"
972 self."ipython-genutils"
973 self."six"
973 self."six"
974 self."decorator"
974 self."decorator"
975 self."enum34"
975 self."enum34"
976 ];
976 ];
977 src = fetchurl {
977 src = fetchurl {
978 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
978 url = "https://files.pythonhosted.org/packages/75/b0/43deb021bc943f18f07cbe3dac1d681626a48997b7ffa1e7fb14ef922b21/traitlets-4.3.3.tar.gz";
979 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
979 sha256 = "1xsrwgivpkxlbr4dfndfsi098s29yqgswgjc1qqn69yxklvfw8yh";
980 };
980 };
981 meta = {
981 meta = {
982 license = [ pkgs.lib.licenses.bsdOriginal ];
982 license = [ pkgs.lib.licenses.bsdOriginal ];
983 };
983 };
984 };
984 };
985 "translationstring" = super.buildPythonPackage {
985 "translationstring" = super.buildPythonPackage {
986 name = "translationstring-1.3";
986 name = "translationstring-1.3";
987 doCheck = false;
987 doCheck = false;
988 src = fetchurl {
988 src = fetchurl {
989 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
989 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
990 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
990 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
991 };
991 };
992 meta = {
992 meta = {
993 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
993 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
994 };
994 };
995 };
995 };
996 "venusian" = super.buildPythonPackage {
996 "venusian" = super.buildPythonPackage {
997 name = "venusian-1.2.0";
997 name = "venusian-1.2.0";
998 doCheck = false;
998 doCheck = false;
999 src = fetchurl {
999 src = fetchurl {
1000 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
1000 url = "https://files.pythonhosted.org/packages/7e/6f/40a9d43ac77cb51cb62be5b5662d170f43f8037bdc4eab56336c4ca92bb7/venusian-1.2.0.tar.gz";
1001 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
1001 sha256 = "0ghyx66g8ikx9nx1mnwqvdcqm11i1vlq0hnvwl50s48bp22q5v34";
1002 };
1002 };
1003 meta = {
1003 meta = {
1004 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1004 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
1005 };
1005 };
1006 };
1006 };
1007 "waitress" = super.buildPythonPackage {
1007 "waitress" = super.buildPythonPackage {
1008 name = "waitress-1.3.1";
1008 name = "waitress-1.3.1";
1009 doCheck = false;
1009 doCheck = false;
1010 src = fetchurl {
1010 src = fetchurl {
1011 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
1011 url = "https://files.pythonhosted.org/packages/a6/e6/708da7bba65898e5d759ade8391b1077e49d07be0b0223c39f5be04def56/waitress-1.3.1.tar.gz";
1012 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1012 sha256 = "1iysl8ka3l4cdrr0r19fh1cv28q41mwpvgsb81ji7k4shkb0k3i7";
1013 };
1013 };
1014 meta = {
1014 meta = {
1015 license = [ pkgs.lib.licenses.zpl21 ];
1015 license = [ pkgs.lib.licenses.zpl21 ];
1016 };
1016 };
1017 };
1017 };
1018 "wcwidth" = super.buildPythonPackage {
1018 "wcwidth" = super.buildPythonPackage {
1019 name = "wcwidth-0.1.9";
1019 name = "wcwidth-0.1.9";
1020 doCheck = false;
1020 doCheck = false;
1021 src = fetchurl {
1021 src = fetchurl {
1022 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1022 url = "https://files.pythonhosted.org/packages/25/9d/0acbed6e4a4be4fc99148f275488580968f44ddb5e69b8ceb53fc9df55a0/wcwidth-0.1.9.tar.gz";
1023 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1023 sha256 = "1wf5ycjx8s066rdvr0fgz4xds9a8zhs91c4jzxvvymm1c8l8cwzf";
1024 };
1024 };
1025 meta = {
1025 meta = {
1026 license = [ pkgs.lib.licenses.mit ];
1026 license = [ pkgs.lib.licenses.mit ];
1027 };
1027 };
1028 };
1028 };
1029 "webob" = super.buildPythonPackage {
1029 "webob" = super.buildPythonPackage {
1030 name = "webob-1.8.5";
1030 name = "webob-1.8.5";
1031 doCheck = false;
1031 doCheck = false;
1032 src = fetchurl {
1032 src = fetchurl {
1033 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1033 url = "https://files.pythonhosted.org/packages/9d/1a/0c89c070ee2829c934cb6c7082287c822e28236a4fcf90063e6be7c35532/WebOb-1.8.5.tar.gz";
1034 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1034 sha256 = "11khpzaxc88q31v25ic330gsf56fwmbdc9b30br8mvp0fmwspah5";
1035 };
1035 };
1036 meta = {
1036 meta = {
1037 license = [ pkgs.lib.licenses.mit ];
1037 license = [ pkgs.lib.licenses.mit ];
1038 };
1038 };
1039 };
1039 };
1040 "webtest" = super.buildPythonPackage {
1040 "webtest" = super.buildPythonPackage {
1041 name = "webtest-2.0.34";
1041 name = "webtest-2.0.34";
1042 doCheck = false;
1042 doCheck = false;
1043 propagatedBuildInputs = [
1043 propagatedBuildInputs = [
1044 self."six"
1044 self."six"
1045 self."webob"
1045 self."webob"
1046 self."waitress"
1046 self."waitress"
1047 self."beautifulsoup4"
1047 self."beautifulsoup4"
1048 ];
1048 ];
1049 src = fetchurl {
1049 src = fetchurl {
1050 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1050 url = "https://files.pythonhosted.org/packages/2c/74/a0e63feee438735d628631e2b70d82280276a930637ac535479e5fad9427/WebTest-2.0.34.tar.gz";
1051 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1051 sha256 = "0x1y2c8z4fmpsny4hbp6ka37si2g10r5r2jwxhvv5mx7g3blq4bi";
1052 };
1052 };
1053 meta = {
1053 meta = {
1054 license = [ pkgs.lib.licenses.mit ];
1054 license = [ pkgs.lib.licenses.mit ];
1055 };
1055 };
1056 };
1056 };
1057 "zipp" = super.buildPythonPackage {
1057 "zipp" = super.buildPythonPackage {
1058 name = "zipp-1.2.0";
1058 name = "zipp-1.2.0";
1059 doCheck = false;
1059 doCheck = false;
1060 propagatedBuildInputs = [
1060 propagatedBuildInputs = [
1061 self."contextlib2"
1061 self."contextlib2"
1062 ];
1062 ];
1063 src = fetchurl {
1063 src = fetchurl {
1064 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1064 url = "https://files.pythonhosted.org/packages/78/08/d52f0ea643bc1068d6dc98b412f4966a9b63255d20911a23ac3220c033c4/zipp-1.2.0.tar.gz";
1065 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1065 sha256 = "1c91lnv1bxjimh8as27hz7bghsjkkbxn1d37xq7in9c82iai0167";
1066 };
1066 };
1067 meta = {
1067 meta = {
1068 license = [ pkgs.lib.licenses.mit ];
1068 license = [ pkgs.lib.licenses.mit ];
1069 };
1069 };
1070 };
1070 };
1071 "zope.deprecation" = super.buildPythonPackage {
1071 "zope.deprecation" = super.buildPythonPackage {
1072 name = "zope.deprecation-4.4.0";
1072 name = "zope.deprecation-4.4.0";
1073 doCheck = false;
1073 doCheck = false;
1074 propagatedBuildInputs = [
1074 propagatedBuildInputs = [
1075 self."setuptools"
1075 self."setuptools"
1076 ];
1076 ];
1077 src = fetchurl {
1077 src = fetchurl {
1078 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1078 url = "https://files.pythonhosted.org/packages/34/da/46e92d32d545dd067b9436279d84c339e8b16de2ca393d7b892bc1e1e9fd/zope.deprecation-4.4.0.tar.gz";
1079 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1079 sha256 = "1pz2cv7gv9y1r3m0bdv7ks1alagmrn5msm5spwdzkb2by0w36i8d";
1080 };
1080 };
1081 meta = {
1081 meta = {
1082 license = [ pkgs.lib.licenses.zpl21 ];
1082 license = [ pkgs.lib.licenses.zpl21 ];
1083 };
1083 };
1084 };
1084 };
1085 "zope.interface" = super.buildPythonPackage {
1085 "zope.interface" = super.buildPythonPackage {
1086 name = "zope.interface-4.6.0";
1086 name = "zope.interface-4.6.0";
1087 doCheck = false;
1087 doCheck = false;
1088 propagatedBuildInputs = [
1088 propagatedBuildInputs = [
1089 self."setuptools"
1089 self."setuptools"
1090 ];
1090 ];
1091 src = fetchurl {
1091 src = fetchurl {
1092 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1092 url = "https://files.pythonhosted.org/packages/4e/d0/c9d16bd5b38de44a20c6dc5d5ed80a49626fafcb3db9f9efdc2a19026db6/zope.interface-4.6.0.tar.gz";
1093 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1093 sha256 = "1rgh2x3rcl9r0v0499kf78xy86rnmanajf4ywmqb943wpk50sg8v";
1094 };
1094 };
1095 meta = {
1095 meta = {
1096 license = [ pkgs.lib.licenses.zpl21 ];
1096 license = [ pkgs.lib.licenses.zpl21 ];
1097 };
1097 };
1098 };
1098 };
1099
1099
1100 ### Test requirements
1100 ### Test requirements
1101
1101
1102
1102
1103 }
1103 }
@@ -1,1 +1,1 b''
1 4.26.0 No newline at end of file
1 4.27.0 No newline at end of file
@@ -1,130 +1,130 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import os
17 import os
18 import sys
18 import sys
19 import traceback
19 import traceback
20 import logging
20 import logging
21 import urlparse
21 import urlparse
22
22
23 from vcsserver import exceptions
23 from vcsserver import exceptions
24 from vcsserver.exceptions import NoContentException
24 from vcsserver.exceptions import NoContentException
25 from vcsserver.hgcompat import (archival)
25 from vcsserver.hgcompat import (archival)
26 from vcsserver.lib.rc_cache import region_meta
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
30 class RepoFactory(object):
30 class RepoFactory(object):
31 """
31 """
32 Utility to create instances of repository
32 Utility to create instances of repository
33
33
34 It provides internal caching of the `repo` object based on
34 It provides internal caching of the `repo` object based on
35 the :term:`call context`.
35 the :term:`call context`.
36 """
36 """
37 repo_type = None
37 repo_type = None
38
38
39 def __init__(self):
39 def __init__(self):
40 self._cache_region = region_meta.dogpile_cache_regions['repo_object']
40 pass
41
41
42 def _create_config(self, path, config):
42 def _create_config(self, path, config):
43 config = {}
43 config = {}
44 return config
44 return config
45
45
46 def _create_repo(self, wire, create):
46 def _create_repo(self, wire, create):
47 raise NotImplementedError()
47 raise NotImplementedError()
48
48
49 def repo(self, wire, create=False):
49 def repo(self, wire, create=False):
50 raise NotImplementedError()
50 raise NotImplementedError()
51
51
52
52
53 def obfuscate_qs(query_string):
53 def obfuscate_qs(query_string):
54 if query_string is None:
54 if query_string is None:
55 return None
55 return None
56
56
57 parsed = []
57 parsed = []
58 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
58 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
59 if k in ['auth_token', 'api_key']:
59 if k in ['auth_token', 'api_key']:
60 v = "*****"
60 v = "*****"
61 parsed.append((k, v))
61 parsed.append((k, v))
62
62
63 return '&'.join('{}{}'.format(
63 return '&'.join('{}{}'.format(
64 k, '={}'.format(v) if v else '') for k, v in parsed)
64 k, '={}'.format(v) if v else '') for k, v in parsed)
65
65
66
66
67 def raise_from_original(new_type):
67 def raise_from_original(new_type):
68 """
68 """
69 Raise a new exception type with original args and traceback.
69 Raise a new exception type with original args and traceback.
70 """
70 """
71 exc_type, exc_value, exc_traceback = sys.exc_info()
71 exc_type, exc_value, exc_traceback = sys.exc_info()
72 new_exc = new_type(*exc_value.args)
72 new_exc = new_type(*exc_value.args)
73 # store the original traceback into the new exc
73 # store the original traceback into the new exc
74 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
74 new_exc._org_exc_tb = traceback.format_exc(exc_traceback)
75
75
76 try:
76 try:
77 raise new_exc, None, exc_traceback
77 raise new_exc, None, exc_traceback
78 finally:
78 finally:
79 del exc_traceback
79 del exc_traceback
80
80
81
81
82 class ArchiveNode(object):
82 class ArchiveNode(object):
83 def __init__(self, path, mode, is_link, raw_bytes):
83 def __init__(self, path, mode, is_link, raw_bytes):
84 self.path = path
84 self.path = path
85 self.mode = mode
85 self.mode = mode
86 self.is_link = is_link
86 self.is_link = is_link
87 self.raw_bytes = raw_bytes
87 self.raw_bytes = raw_bytes
88
88
89
89
90 def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
90 def archive_repo(walker, archive_dest_path, kind, mtime, archive_at_path,
91 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
91 archive_dir_name, commit_id, write_metadata=True, extra_metadata=None):
92 """
92 """
93 walker should be a file walker, for example:
93 walker should be a file walker, for example:
94 def walker():
94 def walker():
95 for file_info in files:
95 for file_info in files:
96 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
96 yield ArchiveNode(fn, mode, is_link, ctx[fn].data)
97 """
97 """
98 extra_metadata = extra_metadata or {}
98 extra_metadata = extra_metadata or {}
99
99
100 if kind == "tgz":
100 if kind == "tgz":
101 archiver = archival.tarit(archive_dest_path, mtime, "gz")
101 archiver = archival.tarit(archive_dest_path, mtime, "gz")
102 elif kind == "tbz2":
102 elif kind == "tbz2":
103 archiver = archival.tarit(archive_dest_path, mtime, "bz2")
103 archiver = archival.tarit(archive_dest_path, mtime, "bz2")
104 elif kind == 'zip':
104 elif kind == 'zip':
105 archiver = archival.zipit(archive_dest_path, mtime)
105 archiver = archival.zipit(archive_dest_path, mtime)
106 else:
106 else:
107 raise exceptions.ArchiveException()(
107 raise exceptions.ArchiveException()(
108 'Remote does not support: "%s" archive type.' % kind)
108 'Remote does not support: "%s" archive type.' % kind)
109
109
110 for f in walker(commit_id, archive_at_path):
110 for f in walker(commit_id, archive_at_path):
111 f_path = os.path.join(archive_dir_name, f.path.lstrip('/'))
111 f_path = os.path.join(archive_dir_name, f.path.lstrip('/'))
112 try:
112 try:
113 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
113 archiver.addfile(f_path, f.mode, f.is_link, f.raw_bytes())
114 except NoContentException:
114 except NoContentException:
115 # NOTE(marcink): this is a special case for SVN so we can create "empty"
115 # NOTE(marcink): this is a special case for SVN so we can create "empty"
116 # directories which arent supported by archiver
116 # directories which arent supported by archiver
117 archiver.addfile(os.path.join(f_path, '.dir'), f.mode, f.is_link, '')
117 archiver.addfile(os.path.join(f_path, '.dir'), f.mode, f.is_link, '')
118
118
119 if write_metadata:
119 if write_metadata:
120 metadata = dict([
120 metadata = dict([
121 ('commit_id', commit_id),
121 ('commit_id', commit_id),
122 ('mtime', mtime),
122 ('mtime', mtime),
123 ])
123 ])
124 metadata.update(extra_metadata)
124 metadata.update(extra_metadata)
125
125
126 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
126 meta = ["%s:%s" % (f_name, value) for f_name, value in metadata.items()]
127 f_path = os.path.join(archive_dir_name, '.archival.txt')
127 f_path = os.path.join(archive_dir_name, '.archival.txt')
128 archiver.addfile(f_path, 0o644, False, '\n'.join(meta))
128 archiver.addfile(f_path, 0o644, False, '\n'.join(meta))
129
129
130 return archiver.done()
130 return archiver.done()
@@ -1,1226 +1,1281 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import collections
18 import collections
19 import logging
19 import logging
20 import os
20 import os
21 import posixpath as vcspath
21 import posixpath as vcspath
22 import re
22 import re
23 import stat
23 import stat
24 import traceback
24 import traceback
25 import urllib
25 import urllib
26 import urllib2
26 import urllib2
27 from functools import wraps
27 from functools import wraps
28
28
29 import more_itertools
29 import more_itertools
30 import pygit2
30 import pygit2
31 from pygit2 import Repository as LibGit2Repo
31 from pygit2 import Repository as LibGit2Repo
32 from pygit2 import index as LibGit2Index
32 from pygit2 import index as LibGit2Index
33 from dulwich import index, objects
33 from dulwich import index, objects
34 from dulwich.client import HttpGitClient, LocalGitClient
34 from dulwich.client import HttpGitClient, LocalGitClient
35 from dulwich.errors import (
35 from dulwich.errors import (
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
36 NotGitRepository, ChecksumMismatch, WrongObjectException,
37 MissingCommitError, ObjectMissing, HangupException,
37 MissingCommitError, ObjectMissing, HangupException,
38 UnexpectedCommandError)
38 UnexpectedCommandError)
39 from dulwich.repo import Repo as DulwichRepo
39 from dulwich.repo import Repo as DulwichRepo
40 from dulwich.server import update_server_info
40 from dulwich.server import update_server_info
41
41
42 from vcsserver import exceptions, settings, subprocessio
42 from vcsserver import exceptions, settings, subprocessio
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
43 from vcsserver.utils import safe_str, safe_int, safe_unicode
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
44 from vcsserver.base import RepoFactory, obfuscate_qs, ArchiveNode, archive_repo
45 from vcsserver.hgcompat import (
45 from vcsserver.hgcompat import (
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
46 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
47 from vcsserver.git_lfs.lib import LFSOidStore
47 from vcsserver.git_lfs.lib import LFSOidStore
48 from vcsserver.vcs_base import RemoteBase
48 from vcsserver.vcs_base import RemoteBase
49
49
50 DIR_STAT = stat.S_IFDIR
50 DIR_STAT = stat.S_IFDIR
51 FILE_MODE = stat.S_IFMT
51 FILE_MODE = stat.S_IFMT
52 GIT_LINK = objects.S_IFGITLINK
52 GIT_LINK = objects.S_IFGITLINK
53 PEELED_REF_MARKER = '^{}'
53 PEELED_REF_MARKER = '^{}'
54
54
55
55
56 log = logging.getLogger(__name__)
56 log = logging.getLogger(__name__)
57
57
58
58
59 def str_to_dulwich(value):
59 def str_to_dulwich(value):
60 """
60 """
61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
61 Dulwich 0.10.1a requires `unicode` objects to be passed in.
62 """
62 """
63 return value.decode(settings.WIRE_ENCODING)
63 return value.decode(settings.WIRE_ENCODING)
64
64
65
65
66 def reraise_safe_exceptions(func):
66 def reraise_safe_exceptions(func):
67 """Converts Dulwich exceptions to something neutral."""
67 """Converts Dulwich exceptions to something neutral."""
68
68
69 @wraps(func)
69 @wraps(func)
70 def wrapper(*args, **kwargs):
70 def wrapper(*args, **kwargs):
71 try:
71 try:
72 return func(*args, **kwargs)
72 return func(*args, **kwargs)
73 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
73 except (ChecksumMismatch, WrongObjectException, MissingCommitError, ObjectMissing,) as e:
74 exc = exceptions.LookupException(org_exc=e)
74 exc = exceptions.LookupException(org_exc=e)
75 raise exc(safe_str(e))
75 raise exc(safe_str(e))
76 except (HangupException, UnexpectedCommandError) as e:
76 except (HangupException, UnexpectedCommandError) as e:
77 exc = exceptions.VcsException(org_exc=e)
77 exc = exceptions.VcsException(org_exc=e)
78 raise exc(safe_str(e))
78 raise exc(safe_str(e))
79 except Exception as e:
79 except Exception as e:
80 # NOTE(marcink): becuase of how dulwich handles some exceptions
80 # NOTE(marcink): becuase of how dulwich handles some exceptions
81 # (KeyError on empty repos), we cannot track this and catch all
81 # (KeyError on empty repos), we cannot track this and catch all
82 # exceptions, it's an exceptions from other handlers
82 # exceptions, it's an exceptions from other handlers
83 #if not hasattr(e, '_vcs_kind'):
83 #if not hasattr(e, '_vcs_kind'):
84 #log.exception("Unhandled exception in git remote call")
84 #log.exception("Unhandled exception in git remote call")
85 #raise_from_original(exceptions.UnhandledException)
85 #raise_from_original(exceptions.UnhandledException)
86 raise
86 raise
87 return wrapper
87 return wrapper
88
88
89
89
90 class Repo(DulwichRepo):
90 class Repo(DulwichRepo):
91 """
91 """
92 A wrapper for dulwich Repo class.
92 A wrapper for dulwich Repo class.
93
93
94 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
94 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
95 "Too many open files" error. We need to close all opened file descriptors
95 "Too many open files" error. We need to close all opened file descriptors
96 once the repo object is destroyed.
96 once the repo object is destroyed.
97 """
97 """
98 def __del__(self):
98 def __del__(self):
99 if hasattr(self, 'object_store'):
99 if hasattr(self, 'object_store'):
100 self.close()
100 self.close()
101
101
102
102
103 class Repository(LibGit2Repo):
103 class Repository(LibGit2Repo):
104
104
105 def __enter__(self):
105 def __enter__(self):
106 return self
106 return self
107
107
108 def __exit__(self, exc_type, exc_val, exc_tb):
108 def __exit__(self, exc_type, exc_val, exc_tb):
109 self.free()
109 self.free()
110
110
111
111
112 class GitFactory(RepoFactory):
112 class GitFactory(RepoFactory):
113 repo_type = 'git'
113 repo_type = 'git'
114
114
115 def _create_repo(self, wire, create, use_libgit2=False):
115 def _create_repo(self, wire, create, use_libgit2=False):
116 if use_libgit2:
116 if use_libgit2:
117 return Repository(wire['path'])
117 return Repository(wire['path'])
118 else:
118 else:
119 repo_path = str_to_dulwich(wire['path'])
119 repo_path = str_to_dulwich(wire['path'])
120 return Repo(repo_path)
120 return Repo(repo_path)
121
121
122 def repo(self, wire, create=False, use_libgit2=False):
122 def repo(self, wire, create=False, use_libgit2=False):
123 """
123 """
124 Get a repository instance for the given path.
124 Get a repository instance for the given path.
125 """
125 """
126 return self._create_repo(wire, create, use_libgit2)
126 return self._create_repo(wire, create, use_libgit2)
127
127
128 def repo_libgit2(self, wire):
128 def repo_libgit2(self, wire):
129 return self.repo(wire, use_libgit2=True)
129 return self.repo(wire, use_libgit2=True)
130
130
131
131
132 class GitRemote(RemoteBase):
132 class GitRemote(RemoteBase):
133
133
134 def __init__(self, factory):
134 def __init__(self, factory):
135 self._factory = factory
135 self._factory = factory
136 self._bulk_methods = {
136 self._bulk_methods = {
137 "date": self.date,
137 "date": self.date,
138 "author": self.author,
138 "author": self.author,
139 "branch": self.branch,
139 "branch": self.branch,
140 "message": self.message,
140 "message": self.message,
141 "parents": self.parents,
141 "parents": self.parents,
142 "_commit": self.revision,
142 "_commit": self.revision,
143 }
143 }
144
144
145 def _wire_to_config(self, wire):
145 def _wire_to_config(self, wire):
146 if 'config' in wire:
146 if 'config' in wire:
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
147 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
148 return {}
148 return {}
149
149
150 def _remote_conf(self, config):
150 def _remote_conf(self, config):
151 params = [
151 params = [
152 '-c', 'core.askpass=""',
152 '-c', 'core.askpass=""',
153 ]
153 ]
154 ssl_cert_dir = config.get('vcs_ssl_dir')
154 ssl_cert_dir = config.get('vcs_ssl_dir')
155 if ssl_cert_dir:
155 if ssl_cert_dir:
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
156 params.extend(['-c', 'http.sslCAinfo={}'.format(ssl_cert_dir)])
157 return params
157 return params
158
158
159 @reraise_safe_exceptions
159 @reraise_safe_exceptions
160 def discover_git_version(self):
160 def discover_git_version(self):
161 stdout, _ = self.run_git_command(
161 stdout, _ = self.run_git_command(
162 {}, ['--version'], _bare=True, _safe=True)
162 {}, ['--version'], _bare=True, _safe=True)
163 prefix = 'git version'
163 prefix = 'git version'
164 if stdout.startswith(prefix):
164 if stdout.startswith(prefix):
165 stdout = stdout[len(prefix):]
165 stdout = stdout[len(prefix):]
166 return stdout.strip()
166 return stdout.strip()
167
167
@reraise_safe_exceptions
def is_empty(self, wire):
    """Return True when the repository contains no commits."""
    repo_init = self._factory.repo_libgit2(wire)
    with repo_init as repo:
        try:
            # resolving HEAD raises on a freshly initialised repository
            if repo.head.name:
                return False

            # NOTE(marcink): double-check using the more expensive probe
            return repo.is_empty
        except Exception:
            # best-effort: any failure above means "looks empty"
            pass

        return True
184
184
@reraise_safe_exceptions
def assert_correct_path(self, wire):
    # Verify that wire['path'] points at a repository libgit2 can open.
    # The result is cached per (context_uid, repo_id); the inner function
    # name and argument order participate in the cache key, so they must
    # stay stable.
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _assert_correct_path(_context_uid, _repo_id):
        try:
            repo_init = self._factory.repo_libgit2(wire)
            with repo_init as repo:
                pass  # successfully opening the repo is the whole check
        except pygit2.GitError:
            path = wire.get('path')
            tb = traceback.format_exc()
            log.debug("Invalid Git path `%s`, tb: %s", path, tb)
            return False

        return True
    return _assert_correct_path(context_uid, repo_id)
202
203
@reraise_safe_exceptions
def bare(self, wire):
    """Return True when the repository is bare (has no working tree)."""
    with self._factory.repo_libgit2(wire) as repo:
        return repo.is_bare
208
209
@reraise_safe_exceptions
def blob_as_pretty_string(self, wire, sha):
    """Return the raw content of the blob identified by ``sha``."""
    with self._factory.repo_libgit2(wire) as repo:
        return repo[sha].data
216
217
@reraise_safe_exceptions
def blob_raw_length(self, wire, sha):
    # Return the uncompressed size in bytes of the blob ``sha``.
    # Cached per (repo_id, sha); the inner function name participates in
    # the cache key and must stay stable.
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _blob_raw_length(_repo_id, _sha):

        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob = repo[sha]
            return blob.size

    return _blob_raw_length(repo_id, sha)
229
231
230 def _parse_lfs_pointer(self, raw_content):
232 def _parse_lfs_pointer(self, raw_content):
231
233
232 spec_string = 'version https://git-lfs.github.com/spec'
234 spec_string = 'version https://git-lfs.github.com/spec'
233 if raw_content and raw_content.startswith(spec_string):
235 if raw_content and raw_content.startswith(spec_string):
234 pattern = re.compile(r"""
236 pattern = re.compile(r"""
235 (?:\n)?
237 (?:\n)?
236 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
238 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
237 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
239 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
238 ^size[ ](?P<oid_size>[0-9]+)\n
240 ^size[ ](?P<oid_size>[0-9]+)\n
239 (?:\n)?
241 (?:\n)?
240 """, re.VERBOSE | re.MULTILINE)
242 """, re.VERBOSE | re.MULTILINE)
241 match = pattern.match(raw_content)
243 match = pattern.match(raw_content)
242 if match:
244 if match:
243 return match.groupdict()
245 return match.groupdict()
244
246
245 return {}
247 return {}
246
248
@reraise_safe_exceptions
def is_large_file(self, wire, commit_id):
    # Return parsed LFS pointer data for the blob ``commit_id``, or {}
    # when the blob is binary / not an LFS pointer.
    # Cached per (repo_id, commit_id); inner function name is part of the
    # cache key and must stay stable.
    cache_on, context_uid, repo_id = self._cache_on(wire)

    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _is_large_file(_repo_id, _sha):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob = repo[commit_id]
            if blob.is_binary:
                return {}

            return self._parse_lfs_pointer(blob.data)

    return _is_large_file(repo_id, commit_id)
262
265
@reraise_safe_exceptions
def is_binary(self, wire, tree_id):
    # Return True when the object ``tree_id`` (a blob id) holds binary
    # data. Cached per (repo_id, tree_id); inner function name is part of
    # the cache key and must stay stable.
    cache_on, context_uid, repo_id = self._cache_on(wire)

    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _is_binary(_repo_id, _tree_id):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            blob_obj = repo[tree_id]
            return blob_obj.is_binary

    return _is_binary(repo_id, tree_id)
275
279
@reraise_safe_exceptions
def in_largefiles_store(self, wire, oid):
    """Check whether the configured LFS store already holds ``oid``.

    Returns False when no store location is configured.
    """
    conf = self._wire_to_config(wire)
    with self._factory.repo_libgit2(wire) as repo:
        repo_name = repo.path

    store_location = conf.get('vcs_git_lfs_store_location')
    if not store_location:
        return False

    store = LFSOidStore(
        oid=oid, repo=repo_name, store_location=store_location)
    return store.has_oid()
291
295
@reraise_safe_exceptions
def store_path(self, wire, oid):
    """Return the filesystem path of the LFS object ``oid``.

    :raises ValueError: when no LFS store location is configured.
    """
    conf = self._wire_to_config(wire)
    with self._factory.repo_libgit2(wire) as repo:
        repo_name = repo.path

    store_location = conf.get('vcs_git_lfs_store_location')
    if store_location:
        store = LFSOidStore(
            oid=oid, repo=repo_name, store_location=store_location)
        return store.oid_path
    raise ValueError('Unable to fetch oid with path {}'.format(oid))
305
309
@reraise_safe_exceptions
def bulk_request(self, wire, rev, pre_load):
    # Resolve several commit attributes for ``rev`` in a single call by
    # dispatching through self._bulk_methods. Cached per
    # (repo_id, rev, sorted(pre_load)); inner function name is part of
    # the cache key and must stay stable.
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _bulk_request(_repo_id, _rev, _pre_load):
        result = {}
        for attr in pre_load:
            try:
                method = self._bulk_methods[attr]
                args = [wire, rev]
                result[attr] = method(*args)
            except KeyError as e:
                # unknown attribute name requested by the caller
                raise exceptions.VcsException(e)(
                    "Unknown bulk attribute: %s" % attr)
        return result

    return _bulk_request(repo_id, rev, sorted(pre_load))
323
328
def _build_opener(self, url):
    """Build a urllib2 opener for ``url``, registering basic and digest
    auth handlers when the URL carries credentials."""
    auth_handlers = []
    _, authinfo = url_parser(url).authinfo()

    if authinfo:
        # register the credentials with a password manager
        passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passmgr.add_password(*authinfo)

        auth_handlers.extend((
            httpbasicauthhandler(passmgr),
            httpdigestauthhandler(passmgr)))

    return urllib2.build_opener(*auth_handlers)
338
343
339 def _type_id_to_name(self, type_id):
344 def _type_id_to_name(self, type_id):
340 return {
345 return {
341 1: b'commit',
346 1: b'commit',
342 2: b'tree',
347 2: b'tree',
343 3: b'blob',
348 3: b'blob',
344 4: b'tag'
349 4: b'tag'
345 }[type_id]
350 }[type_id]
346
351
@reraise_safe_exceptions
def check_url(self, url, config):
    # Verify that ``url`` points at a reachable git "smart HTTP"
    # repository by probing its info/refs endpoint.
    # NOTE(review): ``config`` is accepted but never read here — presumably
    # kept for interface symmetry with other backends; confirm with callers.
    url_obj = url_parser(url)
    test_uri, _ = url_obj.authinfo()
    # never log credentials or sensitive query parameters
    url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
    url_obj.query = obfuscate_qs(url_obj.query)
    cleaned_uri = str(url_obj)
    log.info("Checking URL for remote cloning/import: %s", cleaned_uri)

    if not test_uri.endswith('info/refs'):
        test_uri = test_uri.rstrip('/') + '/info/refs'

    o = self._build_opener(url)
    o.addheaders = [('User-Agent', 'git/1.7.8.0')]  # fake some git

    q = {"service": 'git-upload-pack'}
    qs = '?%s' % urllib.urlencode(q)
    cu = "%s%s" % (test_uri, qs)
    req = urllib2.Request(cu, None, {})

    try:
        log.debug("Trying to open URL %s", cleaned_uri)
        resp = o.open(req)
        if resp.code != 200:
            raise exceptions.URLError()('Return Code is not 200')
    except Exception as e:
        # any failure (including the non-200 raise above) is re-wrapped
        log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
        # means it cannot be cloned
        raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))

    # now detect if it's proper git repo
    gitdata = resp.read()
    if 'service=git-upload-pack' in gitdata:
        pass
    elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
        # old style git can return some other format !
        pass
    else:
        raise exceptions.URLError()(
            "url [%s] does not look like an git" % (cleaned_uri,))

    return True
389
394
@reraise_safe_exceptions
def clone(self, wire, url, deferred, valid_refs, update_after_clone):
    """Clone ``url`` by pulling its refs and applying the matching ones."""
    # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
    remote_refs = self.pull(wire, url, apply_refs=False)
    repo = self._factory.repo(wire)
    prefixes = tuple(valid_refs) if isinstance(valid_refs, list) else valid_refs

    for ref_name in remote_refs:
        # only parse heads/tags and skip so called deferred tags
        if ref_name.startswith(prefixes) and not ref_name.endswith(deferred):
            repo[ref_name] = remote_refs[ref_name]

    if update_after_clone:
        # we want to checkout HEAD
        repo["HEAD"] = remote_refs["HEAD"]
        index.build_index_from_tree(repo.path, repo.index_path(),
                                    repo.object_store, repo["HEAD"].tree)
408
413
@reraise_safe_exceptions
def branch(self, wire, commit_id):
    # Return the short names of branches whose tip is ``commit_id``.
    # Cached per (context_uid, repo_id, commit_id); inner function name is
    # part of the cache key and must stay stable.
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _branch(_context_uid, _repo_id, _commit_id):
        regex = re.compile('^refs/heads')

        def filter_with(ref):
            # ref is a (name, sha) pair from get_refs()
            return regex.match(ref[0]) and ref[1] == _commit_id

        branches = filter(filter_with, self.get_refs(wire).items())
        return [x[0].split('refs/heads/')[-1] for x in branches]

    return _branch(context_uid, repo_id, commit_id)
423
429
@reraise_safe_exceptions
def commit_branches(self, wire, commit_id):
    # Return the branches that contain ``commit_id`` (via libgit2).
    # Cached per (context_uid, repo_id, commit_id); inner function name is
    # part of the cache key and must stay stable.
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)
    @region.conditional_cache_on_arguments(condition=cache_on)
    def _commit_branches(_context_uid, _repo_id, _commit_id):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:
            branches = [x for x in repo.branches.with_commit(_commit_id)]
            return branches

    return _commit_branches(context_uid, repo_id, commit_id)
435
442
@reraise_safe_exceptions
def add_object(self, wire, content):
    """Store ``content`` as a new blob object and return its id."""
    with self._factory.repo_libgit2(wire) as repo:
        new_blob = objects.Blob()
        new_blob.set_raw_string(content)
        repo.object_store.add_object(new_blob)
        return new_blob.id
444
451
# TODO: this is quite complex, check if that can be simplified
@reraise_safe_exceptions
def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
    # Build a new commit on ``branch`` from ``updated`` (list of node
    # dicts with path/node_path/content/mode) and ``removed`` (list of
    # paths), then point refs/heads/<branch> at it. Returns the commit id.

    # Sentinel marking the root tree in the ``trees`` traversal list below.
    class _Root(object):
        def __repr__(self):
            return 'ROOT TREE'
    ROOT = _Root()

    repo = self._factory.repo(wire)
    object_store = repo.object_store

    # Create tree and populates it with blobs

    if commit_tree and repo[commit_tree]:
        # start from the first parent's root tree so existing entries
        # are carried over
        git_commit = repo[commit_data['parents'][0]]
        commit_tree = repo[git_commit.tree]  # root tree
    else:
        commit_tree = objects.Tree()

    for node in updated:
        # Compute subdirs if needed
        dirpath, nodename = vcspath.split(node['path'])
        dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
        parent = commit_tree
        ancestors = [('', parent)]

        # Tries to dig for the deepest existing tree
        while dirnames:
            curdir = dirnames.pop(0)
            try:
                dir_id = parent[curdir][1]
            except KeyError:
                # put curdir back into dirnames and stops
                dirnames.insert(0, curdir)
                break
            else:
                # If found, updates parent
                parent = repo[dir_id]
                ancestors.append((curdir, parent))
        # Now parent is deepest existing tree and we need to create
        # subtrees for dirnames (in reverse order)
        # [this only applies for nodes from added]
        new_trees = []

        blob = objects.Blob.from_string(node['content'])

        if dirnames:
            # If there are trees which should be created we need to build
            # them now (in reverse order)
            reversed_dirnames = list(reversed(dirnames))
            curtree = objects.Tree()
            curtree[node['node_path']] = node['mode'], blob.id
            new_trees.append(curtree)
            for dirname in reversed_dirnames[:-1]:
                newtree = objects.Tree()
                newtree[dirname] = (DIR_STAT, curtree.id)
                new_trees.append(newtree)
                curtree = newtree
            parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
        else:
            parent.add(name=node['node_path'], mode=node['mode'], hexsha=blob.id)

        new_trees.append(parent)
        # Update ancestors
        reversed_ancestors = reversed(
            [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
        for parent, tree, path in reversed_ancestors:
            parent[path] = (DIR_STAT, tree.id)
            object_store.add_object(tree)

        object_store.add_object(blob)
        for tree in new_trees:
            object_store.add_object(tree)

    for node_path in removed:
        paths = node_path.split('/')
        tree = commit_tree  # start with top-level
        trees = [{'tree': tree, 'path': ROOT}]
        # Traverse deep into the forest...
        # resolve final tree by iterating the path.
        # e.g a/b/c.txt will get
        # - root as tree then
        # - 'a' as tree,
        # - 'b' as tree,
        # - stop at c as blob.
        for path in paths:
            try:
                obj = repo[tree[path][1]]
                if isinstance(obj, objects.Tree):
                    trees.append({'tree': obj, 'path': path})
                    tree = obj
            except KeyError:
                break
        # NOTE(review): PROBLEM flagged in source — we may not be editing
        # the same reference tree object here; verify removals actually
        # land in the committed root tree.
        # Cut down the blob and all rotten trees on the way back...
        for path, tree_data in reversed(zip(paths, trees)):
            tree = tree_data['tree']
            tree.__delitem__(path)
            # This operation edits the tree, we need to mark new commit back

            if len(tree) > 0:
                # This tree still has elements - don't remove it or any
                # of it's parents
                break

    object_store.add_object(commit_tree)

    # Create commit
    commit = objects.Commit()
    commit.tree = commit_tree.id
    for k, v in commit_data.items():
        setattr(commit, k, v)
    object_store.add_object(commit)

    self.create_branch(wire, branch, commit.id)

    # dulwich set-ref
    ref = 'refs/heads/%s' % branch
    repo.refs[ref] = commit.id

    return commit.id
546
577
@reraise_safe_exceptions
def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
    # Fetch from ``url`` into the local repository using dulwich.
    # Plain filesystem paths use LocalGitClient; anything with a scheme
    # (or the literal 'default') goes over HTTP.
    if url != 'default' and '://' not in url:
        client = LocalGitClient(url)
    else:
        url_obj = url_parser(url)
        o = self._build_opener(url)
        url, _ = url_obj.authinfo()
        client = HttpGitClient(base_url=url, opener=o)
    repo = self._factory.repo(wire)

    determine_wants = repo.object_store.determine_wants_all
    if refs:
        # only fetch the explicitly requested refs
        def determine_wants_requested(references):
            return [references[r] for r in references if r in refs]
        determine_wants = determine_wants_requested

    try:
        remote_refs = client.fetch(
            path=url, target=repo, determine_wants=determine_wants)
    except NotGitRepository as e:
        log.warning(
            'Trying to fetch from "%s" failed, not a Git repository.', url)
        # Exception can contain unicode which we convert
        raise exceptions.AbortException(e)(repr(e))

    # mikhail: client.fetch() returns all the remote refs, but fetches only
    # refs filtered by `determine_wants` function. We need to filter result
    # as well
    if refs:
        remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}

    if apply_refs:
        # TODO: johbo: Needs proper test coverage with a git repository
        # that contains a tag object, so that we would end up with
        # a peeled ref at this point.
        for k in remote_refs:
            if k.endswith(PEELED_REF_MARKER):
                log.debug("Skipping peeled reference %s", k)
                continue
            repo[k] = remote_refs[k]

        if refs and not update_after:
            # mikhail: explicitly set the head to the last ref.
            repo["HEAD"] = remote_refs[refs[-1]]

    if update_after:
        # we want to checkout HEAD
        repo["HEAD"] = remote_refs["HEAD"]
        index.build_index_from_tree(repo.path, repo.index_path(),
                                    repo.object_store, repo["HEAD"].tree)
    return remote_refs
630
@reraise_safe_exceptions
def sync_fetch(self, wire, url, refs=None, all_refs=False):
    # Fetch refs from ``url`` using the git binary (ls-remote + fetch).
    # Returns an ordered mapping of remote ref name -> sha. When ``refs``
    # is given, only refs whose sha is listed there are fetched.
    repo = self._factory.repo(wire)
    if refs and not isinstance(refs, (list, tuple)):
        refs = [refs]

    config = self._wire_to_config(wire)
    # get all remote refs we'll use to fetch later
    cmd = ['ls-remote']
    if not all_refs:
        cmd += ['--heads', '--tags']
    cmd += [url]
    output, __ = self.run_git_command(
        wire, cmd, fail_on_stderr=False,
        _copts=self._remote_conf(config),
        extra_env={'GIT_TERMINAL_PROMPT': '0'})

    remote_refs = collections.OrderedDict()
    fetch_refs = []

    for ref_line in output.splitlines():
        sha, ref = ref_line.split('\t')
        sha = sha.strip()
        if ref in remote_refs:
            # duplicate, skip
            continue
        if ref.endswith(PEELED_REF_MARKER):
            log.debug("Skipping peeled reference %s", ref)
            continue
        # don't sync HEAD
        if ref in ['HEAD']:
            continue

        remote_refs[ref] = sha

        if refs and sha in refs:
            # we filter fetch using our specified refs
            fetch_refs.append('{}:{}'.format(ref, ref))
        elif not refs:
            fetch_refs.append('{}:{}'.format(ref, ref))
    log.debug('Finished obtaining fetch refs, total: %s', len(fetch_refs))

    if fetch_refs:
        # fetch in chunks to keep the command line length bounded
        for chunk in more_itertools.chunked(fetch_refs, 1024 * 4):
            fetch_refs_chunks = list(chunk)
            log.debug('Fetching %s refs from import url', len(fetch_refs_chunks))
            _out, _err = self.run_git_command(
                wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs_chunks,
                fail_on_stderr=False,
                _copts=self._remote_conf(config),
                extra_env={'GIT_TERMINAL_PROMPT': '0'})

    return remote_refs
653
684
@reraise_safe_exceptions
def sync_push(self, wire, url, refs=None):
    """Mirror-push every ref of the local repository to ``url``.

    Silently returns when the remote URL does not check out.
    """
    if not self.check_url(url, wire):
        return

    config = self._wire_to_config(wire)
    self._factory.repo(wire)  # side effect: validates the local repo
    self.run_git_command(
        wire, ['push', url, '--mirror'], fail_on_stderr=False,
        _copts=self._remote_conf(config),
        extra_env={'GIT_TERMINAL_PROMPT': '0'})
664
695
@reraise_safe_exceptions
def get_remote_refs(self, wire, url):
    """Return the refs dict of the remote repository located at `url`."""
    remote_repo = Repo(url)
    return remote_repo.get_refs()
669
700
@reraise_safe_exceptions
def get_description(self, wire):
    """Return the description text of the repository behind `wire`."""
    return self._factory.repo(wire).get_description()
674
705
@reraise_safe_exceptions
def get_missing_revs(self, wire, rev1, rev2, path2):
    """Return commit ids reachable from `rev2` but not from `rev1`.

    Fetches objects in both directions between the local repo and the
    repo at `path2` first, so the walk sees a complete object graph.
    """
    local_repo = self._factory.repo(wire)
    LocalGitClient(thin_packs=False).fetch(path2, local_repo)

    wire_remote = wire.copy()
    wire_remote['path'] = path2
    remote_repo = self._factory.repo(wire_remote)
    LocalGitClient(thin_packs=False).fetch(wire["path"], remote_repo)

    walker = remote_repo.get_walker(include=[rev2], exclude=[rev1])
    return [entry.commit.id for entry in walker]
689
720
@reraise_safe_exceptions
def get_object(self, wire, sha, maybe_unreachable=False):
    """Resolve `sha` and return a summary dict for the commit it names.

    Raises LookupException when the sha does not exist or refers to a
    dangling commit (unless `maybe_unreachable` allows those).
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_object(_context_uid, _repo_id, _sha):
        repo_init = self._factory.repo_libgit2(wire)
        with repo_init as repo:

            missing_commit_err = 'Commit {} does not exist for `{}`'.format(sha, wire['path'])
            try:
                commit = repo.revparse_single(sha)
            except KeyError:
                # NOTE(marcink): KeyError doesn't give us any meaningful information
                # here, we instead give something more explicit
                e = exceptions.RefNotFoundException('SHA: %s not found', sha)
                raise exceptions.LookupException(e)(missing_commit_err)
            except ValueError as e:
                raise exceptions.LookupException(e)(missing_commit_err)

            is_tag = isinstance(commit, pygit2.Tag)
            if is_tag:
                # peel annotated tags down to the tagged commit
                commit = repo.get(commit.target)

            # tags and explicitly-allowed unreachable lookups skip the
            # dangling-commit verification below
            check_dangling = not is_tag
            if maybe_unreachable:
                check_dangling = False

            # we used a reference and it parsed means we're not having a dangling commit
            if sha != commit.hex:
                check_dangling = False

            if check_dangling:
                # a dangling commit belongs to no branch at all
                if not any(repo.branches.with_commit(commit.hex)):
                    # NOTE(marcink): Empty error doesn't give us any meaningful information
                    # here, we instead give something more explicit
                    e = exceptions.RefNotFoundException('SHA: %s not found in branches', sha)
                    raise exceptions.LookupException(e)(missing_commit_err)

            commit_id = commit.hex
            return {
                'id': commit_id,
                'type': self._type_id_to_name(commit.type),
                'commit_id': commit_id,
                'idx': 0
            }

    return _get_object(context_uid, repo_id, sha)
747
779
@reraise_safe_exceptions
def get_refs(self, wire):
    """Map every branch and tag ref name to its target commit hex."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_refs(_context_uid, _repo_id):
        with self._factory.repo_libgit2(wire) as repo:
            # only heads and tags; other refs (notes, remotes) are skipped
            ref_pattern = re.compile('^refs/(heads|tags)/')
            return {
                ref.name: ref.target.hex
                for ref in repo.listall_reference_objects()
                if ref_pattern.match(ref.name)
            }

    return _get_refs(context_uid, repo_id)
761
794
@reraise_safe_exceptions
def get_branch_pointers(self, wire):
    """Map each branch head commit hex to the branch's short name."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_branch_pointers(_context_uid, _repo_id):
        head_pattern = re.compile('^refs/heads')
        with self._factory.repo_libgit2(wire) as repo:
            return {
                ref.target.hex: ref.shorthand
                for ref in repo.listall_reference_objects()
                if head_pattern.match(ref.name)
            }

    return _get_branch_pointers(context_uid, repo_id)
775
809
@reraise_safe_exceptions
def head(self, wire, show_exc=True):
    """Return the hex id of the commit HEAD points at.

    With show_exc=False, failures (e.g. an unborn HEAD in an empty
    repository) are swallowed and None is returned.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _head(_context_uid, _repo_id, _show_exc):
        with self._factory.repo_libgit2(wire) as repo:
            try:
                return repo.head.peel().hex
            except Exception:
                if show_exc:
                    raise

    return _head(context_uid, repo_id, show_exc)
789
824
@reraise_safe_exceptions
def init(self, wire):
    """Create a non-bare git repository at the wire path."""
    self.repo = Repo.init(str_to_dulwich(wire['path']))
794
829
@reraise_safe_exceptions
def init_bare(self, wire):
    """Create a bare git repository at the wire path."""
    self.repo = Repo.init_bare(str_to_dulwich(wire['path']))
799
834
@reraise_safe_exceptions
def revision(self, wire, rev):
    """Return {'id': ..., 'tree': ...} for the object named by `rev`.

    The 'tree' key is present only for objects that carry one
    (commits); tree objects themselves have no tree_id attribute.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _revision(_context_uid, _repo_id, _rev):
        with self._factory.repo_libgit2(wire) as repo:
            obj = repo[rev]
            obj_data = {'id': obj.id.hex}
            if hasattr(obj, 'tree_id'):
                obj_data['tree'] = obj.tree_id.hex
            return obj_data

    return _revision(context_uid, repo_id, rev)
818
854
@reraise_safe_exceptions
def date(self, wire, commit_id):
    """Return [commit_time, commit_time_offset] for `commit_id`."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _date(_repo_id, _commit_id):
        with self._factory.repo_libgit2(wire) as repo:
            commit = repo[commit_id]
            if not hasattr(commit, 'commit_time'):
                # annotated tag: peel to the tagged commit first
                commit = commit.get_object()

            # TODO(marcink): check dulwich difference of offset vs timezone
            return [commit.commit_time, commit.commit_time_offset]

    return _date(repo_id, commit_id)
837
874
@reraise_safe_exceptions
def author(self, wire, commit_id):
    """Return the commit author as u'Name <email>' (best effort)."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _author(_repo_id, _commit_id):
        with self._factory.repo_libgit2(wire) as repo:
            commit = repo[commit_id]
            if not hasattr(commit, 'author'):
                # annotated tag: peel to the tagged commit first
                commit = commit.get_object()
            author = commit.author

            if author.email:
                return u"{} <{}>".format(author.name, author.email)

            try:
                return u"{}".format(author.name)
            except Exception:
                # name may contain bytes not decodable as unicode
                return u"{}".format(safe_unicode(author.raw_name))

    return _author(repo_id, commit_id)
861
899
@reraise_safe_exceptions
def message(self, wire, commit_id):
    """Return the raw commit message of `commit_id`."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _message(_repo_id, _commit_id):
        with self._factory.repo_libgit2(wire) as repo:
            return repo[commit_id].message

    return _message(repo_id, commit_id)
872
911
@reraise_safe_exceptions
def parents(self, wire, commit_id):
    """Return the hex ids of the parents of `commit_id`."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _parents(_repo_id, _commit_id):
        with self._factory.repo_libgit2(wire) as repo:
            commit = repo[commit_id]
            if not hasattr(commit, 'parent_ids'):
                # annotated tag: peel to the tagged commit first
                commit = commit.get_object()
            return [parent.hex for parent in commit.parent_ids]

    return _parents(repo_id, commit_id)
888
928
@reraise_safe_exceptions
def children(self, wire, commit_id):
    """Return the ids of all direct children of `commit_id`.

    Parses `git rev-list --all --children`, whose lines look like
    "<sha> <child-sha> <child-sha> ...".
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _children(_repo_id, _commit_id):
        output, __ = self.run_git_command(
            wire, ['rev-list', '--all', '--children'])

        child_ids = []
        # NOTE: use a plain prefix test instead of interpolating
        # commit_id into a regex pattern — equivalent for the full-sha
        # rev-list output and immune to regex metacharacters
        for line in output.splitlines():
            if line.startswith(commit_id):
                found_ids = line.split(' ')[1:]
                child_ids.extend(found_ids)

        return child_ids
    return _children(repo_id, commit_id)
906
947
@reraise_safe_exceptions
def set_refs(self, wire, key, value):
    """Create or overwrite the reference `key` to point at `value`."""
    with self._factory.repo_libgit2(wire) as repo:
        repo.references.create(key, value, force=True)
912
953
@reraise_safe_exceptions
def create_branch(self, wire, branch_name, commit_id, force=False):
    """Create local branch `branch_name` at `commit_id`.

    Without `force`, an already-existing branch is left untouched.
    """
    with self._factory.repo_libgit2(wire) as repo:
        commit = repo[commit_id]
        # create when forced, or only if the branch does not exist yet
        if force or not repo.branches.get(branch_name):
            repo.branches.local.create(branch_name, commit, force=force)
924
965
@reraise_safe_exceptions
def remove_ref(self, wire, key):
    """Delete the reference named `key`."""
    with self._factory.repo_libgit2(wire) as repo:
        repo.references.delete(key)
930
971
@reraise_safe_exceptions
def tag_remove(self, wire, tag_name):
    """Delete the tag `tag_name` (the refs/tags/<name> reference)."""
    with self._factory.repo_libgit2(wire) as repo:
        tag_ref = 'refs/tags/{}'.format(tag_name)
        repo.references.delete(tag_ref)
937
978
@reraise_safe_exceptions
def tree_changes(self, wire, source_id, target_id):
    """List tree changes between two commits via dulwich.

    # TODO(marcink): remove this seems it's only used by tests
    """
    repo = self._factory.repo(wire)
    source_tree = repo[source_id].tree if source_id else None
    target_tree = repo[target_id].tree
    changes = repo.object_store.tree_changes(source_tree, target_tree)
    return list(changes)
946
987
@reraise_safe_exceptions
def tree_and_type_for_path(self, wire, commit_id, path):
    """Return (tree_id, type, filemode) for `path` at `commit_id`.

    Returns (None, None, None) when the path does not exist.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _tree_and_type_for_path(_context_uid, _repo_id, _commit_id, _path):
        with self._factory.repo_libgit2(wire) as repo:
            try:
                entry = repo[commit_id].tree[path]
            except KeyError:
                return None, None, None

            return entry.id.hex, entry.type, entry.filemode

    return _tree_and_type_for_path(context_uid, repo_id, commit_id, path)
964
1006
@reraise_safe_exceptions
def tree_items(self, wire, tree_id):
    """List (name, filemode, sha, type) for every entry of `tree_id`.

    Raises ObjectMissing if the tree id cannot be resolved.
    """
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _tree_items(_repo_id, _tree_id):
        with self._factory.repo_libgit2(wire) as repo:
            try:
                tree = repo[tree_id]
            except KeyError:
                raise ObjectMissing('No tree with id: {}'.format(tree_id))

            result = []
            for entry in tree:
                entry_type = entry.type
                if entry_type == 'commit':
                    # NOTE(marcink): submodules we translate to 'link' for backward compat
                    entry_type = 'link'
                result.append((entry.name, entry.filemode, entry.hex, entry_type))
            return result

    return _tree_items(repo_id, tree_id)
991
1034
@reraise_safe_exceptions
def diff_2(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
    """
    Old version that uses subprocess to call diff
    """
    flags = [
        '-U%s' % context, '--patch',
        '--binary',
        '--find-renames',
        '--no-indent-heuristic',
    ]
    if opt_ignorews:
        flags.append('--ignore-all-space')

    if commit_id_1 == self.EMPTY_COMMIT:
        # no base commit: show the whole target commit as the diff
        cmd = ['show'] + flags + [commit_id_2]
    else:
        cmd = ['diff'] + flags + [commit_id_1, commit_id_2]

    if file_filter:
        cmd.extend(['--', file_filter])

    diff, __ = self.run_git_command(wire, cmd)

    # 'show' prefixes the patch with commit metadata; drop everything
    # up to the first 'diff' line, then re-append the trailing newline
    # just like the 'diff' command does
    if commit_id_1 == self.EMPTY_COMMIT:
        lines = diff.splitlines()
        skip = 0
        for line in lines:
            if line.startswith('diff'):
                break
            skip += 1
        diff = '\n'.join(lines[skip:]) + '\n'

    return diff
1032
1075
@reraise_safe_exceptions
def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_ignorews, context):
    """Produce a patch between two commits using libgit2.

    With `file_filter` set, only the patch for that single path is
    returned (empty string when the path is unchanged).
    """
    with self._factory.repo_libgit2(wire) as repo:
        swap = True
        flags = pygit2.GIT_DIFF_SHOW_BINARY
        if opt_ignorews:
            flags |= pygit2.GIT_DIFF_IGNORE_WHITESPACE

        if commit_id_1 == self.EMPTY_COMMIT:
            # no base: diff the target tree against the empty tree
            comm1 = repo[commit_id_2]
            diff_obj = comm1.tree.diff_to_tree(
                flags=flags, context_lines=context, swap=swap)
        else:
            comm1 = repo[commit_id_2]
            comm2 = repo[commit_id_1]
            diff_obj = comm1.tree.diff_to_tree(
                comm2.tree, flags=flags, context_lines=context, swap=swap)
            diff_obj.find_similar(flags=pygit2.GIT_DIFF_FIND_RENAMES)

        if file_filter:
            for patch in diff_obj:
                if patch.delta.old_file.path == file_filter:
                    return patch.patch or ''
            # no matching path == no diff
            return ''
        return diff_obj.patch or ''
1065
1108
@reraise_safe_exceptions
def node_history(self, wire, commit_id, path, limit):
    """Return commit ids that touched `path`, newest first, up to `limit`."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _node_history(_context_uid, _repo_id, _commit_id, _path, _limit):
        # optimize for n==1, rev-list is much faster for that use-case
        if limit == 1:
            cmd = ['rev-list', '-1', commit_id, '--', path]
        else:
            cmd = ['log']
            if limit:
                cmd.extend(['-n', str(safe_int(limit, 0))])
            cmd.extend(['--pretty=format: %H', '-s', commit_id, '--', path])

        output, __ = self.run_git_command(wire, cmd)
        # extract the full 40-char shas from the formatted output
        return re.findall(r'[0-9a-fA-F]{40}', output)

    return _node_history(context_uid, repo_id, commit_id, path, limit)
1085
1129
@reraise_safe_exceptions
def node_annotate(self, wire, commit_id, path):
    """Return [(line_no, commit_id, line), ...] blame data for `path`."""
    # -l ==> outputs long shas (and we need all 40 characters)
    # --root ==> doesn't put '^' character for boundaries
    # -r commit_id ==> blames for the given commit
    cmd = ['blame', '-l', '--root', '-r', commit_id, '--', path]
    output, __ = self.run_git_command(wire, cmd)

    result = []
    # drop the trailing empty string produced by the final newline
    for line_no, blame_line in enumerate(output.split('\n')[:-1], start=1):
        sha, content = re.split(r' ', blame_line, 1)
        result.append((line_no, sha, content))
    return result
1101
1145
@reraise_safe_exceptions
def update_server_info(self, wire):
    """Regenerate the info/refs metadata used for dumb-HTTP serving."""
    repo = self._factory.repo(wire)
    # this resolves to the module-level dulwich helper, not this method
    update_server_info(repo)
1106
1150
@reraise_safe_exceptions
def get_all_commit_ids(self, wire):
    """Return every commit id across branches and tags, oldest first."""
    cache_on, context_uid, repo_id = self._cache_on(wire)
    region = self._region(wire)

    @region.conditional_cache_on_arguments(condition=cache_on)
    def _get_all_commit_ids(_context_uid, _repo_id):
        cmd = ['rev-list', '--reverse', '--date-order', '--branches', '--tags']
        try:
            output, __ = self.run_git_command(wire, cmd)
        except Exception:
            # Can be raised for empty repositories
            return []
        return output.splitlines()

    return _get_all_commit_ids(context_uid, repo_id)
1122
1167
@reraise_safe_exceptions
def run_git_command(self, wire, cmd, **opts):
    """Execute a git command for the repository behind `wire`.

    Returns a (stdout, stderr) pair. Recognized option keys:
      _bare     -- skip the default '-c core.quotepath=false' config
      _safe     -- return ('', err) on failure instead of raising
      _copts    -- extra config-style arguments prepended to the command
      extra_env -- additional environment variables for the child process
    Remaining opts are forwarded to SubprocessIOChunker.
    """
    path = wire.get('path', None)
    if path and os.path.isdir(path):
        opts['cwd'] = path

    if '_bare' in opts:
        del opts['_bare']
        _copts = []
    else:
        _copts = ['-c', 'core.quotepath=false', ]

    safe_call = False
    if '_safe' in opts:
        # no exc on failure
        del opts['_safe']
        safe_call = True

    _copts.extend(opts.pop('_copts', None) or [])

    gitenv = os.environ.copy()
    gitenv.update(opts.pop('extra_env', {}))
    # need to clean fix GIT_DIR !
    gitenv.pop('GIT_DIR', None)
    gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
    gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'

    cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
    _opts = {'env': gitenv, 'shell': False}

    proc = None
    try:
        _opts.update(opts)
        proc = subprocessio.SubprocessIOChunker(cmd, **_opts)
        return ''.join(proc), ''.join(proc.error)
    except (EnvironmentError, OSError) as err:
        cmd = ' '.join(cmd)  # human friendly CMD
        tb_err = ("Couldn't run git command (%s).\n"
                  "Original error was:%s\n"
                  "Call options:%s\n"
                  % (cmd, err, _opts))
        log.exception(tb_err)
        if safe_call:
            return '', err
        else:
            raise exceptions.VcsException()(tb_err)
    finally:
        if proc:
            proc.close()
1176
1221
1177 @reraise_safe_exceptions
1222 @reraise_safe_exceptions
1178 def install_hooks(self, wire, force=False):
1223 def install_hooks(self, wire, force=False):
1179 from vcsserver.hook_utils import install_git_hooks
1224 from vcsserver.hook_utils import install_git_hooks
1180 bare = self.bare(wire)
1225 bare = self.bare(wire)
1181 path = wire['path']
1226 path = wire['path']
1182 return install_git_hooks(path, bare, force_create=force)
1227 return install_git_hooks(path, bare, force_create=force)
1183
1228
1184 @reraise_safe_exceptions
1229 @reraise_safe_exceptions
1185 def get_hooks_info(self, wire):
1230 def get_hooks_info(self, wire):
1186 from vcsserver.hook_utils import (
1231 from vcsserver.hook_utils import (
1187 get_git_pre_hook_version, get_git_post_hook_version)
1232 get_git_pre_hook_version, get_git_post_hook_version)
1188 bare = self.bare(wire)
1233 bare = self.bare(wire)
1189 path = wire['path']
1234 path = wire['path']
1190 return {
1235 return {
1191 'pre_version': get_git_pre_hook_version(path, bare),
1236 'pre_version': get_git_pre_hook_version(path, bare),
1192 'post_version': get_git_post_hook_version(path, bare),
1237 'post_version': get_git_post_hook_version(path, bare),
1193 }
1238 }
1194
1239
1195 @reraise_safe_exceptions
1240 @reraise_safe_exceptions
1241 def set_head_ref(self, wire, head_name):
1242 log.debug('Setting refs/head to `%s`', head_name)
1243 cmd = ['symbolic-ref', 'HEAD', 'refs/heads/%s' % head_name]
1244 output, __ = self.run_git_command(wire, cmd)
1245 return [head_name] + output.splitlines()
1246
1247 @reraise_safe_exceptions
1196 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1248 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1197 archive_dir_name, commit_id):
1249 archive_dir_name, commit_id):
1198
1250
1199 def file_walker(_commit_id, path):
1251 def file_walker(_commit_id, path):
1200 repo_init = self._factory.repo_libgit2(wire)
1252 repo_init = self._factory.repo_libgit2(wire)
1201
1253
1202 with repo_init as repo:
1254 with repo_init as repo:
1203 commit = repo[commit_id]
1255 commit = repo[commit_id]
1204
1256
1205 if path in ['', '/']:
1257 if path in ['', '/']:
1206 tree = commit.tree
1258 tree = commit.tree
1207 else:
1259 else:
1208 tree = commit.tree[path.rstrip('/')]
1260 tree = commit.tree[path.rstrip('/')]
1209 tree_id = tree.id.hex
1261 tree_id = tree.id.hex
1210 try:
1262 try:
1211 tree = repo[tree_id]
1263 tree = repo[tree_id]
1212 except KeyError:
1264 except KeyError:
1213 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1265 raise ObjectMissing('No tree with id: {}'.format(tree_id))
1214
1266
1215 index = LibGit2Index.Index()
1267 index = LibGit2Index.Index()
1216 index.read_tree(tree)
1268 index.read_tree(tree)
1217 file_iter = index
1269 file_iter = index
1218
1270
1219 for fn in file_iter:
1271 for fn in file_iter:
1220 file_path = fn.path
1272 file_path = fn.path
1221 mode = fn.mode
1273 mode = fn.mode
1222 is_link = stat.S_ISLNK(mode)
1274 is_link = stat.S_ISLNK(mode)
1223 yield ArchiveNode(file_path, mode, is_link, repo[fn.id].read_raw)
1275 if mode == pygit2.GIT_FILEMODE_COMMIT:
1276 log.debug('Skipping path %s as a commit node', file_path)
1277 continue
1278 yield ArchiveNode(file_path, mode, is_link, repo[fn.hex].read_raw)
1224
1279
1225 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1280 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1226 archive_dir_name, commit_id)
1281 archive_dir_name, commit_id)
@@ -1,1022 +1,1047 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import functools
17 import functools
18 import io
18 import io
19 import logging
19 import logging
20 import os
20 import os
21 import stat
21 import stat
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24 import traceback
24 import traceback
25
25
26 from hgext import largefiles, rebase, purge
26 from hgext import largefiles, rebase, purge
27 from hgext.strip import strip as hgext_strip
27 from hgext.strip import strip as hgext_strip
28 from mercurial import commands
28 from mercurial import commands
29 from mercurial import unionrepo
29 from mercurial import unionrepo
30 from mercurial import verify
30 from mercurial import verify
31 from mercurial import repair
31 from mercurial import repair
32
32
33 import vcsserver
33 import vcsserver
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
35 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original, archive_repo, ArchiveNode
36 from vcsserver.hgcompat import (
36 from vcsserver.hgcompat import (
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
37 archival, bin, clone, config as hgconfig, diffopts, hex, get_ctx,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
38 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
39 makepeer, instance, match, memctx, exchange, memfilectx, nullrev, hg_merge,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
40 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
41 RepoLookupError, InterventionRequired, RequirementError,
41 RepoLookupError, InterventionRequired, RequirementError,
42 alwaysmatcher, patternmatcher, hgutil)
42 alwaysmatcher, patternmatcher, hgutil)
43 from vcsserver.vcs_base import RemoteBase
43 from vcsserver.vcs_base import RemoteBase
44
44
45 log = logging.getLogger(__name__)
45 log = logging.getLogger(__name__)
46
46
47
47
48 def make_ui_from_config(repo_config):
48 def make_ui_from_config(repo_config):
49
49
50 class LoggingUI(ui.ui):
50 class LoggingUI(ui.ui):
51 def status(self, *msg, **opts):
51 def status(self, *msg, **opts):
52 log.info(' '.join(msg).rstrip('\n'))
52 log.info(' '.join(msg).rstrip('\n'))
53 super(LoggingUI, self).status(*msg, **opts)
53 super(LoggingUI, self).status(*msg, **opts)
54
54
55 def warn(self, *msg, **opts):
55 def warn(self, *msg, **opts):
56 log.warn(' '.join(msg).rstrip('\n'))
56 log.warn(' '.join(msg).rstrip('\n'))
57 super(LoggingUI, self).warn(*msg, **opts)
57 super(LoggingUI, self).warn(*msg, **opts)
58
58
59 def error(self, *msg, **opts):
59 def error(self, *msg, **opts):
60 log.error(' '.join(msg).rstrip('\n'))
60 log.error(' '.join(msg).rstrip('\n'))
61 super(LoggingUI, self).error(*msg, **opts)
61 super(LoggingUI, self).error(*msg, **opts)
62
62
63 def note(self, *msg, **opts):
63 def note(self, *msg, **opts):
64 log.info(' '.join(msg).rstrip('\n'))
64 log.info(' '.join(msg).rstrip('\n'))
65 super(LoggingUI, self).note(*msg, **opts)
65 super(LoggingUI, self).note(*msg, **opts)
66
66
67 def debug(self, *msg, **opts):
67 def debug(self, *msg, **opts):
68 log.debug(' '.join(msg).rstrip('\n'))
68 log.debug(' '.join(msg).rstrip('\n'))
69 super(LoggingUI, self).debug(*msg, **opts)
69 super(LoggingUI, self).debug(*msg, **opts)
70
70
71 baseui = LoggingUI()
71 baseui = LoggingUI()
72
72
73 # clean the baseui object
73 # clean the baseui object
74 baseui._ocfg = hgconfig.config()
74 baseui._ocfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
75 baseui._ucfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
76 baseui._tcfg = hgconfig.config()
77
77
78 for section, option, value in repo_config:
78 for section, option, value in repo_config:
79 baseui.setconfig(section, option, value)
79 baseui.setconfig(section, option, value)
80
80
81 # make our hgweb quiet so it doesn't print output
81 # make our hgweb quiet so it doesn't print output
82 baseui.setconfig('ui', 'quiet', 'true')
82 baseui.setconfig('ui', 'quiet', 'true')
83
83
84 baseui.setconfig('ui', 'paginate', 'never')
84 baseui.setconfig('ui', 'paginate', 'never')
85 # for better Error reporting of Mercurial
85 # for better Error reporting of Mercurial
86 baseui.setconfig('ui', 'message-output', 'stderr')
86 baseui.setconfig('ui', 'message-output', 'stderr')
87
87
88 # force mercurial to only use 1 thread, otherwise it may try to set a
88 # force mercurial to only use 1 thread, otherwise it may try to set a
89 # signal in a non-main thread, thus generating a ValueError.
89 # signal in a non-main thread, thus generating a ValueError.
90 baseui.setconfig('worker', 'numcpus', 1)
90 baseui.setconfig('worker', 'numcpus', 1)
91
91
92 # If there is no config for the largefiles extension, we explicitly disable
92 # If there is no config for the largefiles extension, we explicitly disable
93 # it here. This overrides settings from repositories hgrc file. Recent
93 # it here. This overrides settings from repositories hgrc file. Recent
94 # mercurial versions enable largefiles in hgrc on clone from largefile
94 # mercurial versions enable largefiles in hgrc on clone from largefile
95 # repo.
95 # repo.
96 if not baseui.hasconfig('extensions', 'largefiles'):
96 if not baseui.hasconfig('extensions', 'largefiles'):
97 log.debug('Explicitly disable largefiles extension for repo.')
97 log.debug('Explicitly disable largefiles extension for repo.')
98 baseui.setconfig('extensions', 'largefiles', '!')
98 baseui.setconfig('extensions', 'largefiles', '!')
99
99
100 return baseui
100 return baseui
101
101
102
102
103 def reraise_safe_exceptions(func):
103 def reraise_safe_exceptions(func):
104 """Decorator for converting mercurial exceptions to something neutral."""
104 """Decorator for converting mercurial exceptions to something neutral."""
105
105
106 def wrapper(*args, **kwargs):
106 def wrapper(*args, **kwargs):
107 try:
107 try:
108 return func(*args, **kwargs)
108 return func(*args, **kwargs)
109 except (Abort, InterventionRequired) as e:
109 except (Abort, InterventionRequired) as e:
110 raise_from_original(exceptions.AbortException(e))
110 raise_from_original(exceptions.AbortException(e))
111 except RepoLookupError as e:
111 except RepoLookupError as e:
112 raise_from_original(exceptions.LookupException(e))
112 raise_from_original(exceptions.LookupException(e))
113 except RequirementError as e:
113 except RequirementError as e:
114 raise_from_original(exceptions.RequirementException(e))
114 raise_from_original(exceptions.RequirementException(e))
115 except RepoError as e:
115 except RepoError as e:
116 raise_from_original(exceptions.VcsException(e))
116 raise_from_original(exceptions.VcsException(e))
117 except LookupError as e:
117 except LookupError as e:
118 raise_from_original(exceptions.LookupException(e))
118 raise_from_original(exceptions.LookupException(e))
119 except Exception as e:
119 except Exception as e:
120 if not hasattr(e, '_vcs_kind'):
120 if not hasattr(e, '_vcs_kind'):
121 log.exception("Unhandled exception in hg remote call")
121 log.exception("Unhandled exception in hg remote call")
122 raise_from_original(exceptions.UnhandledException(e))
122 raise_from_original(exceptions.UnhandledException(e))
123
123
124 raise
124 raise
125 return wrapper
125 return wrapper
126
126
127
127
128 class MercurialFactory(RepoFactory):
128 class MercurialFactory(RepoFactory):
129 repo_type = 'hg'
129 repo_type = 'hg'
130
130
131 def _create_config(self, config, hooks=True):
131 def _create_config(self, config, hooks=True):
132 if not hooks:
132 if not hooks:
133 hooks_to_clean = frozenset((
133 hooks_to_clean = frozenset((
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
134 'changegroup.repo_size', 'preoutgoing.pre_pull',
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
135 'outgoing.pull_logger', 'prechangegroup.pre_push'))
136 new_config = []
136 new_config = []
137 for section, option, value in config:
137 for section, option, value in config:
138 if section == 'hooks' and option in hooks_to_clean:
138 if section == 'hooks' and option in hooks_to_clean:
139 continue
139 continue
140 new_config.append((section, option, value))
140 new_config.append((section, option, value))
141 config = new_config
141 config = new_config
142
142
143 baseui = make_ui_from_config(config)
143 baseui = make_ui_from_config(config)
144 return baseui
144 return baseui
145
145
146 def _create_repo(self, wire, create):
146 def _create_repo(self, wire, create):
147 baseui = self._create_config(wire["config"])
147 baseui = self._create_config(wire["config"])
148 return instance(baseui, wire["path"], create)
148 return instance(baseui, wire["path"], create)
149
149
150 def repo(self, wire, create=False):
150 def repo(self, wire, create=False):
151 """
151 """
152 Get a repository instance for the given path.
152 Get a repository instance for the given path.
153 """
153 """
154 return self._create_repo(wire, create)
154 return self._create_repo(wire, create)
155
155
156
156
157 def patch_ui_message_output(baseui):
157 def patch_ui_message_output(baseui):
158 baseui.setconfig('ui', 'quiet', 'false')
158 baseui.setconfig('ui', 'quiet', 'false')
159 output = io.BytesIO()
159 output = io.BytesIO()
160
160
161 def write(data, **unused_kwargs):
161 def write(data, **unused_kwargs):
162 output.write(data)
162 output.write(data)
163
163
164 baseui.status = write
164 baseui.status = write
165 baseui.write = write
165 baseui.write = write
166 baseui.warn = write
166 baseui.warn = write
167 baseui.debug = write
167 baseui.debug = write
168
168
169 return baseui, output
169 return baseui, output
170
170
171
171
172 class HgRemote(RemoteBase):
172 class HgRemote(RemoteBase):
173
173
174 def __init__(self, factory):
174 def __init__(self, factory):
175 self._factory = factory
175 self._factory = factory
176 self._bulk_methods = {
176 self._bulk_methods = {
177 "affected_files": self.ctx_files,
177 "affected_files": self.ctx_files,
178 "author": self.ctx_user,
178 "author": self.ctx_user,
179 "branch": self.ctx_branch,
179 "branch": self.ctx_branch,
180 "children": self.ctx_children,
180 "children": self.ctx_children,
181 "date": self.ctx_date,
181 "date": self.ctx_date,
182 "message": self.ctx_description,
182 "message": self.ctx_description,
183 "parents": self.ctx_parents,
183 "parents": self.ctx_parents,
184 "status": self.ctx_status,
184 "status": self.ctx_status,
185 "obsolete": self.ctx_obsolete,
185 "obsolete": self.ctx_obsolete,
186 "phase": self.ctx_phase,
186 "phase": self.ctx_phase,
187 "hidden": self.ctx_hidden,
187 "hidden": self.ctx_hidden,
188 "_file_paths": self.ctx_list,
188 "_file_paths": self.ctx_list,
189 }
189 }
190
190
191 def _get_ctx(self, repo, ref):
191 def _get_ctx(self, repo, ref):
192 return get_ctx(repo, ref)
192 return get_ctx(repo, ref)
193
193
194 @reraise_safe_exceptions
194 @reraise_safe_exceptions
195 def discover_hg_version(self):
195 def discover_hg_version(self):
196 from mercurial import util
196 from mercurial import util
197 return util.version()
197 return util.version()
198
198
199 @reraise_safe_exceptions
199 @reraise_safe_exceptions
200 def is_empty(self, wire):
200 def is_empty(self, wire):
201 repo = self._factory.repo(wire)
201 repo = self._factory.repo(wire)
202
202
203 try:
203 try:
204 return len(repo) == 0
204 return len(repo) == 0
205 except Exception:
205 except Exception:
206 log.exception("failed to read object_store")
206 log.exception("failed to read object_store")
207 return False
207 return False
208
208
209 @reraise_safe_exceptions
209 @reraise_safe_exceptions
210 def bookmarks(self, wire):
210 def bookmarks(self, wire):
211 cache_on, context_uid, repo_id = self._cache_on(wire)
211 cache_on, context_uid, repo_id = self._cache_on(wire)
212 @self.region.conditional_cache_on_arguments(condition=cache_on)
212 region = self._region(wire)
213 @region.conditional_cache_on_arguments(condition=cache_on)
213 def _bookmarks(_context_uid, _repo_id):
214 def _bookmarks(_context_uid, _repo_id):
214 repo = self._factory.repo(wire)
215 repo = self._factory.repo(wire)
215 return dict(repo._bookmarks)
216 return dict(repo._bookmarks)
216
217
217 return _bookmarks(context_uid, repo_id)
218 return _bookmarks(context_uid, repo_id)
218
219
219 @reraise_safe_exceptions
220 @reraise_safe_exceptions
220 def branches(self, wire, normal, closed):
221 def branches(self, wire, normal, closed):
221 cache_on, context_uid, repo_id = self._cache_on(wire)
222 cache_on, context_uid, repo_id = self._cache_on(wire)
222 @self.region.conditional_cache_on_arguments(condition=cache_on)
223 region = self._region(wire)
224 @region.conditional_cache_on_arguments(condition=cache_on)
223 def _branches(_context_uid, _repo_id, _normal, _closed):
225 def _branches(_context_uid, _repo_id, _normal, _closed):
224 repo = self._factory.repo(wire)
226 repo = self._factory.repo(wire)
225 iter_branches = repo.branchmap().iterbranches()
227 iter_branches = repo.branchmap().iterbranches()
226 bt = {}
228 bt = {}
227 for branch_name, _heads, tip, is_closed in iter_branches:
229 for branch_name, _heads, tip, is_closed in iter_branches:
228 if normal and not is_closed:
230 if normal and not is_closed:
229 bt[branch_name] = tip
231 bt[branch_name] = tip
230 if closed and is_closed:
232 if closed and is_closed:
231 bt[branch_name] = tip
233 bt[branch_name] = tip
232
234
233 return bt
235 return bt
234
236
235 return _branches(context_uid, repo_id, normal, closed)
237 return _branches(context_uid, repo_id, normal, closed)
236
238
237 @reraise_safe_exceptions
239 @reraise_safe_exceptions
238 def bulk_request(self, wire, commit_id, pre_load):
240 def bulk_request(self, wire, commit_id, pre_load):
239 cache_on, context_uid, repo_id = self._cache_on(wire)
241 cache_on, context_uid, repo_id = self._cache_on(wire)
240 @self.region.conditional_cache_on_arguments(condition=cache_on)
242 region = self._region(wire)
243 @region.conditional_cache_on_arguments(condition=cache_on)
241 def _bulk_request(_repo_id, _commit_id, _pre_load):
244 def _bulk_request(_repo_id, _commit_id, _pre_load):
242 result = {}
245 result = {}
243 for attr in pre_load:
246 for attr in pre_load:
244 try:
247 try:
245 method = self._bulk_methods[attr]
248 method = self._bulk_methods[attr]
246 result[attr] = method(wire, commit_id)
249 result[attr] = method(wire, commit_id)
247 except KeyError as e:
250 except KeyError as e:
248 raise exceptions.VcsException(e)(
251 raise exceptions.VcsException(e)(
249 'Unknown bulk attribute: "%s"' % attr)
252 'Unknown bulk attribute: "%s"' % attr)
250 return result
253 return result
251
254
252 return _bulk_request(repo_id, commit_id, sorted(pre_load))
255 return _bulk_request(repo_id, commit_id, sorted(pre_load))
253
256
254 @reraise_safe_exceptions
257 @reraise_safe_exceptions
255 def ctx_branch(self, wire, commit_id):
258 def ctx_branch(self, wire, commit_id):
256 cache_on, context_uid, repo_id = self._cache_on(wire)
259 cache_on, context_uid, repo_id = self._cache_on(wire)
257 @self.region.conditional_cache_on_arguments(condition=cache_on)
260 region = self._region(wire)
261 @region.conditional_cache_on_arguments(condition=cache_on)
258 def _ctx_branch(_repo_id, _commit_id):
262 def _ctx_branch(_repo_id, _commit_id):
259 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
260 ctx = self._get_ctx(repo, commit_id)
264 ctx = self._get_ctx(repo, commit_id)
261 return ctx.branch()
265 return ctx.branch()
262 return _ctx_branch(repo_id, commit_id)
266 return _ctx_branch(repo_id, commit_id)
263
267
264 @reraise_safe_exceptions
268 @reraise_safe_exceptions
265 def ctx_date(self, wire, commit_id):
269 def ctx_date(self, wire, commit_id):
266 cache_on, context_uid, repo_id = self._cache_on(wire)
270 cache_on, context_uid, repo_id = self._cache_on(wire)
267 @self.region.conditional_cache_on_arguments(condition=cache_on)
271 region = self._region(wire)
272 @region.conditional_cache_on_arguments(condition=cache_on)
268 def _ctx_date(_repo_id, _commit_id):
273 def _ctx_date(_repo_id, _commit_id):
269 repo = self._factory.repo(wire)
274 repo = self._factory.repo(wire)
270 ctx = self._get_ctx(repo, commit_id)
275 ctx = self._get_ctx(repo, commit_id)
271 return ctx.date()
276 return ctx.date()
272 return _ctx_date(repo_id, commit_id)
277 return _ctx_date(repo_id, commit_id)
273
278
274 @reraise_safe_exceptions
279 @reraise_safe_exceptions
275 def ctx_description(self, wire, revision):
280 def ctx_description(self, wire, revision):
276 repo = self._factory.repo(wire)
281 repo = self._factory.repo(wire)
277 ctx = self._get_ctx(repo, revision)
282 ctx = self._get_ctx(repo, revision)
278 return ctx.description()
283 return ctx.description()
279
284
280 @reraise_safe_exceptions
285 @reraise_safe_exceptions
281 def ctx_files(self, wire, commit_id):
286 def ctx_files(self, wire, commit_id):
282 cache_on, context_uid, repo_id = self._cache_on(wire)
287 cache_on, context_uid, repo_id = self._cache_on(wire)
283 @self.region.conditional_cache_on_arguments(condition=cache_on)
288 region = self._region(wire)
289 @region.conditional_cache_on_arguments(condition=cache_on)
284 def _ctx_files(_repo_id, _commit_id):
290 def _ctx_files(_repo_id, _commit_id):
285 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
286 ctx = self._get_ctx(repo, commit_id)
292 ctx = self._get_ctx(repo, commit_id)
287 return ctx.files()
293 return ctx.files()
288
294
289 return _ctx_files(repo_id, commit_id)
295 return _ctx_files(repo_id, commit_id)
290
296
291 @reraise_safe_exceptions
297 @reraise_safe_exceptions
292 def ctx_list(self, path, revision):
298 def ctx_list(self, path, revision):
293 repo = self._factory.repo(path)
299 repo = self._factory.repo(path)
294 ctx = self._get_ctx(repo, revision)
300 ctx = self._get_ctx(repo, revision)
295 return list(ctx)
301 return list(ctx)
296
302
297 @reraise_safe_exceptions
303 @reraise_safe_exceptions
298 def ctx_parents(self, wire, commit_id):
304 def ctx_parents(self, wire, commit_id):
299 cache_on, context_uid, repo_id = self._cache_on(wire)
305 cache_on, context_uid, repo_id = self._cache_on(wire)
300 @self.region.conditional_cache_on_arguments(condition=cache_on)
306 region = self._region(wire)
307 @region.conditional_cache_on_arguments(condition=cache_on)
301 def _ctx_parents(_repo_id, _commit_id):
308 def _ctx_parents(_repo_id, _commit_id):
302 repo = self._factory.repo(wire)
309 repo = self._factory.repo(wire)
303 ctx = self._get_ctx(repo, commit_id)
310 ctx = self._get_ctx(repo, commit_id)
304 return [parent.hex() for parent in ctx.parents()
311 return [parent.hex() for parent in ctx.parents()
305 if not (parent.hidden() or parent.obsolete())]
312 if not (parent.hidden() or parent.obsolete())]
306
313
307 return _ctx_parents(repo_id, commit_id)
314 return _ctx_parents(repo_id, commit_id)
308
315
309 @reraise_safe_exceptions
316 @reraise_safe_exceptions
310 def ctx_children(self, wire, commit_id):
317 def ctx_children(self, wire, commit_id):
311 cache_on, context_uid, repo_id = self._cache_on(wire)
318 cache_on, context_uid, repo_id = self._cache_on(wire)
312 @self.region.conditional_cache_on_arguments(condition=cache_on)
319 region = self._region(wire)
320 @region.conditional_cache_on_arguments(condition=cache_on)
313 def _ctx_children(_repo_id, _commit_id):
321 def _ctx_children(_repo_id, _commit_id):
314 repo = self._factory.repo(wire)
322 repo = self._factory.repo(wire)
315 ctx = self._get_ctx(repo, commit_id)
323 ctx = self._get_ctx(repo, commit_id)
316 return [child.hex() for child in ctx.children()
324 return [child.hex() for child in ctx.children()
317 if not (child.hidden() or child.obsolete())]
325 if not (child.hidden() or child.obsolete())]
318
326
319 return _ctx_children(repo_id, commit_id)
327 return _ctx_children(repo_id, commit_id)
320
328
321 @reraise_safe_exceptions
329 @reraise_safe_exceptions
322 def ctx_phase(self, wire, commit_id):
330 def ctx_phase(self, wire, commit_id):
323 cache_on, context_uid, repo_id = self._cache_on(wire)
331 cache_on, context_uid, repo_id = self._cache_on(wire)
324 @self.region.conditional_cache_on_arguments(condition=cache_on)
332 region = self._region(wire)
333 @region.conditional_cache_on_arguments(condition=cache_on)
325 def _ctx_phase(_context_uid, _repo_id, _commit_id):
334 def _ctx_phase(_context_uid, _repo_id, _commit_id):
326 repo = self._factory.repo(wire)
335 repo = self._factory.repo(wire)
327 ctx = self._get_ctx(repo, commit_id)
336 ctx = self._get_ctx(repo, commit_id)
328 # public=0, draft=1, secret=3
337 # public=0, draft=1, secret=3
329 return ctx.phase()
338 return ctx.phase()
330 return _ctx_phase(context_uid, repo_id, commit_id)
339 return _ctx_phase(context_uid, repo_id, commit_id)
331
340
332 @reraise_safe_exceptions
341 @reraise_safe_exceptions
333 def ctx_obsolete(self, wire, commit_id):
342 def ctx_obsolete(self, wire, commit_id):
334 cache_on, context_uid, repo_id = self._cache_on(wire)
343 cache_on, context_uid, repo_id = self._cache_on(wire)
335 @self.region.conditional_cache_on_arguments(condition=cache_on)
344 region = self._region(wire)
345 @region.conditional_cache_on_arguments(condition=cache_on)
336 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
346 def _ctx_obsolete(_context_uid, _repo_id, _commit_id):
337 repo = self._factory.repo(wire)
347 repo = self._factory.repo(wire)
338 ctx = self._get_ctx(repo, commit_id)
348 ctx = self._get_ctx(repo, commit_id)
339 return ctx.obsolete()
349 return ctx.obsolete()
340 return _ctx_obsolete(context_uid, repo_id, commit_id)
350 return _ctx_obsolete(context_uid, repo_id, commit_id)
341
351
342 @reraise_safe_exceptions
352 @reraise_safe_exceptions
343 def ctx_hidden(self, wire, commit_id):
353 def ctx_hidden(self, wire, commit_id):
344 cache_on, context_uid, repo_id = self._cache_on(wire)
354 cache_on, context_uid, repo_id = self._cache_on(wire)
345 @self.region.conditional_cache_on_arguments(condition=cache_on)
355 region = self._region(wire)
356 @region.conditional_cache_on_arguments(condition=cache_on)
346 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
357 def _ctx_hidden(_context_uid, _repo_id, _commit_id):
347 repo = self._factory.repo(wire)
358 repo = self._factory.repo(wire)
348 ctx = self._get_ctx(repo, commit_id)
359 ctx = self._get_ctx(repo, commit_id)
349 return ctx.hidden()
360 return ctx.hidden()
350 return _ctx_hidden(context_uid, repo_id, commit_id)
361 return _ctx_hidden(context_uid, repo_id, commit_id)
351
362
352 @reraise_safe_exceptions
363 @reraise_safe_exceptions
353 def ctx_substate(self, wire, revision):
364 def ctx_substate(self, wire, revision):
354 repo = self._factory.repo(wire)
365 repo = self._factory.repo(wire)
355 ctx = self._get_ctx(repo, revision)
366 ctx = self._get_ctx(repo, revision)
356 return ctx.substate
367 return ctx.substate
357
368
358 @reraise_safe_exceptions
369 @reraise_safe_exceptions
359 def ctx_status(self, wire, revision):
370 def ctx_status(self, wire, revision):
360 repo = self._factory.repo(wire)
371 repo = self._factory.repo(wire)
361 ctx = self._get_ctx(repo, revision)
372 ctx = self._get_ctx(repo, revision)
362 status = repo[ctx.p1().node()].status(other=ctx.node())
373 status = repo[ctx.p1().node()].status(other=ctx.node())
363 # object of status (odd, custom named tuple in mercurial) is not
374 # object of status (odd, custom named tuple in mercurial) is not
364 # correctly serializable, we make it a list, as the underling
375 # correctly serializable, we make it a list, as the underling
365 # API expects this to be a list
376 # API expects this to be a list
366 return list(status)
377 return list(status)
367
378
368 @reraise_safe_exceptions
379 @reraise_safe_exceptions
369 def ctx_user(self, wire, revision):
380 def ctx_user(self, wire, revision):
370 repo = self._factory.repo(wire)
381 repo = self._factory.repo(wire)
371 ctx = self._get_ctx(repo, revision)
382 ctx = self._get_ctx(repo, revision)
372 return ctx.user()
383 return ctx.user()
373
384
374 @reraise_safe_exceptions
385 @reraise_safe_exceptions
375 def check_url(self, url, config):
386 def check_url(self, url, config):
376 _proto = None
387 _proto = None
377 if '+' in url[:url.find('://')]:
388 if '+' in url[:url.find('://')]:
378 _proto = url[0:url.find('+')]
389 _proto = url[0:url.find('+')]
379 url = url[url.find('+') + 1:]
390 url = url[url.find('+') + 1:]
380 handlers = []
391 handlers = []
381 url_obj = url_parser(url)
392 url_obj = url_parser(url)
382 test_uri, authinfo = url_obj.authinfo()
393 test_uri, authinfo = url_obj.authinfo()
383 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
394 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
384 url_obj.query = obfuscate_qs(url_obj.query)
395 url_obj.query = obfuscate_qs(url_obj.query)
385
396
386 cleaned_uri = str(url_obj)
397 cleaned_uri = str(url_obj)
387 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
398 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
388
399
389 if authinfo:
400 if authinfo:
390 # create a password manager
401 # create a password manager
391 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
402 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
392 passmgr.add_password(*authinfo)
403 passmgr.add_password(*authinfo)
393
404
394 handlers.extend((httpbasicauthhandler(passmgr),
405 handlers.extend((httpbasicauthhandler(passmgr),
395 httpdigestauthhandler(passmgr)))
406 httpdigestauthhandler(passmgr)))
396
407
397 o = urllib2.build_opener(*handlers)
408 o = urllib2.build_opener(*handlers)
398 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
409 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
399 ('Accept', 'application/mercurial-0.1')]
410 ('Accept', 'application/mercurial-0.1')]
400
411
401 q = {"cmd": 'between'}
412 q = {"cmd": 'between'}
402 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
413 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
403 qs = '?%s' % urllib.urlencode(q)
414 qs = '?%s' % urllib.urlencode(q)
404 cu = "%s%s" % (test_uri, qs)
415 cu = "%s%s" % (test_uri, qs)
405 req = urllib2.Request(cu, None, {})
416 req = urllib2.Request(cu, None, {})
406
417
407 try:
418 try:
408 log.debug("Trying to open URL %s", cleaned_uri)
419 log.debug("Trying to open URL %s", cleaned_uri)
409 resp = o.open(req)
420 resp = o.open(req)
410 if resp.code != 200:
421 if resp.code != 200:
411 raise exceptions.URLError()('Return Code is not 200')
422 raise exceptions.URLError()('Return Code is not 200')
412 except Exception as e:
423 except Exception as e:
413 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
424 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
414 # means it cannot be cloned
425 # means it cannot be cloned
415 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
426 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
416
427
417 # now check if it's a proper hg repo, but don't do it for svn
428 # now check if it's a proper hg repo, but don't do it for svn
418 try:
429 try:
419 if _proto == 'svn':
430 if _proto == 'svn':
420 pass
431 pass
421 else:
432 else:
422 # check for pure hg repos
433 # check for pure hg repos
423 log.debug(
434 log.debug(
424 "Verifying if URL is a Mercurial repository: %s",
435 "Verifying if URL is a Mercurial repository: %s",
425 cleaned_uri)
436 cleaned_uri)
426 ui = make_ui_from_config(config)
437 ui = make_ui_from_config(config)
427 peer_checker = makepeer(ui, url)
438 peer_checker = makepeer(ui, url)
428 peer_checker.lookup('tip')
439 peer_checker.lookup('tip')
429 except Exception as e:
440 except Exception as e:
430 log.warning("URL is not a valid Mercurial repository: %s",
441 log.warning("URL is not a valid Mercurial repository: %s",
431 cleaned_uri)
442 cleaned_uri)
432 raise exceptions.URLError(e)(
443 raise exceptions.URLError(e)(
433 "url [%s] does not look like an hg repo org_exc: %s"
444 "url [%s] does not look like an hg repo org_exc: %s"
434 % (cleaned_uri, e))
445 % (cleaned_uri, e))
435
446
436 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
447 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
437 return True
448 return True
438
449
439 @reraise_safe_exceptions
450 @reraise_safe_exceptions
440 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
451 def diff(self, wire, commit_id_1, commit_id_2, file_filter, opt_git, opt_ignorews, context):
441 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
442
453
443 if file_filter:
454 if file_filter:
444 match_filter = match(file_filter[0], '', [file_filter[1]])
455 match_filter = match(file_filter[0], '', [file_filter[1]])
445 else:
456 else:
446 match_filter = file_filter
457 match_filter = file_filter
447 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
458 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context, showfunc=1)
448
459
449 try:
460 try:
450 return "".join(patch.diff(
461 return "".join(patch.diff(
451 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
462 repo, node1=commit_id_1, node2=commit_id_2, match=match_filter, opts=opts))
452 except RepoLookupError as e:
463 except RepoLookupError as e:
453 raise exceptions.LookupException(e)()
464 raise exceptions.LookupException(e)()
454
465
455 @reraise_safe_exceptions
466 @reraise_safe_exceptions
456 def node_history(self, wire, revision, path, limit):
467 def node_history(self, wire, revision, path, limit):
457 cache_on, context_uid, repo_id = self._cache_on(wire)
468 cache_on, context_uid, repo_id = self._cache_on(wire)
458 @self.region.conditional_cache_on_arguments(condition=cache_on)
469 region = self._region(wire)
470 @region.conditional_cache_on_arguments(condition=cache_on)
459 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
471 def _node_history(_context_uid, _repo_id, _revision, _path, _limit):
460 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
461
473
462 ctx = self._get_ctx(repo, revision)
474 ctx = self._get_ctx(repo, revision)
463 fctx = ctx.filectx(path)
475 fctx = ctx.filectx(path)
464
476
465 def history_iter():
477 def history_iter():
466 limit_rev = fctx.rev()
478 limit_rev = fctx.rev()
467 for obj in reversed(list(fctx.filelog())):
479 for obj in reversed(list(fctx.filelog())):
468 obj = fctx.filectx(obj)
480 obj = fctx.filectx(obj)
469 ctx = obj.changectx()
481 ctx = obj.changectx()
470 if ctx.hidden() or ctx.obsolete():
482 if ctx.hidden() or ctx.obsolete():
471 continue
483 continue
472
484
473 if limit_rev >= obj.rev():
485 if limit_rev >= obj.rev():
474 yield obj
486 yield obj
475
487
476 history = []
488 history = []
477 for cnt, obj in enumerate(history_iter()):
489 for cnt, obj in enumerate(history_iter()):
478 if limit and cnt >= limit:
490 if limit and cnt >= limit:
479 break
491 break
480 history.append(hex(obj.node()))
492 history.append(hex(obj.node()))
481
493
482 return [x for x in history]
494 return [x for x in history]
483 return _node_history(context_uid, repo_id, revision, path, limit)
495 return _node_history(context_uid, repo_id, revision, path, limit)
484
496
485 @reraise_safe_exceptions
497 @reraise_safe_exceptions
486 def node_history_untill(self, wire, revision, path, limit):
498 def node_history_untill(self, wire, revision, path, limit):
487 cache_on, context_uid, repo_id = self._cache_on(wire)
499 cache_on, context_uid, repo_id = self._cache_on(wire)
488 @self.region.conditional_cache_on_arguments(condition=cache_on)
500 region = self._region(wire)
501 @region.conditional_cache_on_arguments(condition=cache_on)
489 def _node_history_until(_context_uid, _repo_id):
502 def _node_history_until(_context_uid, _repo_id):
490 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
491 ctx = self._get_ctx(repo, revision)
504 ctx = self._get_ctx(repo, revision)
492 fctx = ctx.filectx(path)
505 fctx = ctx.filectx(path)
493
506
494 file_log = list(fctx.filelog())
507 file_log = list(fctx.filelog())
495 if limit:
508 if limit:
496 # Limit to the last n items
509 # Limit to the last n items
497 file_log = file_log[-limit:]
510 file_log = file_log[-limit:]
498
511
499 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
512 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
500 return _node_history_until(context_uid, repo_id, revision, path, limit)
513 return _node_history_until(context_uid, repo_id, revision, path, limit)
501
514
502 @reraise_safe_exceptions
515 @reraise_safe_exceptions
503 def fctx_annotate(self, wire, revision, path):
516 def fctx_annotate(self, wire, revision, path):
504 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
505 ctx = self._get_ctx(repo, revision)
518 ctx = self._get_ctx(repo, revision)
506 fctx = ctx.filectx(path)
519 fctx = ctx.filectx(path)
507
520
508 result = []
521 result = []
509 for i, annotate_obj in enumerate(fctx.annotate(), 1):
522 for i, annotate_obj in enumerate(fctx.annotate(), 1):
510 ln_no = i
523 ln_no = i
511 sha = hex(annotate_obj.fctx.node())
524 sha = hex(annotate_obj.fctx.node())
512 content = annotate_obj.text
525 content = annotate_obj.text
513 result.append((ln_no, sha, content))
526 result.append((ln_no, sha, content))
514 return result
527 return result
515
528
516 @reraise_safe_exceptions
529 @reraise_safe_exceptions
517 def fctx_node_data(self, wire, revision, path):
530 def fctx_node_data(self, wire, revision, path):
518 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
519 ctx = self._get_ctx(repo, revision)
532 ctx = self._get_ctx(repo, revision)
520 fctx = ctx.filectx(path)
533 fctx = ctx.filectx(path)
521 return fctx.data()
534 return fctx.data()
522
535
523 @reraise_safe_exceptions
536 @reraise_safe_exceptions
524 def fctx_flags(self, wire, commit_id, path):
537 def fctx_flags(self, wire, commit_id, path):
525 cache_on, context_uid, repo_id = self._cache_on(wire)
538 cache_on, context_uid, repo_id = self._cache_on(wire)
526 @self.region.conditional_cache_on_arguments(condition=cache_on)
539 region = self._region(wire)
540 @region.conditional_cache_on_arguments(condition=cache_on)
527 def _fctx_flags(_repo_id, _commit_id, _path):
541 def _fctx_flags(_repo_id, _commit_id, _path):
528 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
529 ctx = self._get_ctx(repo, commit_id)
543 ctx = self._get_ctx(repo, commit_id)
530 fctx = ctx.filectx(path)
544 fctx = ctx.filectx(path)
531 return fctx.flags()
545 return fctx.flags()
532
546
533 return _fctx_flags(repo_id, commit_id, path)
547 return _fctx_flags(repo_id, commit_id, path)
534
548
535 @reraise_safe_exceptions
549 @reraise_safe_exceptions
536 def fctx_size(self, wire, commit_id, path):
550 def fctx_size(self, wire, commit_id, path):
537 cache_on, context_uid, repo_id = self._cache_on(wire)
551 cache_on, context_uid, repo_id = self._cache_on(wire)
538 @self.region.conditional_cache_on_arguments(condition=cache_on)
552 region = self._region(wire)
553 @region.conditional_cache_on_arguments(condition=cache_on)
539 def _fctx_size(_repo_id, _revision, _path):
554 def _fctx_size(_repo_id, _revision, _path):
540 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
541 ctx = self._get_ctx(repo, commit_id)
556 ctx = self._get_ctx(repo, commit_id)
542 fctx = ctx.filectx(path)
557 fctx = ctx.filectx(path)
543 return fctx.size()
558 return fctx.size()
544 return _fctx_size(repo_id, commit_id, path)
559 return _fctx_size(repo_id, commit_id, path)
545
560
546 @reraise_safe_exceptions
561 @reraise_safe_exceptions
547 def get_all_commit_ids(self, wire, name):
562 def get_all_commit_ids(self, wire, name):
548 cache_on, context_uid, repo_id = self._cache_on(wire)
563 cache_on, context_uid, repo_id = self._cache_on(wire)
549 @self.region.conditional_cache_on_arguments(condition=cache_on)
564 region = self._region(wire)
565 @region.conditional_cache_on_arguments(condition=cache_on)
550 def _get_all_commit_ids(_context_uid, _repo_id, _name):
566 def _get_all_commit_ids(_context_uid, _repo_id, _name):
551 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
552 repo = repo.filtered(name)
568 repo = repo.filtered(name)
553 revs = map(lambda x: hex(x[7]), repo.changelog.index)
569 revs = map(lambda x: hex(x[7]), repo.changelog.index)
554 return revs
570 return revs
555 return _get_all_commit_ids(context_uid, repo_id, name)
571 return _get_all_commit_ids(context_uid, repo_id, name)
556
572
557 @reraise_safe_exceptions
573 @reraise_safe_exceptions
558 def get_config_value(self, wire, section, name, untrusted=False):
574 def get_config_value(self, wire, section, name, untrusted=False):
559 repo = self._factory.repo(wire)
575 repo = self._factory.repo(wire)
560 return repo.ui.config(section, name, untrusted=untrusted)
576 return repo.ui.config(section, name, untrusted=untrusted)
561
577
562 @reraise_safe_exceptions
578 @reraise_safe_exceptions
563 def is_large_file(self, wire, commit_id, path):
579 def is_large_file(self, wire, commit_id, path):
564 cache_on, context_uid, repo_id = self._cache_on(wire)
580 cache_on, context_uid, repo_id = self._cache_on(wire)
565 @self.region.conditional_cache_on_arguments(condition=cache_on)
581 region = self._region(wire)
582 @region.conditional_cache_on_arguments(condition=cache_on)
566 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
583 def _is_large_file(_context_uid, _repo_id, _commit_id, _path):
567 return largefiles.lfutil.isstandin(path)
584 return largefiles.lfutil.isstandin(path)
568
585
569 return _is_large_file(context_uid, repo_id, commit_id, path)
586 return _is_large_file(context_uid, repo_id, commit_id, path)
570
587
571 @reraise_safe_exceptions
588 @reraise_safe_exceptions
572 def is_binary(self, wire, revision, path):
589 def is_binary(self, wire, revision, path):
573 cache_on, context_uid, repo_id = self._cache_on(wire)
590 cache_on, context_uid, repo_id = self._cache_on(wire)
574
591
575 @self.region.conditional_cache_on_arguments(condition=cache_on)
592 region = self._region(wire)
593 @region.conditional_cache_on_arguments(condition=cache_on)
576 def _is_binary(_repo_id, _sha, _path):
594 def _is_binary(_repo_id, _sha, _path):
577 repo = self._factory.repo(wire)
595 repo = self._factory.repo(wire)
578 ctx = self._get_ctx(repo, revision)
596 ctx = self._get_ctx(repo, revision)
579 fctx = ctx.filectx(path)
597 fctx = ctx.filectx(path)
580 return fctx.isbinary()
598 return fctx.isbinary()
581
599
582 return _is_binary(repo_id, revision, path)
600 return _is_binary(repo_id, revision, path)
583
601
584 @reraise_safe_exceptions
602 @reraise_safe_exceptions
585 def in_largefiles_store(self, wire, sha):
603 def in_largefiles_store(self, wire, sha):
586 repo = self._factory.repo(wire)
604 repo = self._factory.repo(wire)
587 return largefiles.lfutil.instore(repo, sha)
605 return largefiles.lfutil.instore(repo, sha)
588
606
589 @reraise_safe_exceptions
607 @reraise_safe_exceptions
590 def in_user_cache(self, wire, sha):
608 def in_user_cache(self, wire, sha):
591 repo = self._factory.repo(wire)
609 repo = self._factory.repo(wire)
592 return largefiles.lfutil.inusercache(repo.ui, sha)
610 return largefiles.lfutil.inusercache(repo.ui, sha)
593
611
594 @reraise_safe_exceptions
612 @reraise_safe_exceptions
595 def store_path(self, wire, sha):
613 def store_path(self, wire, sha):
596 repo = self._factory.repo(wire)
614 repo = self._factory.repo(wire)
597 return largefiles.lfutil.storepath(repo, sha)
615 return largefiles.lfutil.storepath(repo, sha)
598
616
599 @reraise_safe_exceptions
617 @reraise_safe_exceptions
600 def link(self, wire, sha, path):
618 def link(self, wire, sha, path):
601 repo = self._factory.repo(wire)
619 repo = self._factory.repo(wire)
602 largefiles.lfutil.link(
620 largefiles.lfutil.link(
603 largefiles.lfutil.usercachepath(repo.ui, sha), path)
621 largefiles.lfutil.usercachepath(repo.ui, sha), path)
604
622
605 @reraise_safe_exceptions
623 @reraise_safe_exceptions
606 def localrepository(self, wire, create=False):
624 def localrepository(self, wire, create=False):
607 self._factory.repo(wire, create=create)
625 self._factory.repo(wire, create=create)
608
626
609 @reraise_safe_exceptions
627 @reraise_safe_exceptions
610 def lookup(self, wire, revision, both):
628 def lookup(self, wire, revision, both):
611 cache_on, context_uid, repo_id = self._cache_on(wire)
629 cache_on, context_uid, repo_id = self._cache_on(wire)
612
630
613 @self.region.conditional_cache_on_arguments(condition=cache_on)
631 region = self._region(wire)
632 @region.conditional_cache_on_arguments(condition=cache_on)
614 def _lookup(_context_uid, _repo_id, _revision, _both):
633 def _lookup(_context_uid, _repo_id, _revision, _both):
615
634
616 repo = self._factory.repo(wire)
635 repo = self._factory.repo(wire)
617 rev = _revision
636 rev = _revision
618 if isinstance(rev, int):
637 if isinstance(rev, int):
619 # NOTE(marcink):
638 # NOTE(marcink):
620 # since Mercurial doesn't support negative indexes properly
639 # since Mercurial doesn't support negative indexes properly
621 # we need to shift accordingly by one to get proper index, e.g
640 # we need to shift accordingly by one to get proper index, e.g
622 # repo[-1] => repo[-2]
641 # repo[-1] => repo[-2]
623 # repo[0] => repo[-1]
642 # repo[0] => repo[-1]
624 if rev <= 0:
643 if rev <= 0:
625 rev = rev + -1
644 rev = rev + -1
626 try:
645 try:
627 ctx = self._get_ctx(repo, rev)
646 ctx = self._get_ctx(repo, rev)
628 except (TypeError, RepoLookupError) as e:
647 except (TypeError, RepoLookupError) as e:
629 e._org_exc_tb = traceback.format_exc()
648 e._org_exc_tb = traceback.format_exc()
630 raise exceptions.LookupException(e)(rev)
649 raise exceptions.LookupException(e)(rev)
631 except LookupError as e:
650 except LookupError as e:
632 e._org_exc_tb = traceback.format_exc()
651 e._org_exc_tb = traceback.format_exc()
633 raise exceptions.LookupException(e)(e.name)
652 raise exceptions.LookupException(e)(e.name)
634
653
635 if not both:
654 if not both:
636 return ctx.hex()
655 return ctx.hex()
637
656
638 ctx = repo[ctx.hex()]
657 ctx = repo[ctx.hex()]
639 return ctx.hex(), ctx.rev()
658 return ctx.hex(), ctx.rev()
640
659
641 return _lookup(context_uid, repo_id, revision, both)
660 return _lookup(context_uid, repo_id, revision, both)
642
661
643 @reraise_safe_exceptions
662 @reraise_safe_exceptions
644 def sync_push(self, wire, url):
663 def sync_push(self, wire, url):
645 if not self.check_url(url, wire['config']):
664 if not self.check_url(url, wire['config']):
646 return
665 return
647
666
648 repo = self._factory.repo(wire)
667 repo = self._factory.repo(wire)
649
668
650 # Disable any prompts for this repo
669 # Disable any prompts for this repo
651 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
670 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
652
671
653 bookmarks = dict(repo._bookmarks).keys()
672 bookmarks = dict(repo._bookmarks).keys()
654 remote = peer(repo, {}, url)
673 remote = peer(repo, {}, url)
655 # Disable any prompts for this remote
674 # Disable any prompts for this remote
656 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
675 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
657
676
658 return exchange.push(
677 return exchange.push(
659 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
678 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
660
679
661 @reraise_safe_exceptions
680 @reraise_safe_exceptions
662 def revision(self, wire, rev):
681 def revision(self, wire, rev):
663 repo = self._factory.repo(wire)
682 repo = self._factory.repo(wire)
664 ctx = self._get_ctx(repo, rev)
683 ctx = self._get_ctx(repo, rev)
665 return ctx.rev()
684 return ctx.rev()
666
685
667 @reraise_safe_exceptions
686 @reraise_safe_exceptions
668 def rev_range(self, wire, commit_filter):
687 def rev_range(self, wire, commit_filter):
669 cache_on, context_uid, repo_id = self._cache_on(wire)
688 cache_on, context_uid, repo_id = self._cache_on(wire)
670
689
671 @self.region.conditional_cache_on_arguments(condition=cache_on)
690 region = self._region(wire)
691 @region.conditional_cache_on_arguments(condition=cache_on)
672 def _rev_range(_context_uid, _repo_id, _filter):
692 def _rev_range(_context_uid, _repo_id, _filter):
673 repo = self._factory.repo(wire)
693 repo = self._factory.repo(wire)
674 revisions = [rev for rev in revrange(repo, commit_filter)]
694 revisions = [rev for rev in revrange(repo, commit_filter)]
675 return revisions
695 return revisions
676
696
677 return _rev_range(context_uid, repo_id, sorted(commit_filter))
697 return _rev_range(context_uid, repo_id, sorted(commit_filter))
678
698
679 @reraise_safe_exceptions
699 @reraise_safe_exceptions
680 def rev_range_hash(self, wire, node):
700 def rev_range_hash(self, wire, node):
681 repo = self._factory.repo(wire)
701 repo = self._factory.repo(wire)
682
702
683 def get_revs(repo, rev_opt):
703 def get_revs(repo, rev_opt):
684 if rev_opt:
704 if rev_opt:
685 revs = revrange(repo, rev_opt)
705 revs = revrange(repo, rev_opt)
686 if len(revs) == 0:
706 if len(revs) == 0:
687 return (nullrev, nullrev)
707 return (nullrev, nullrev)
688 return max(revs), min(revs)
708 return max(revs), min(revs)
689 else:
709 else:
690 return len(repo) - 1, 0
710 return len(repo) - 1, 0
691
711
692 stop, start = get_revs(repo, [node + ':'])
712 stop, start = get_revs(repo, [node + ':'])
693 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
713 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
694 return revs
714 return revs
695
715
696 @reraise_safe_exceptions
716 @reraise_safe_exceptions
697 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
717 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
698 other_path = kwargs.pop('other_path', None)
718 other_path = kwargs.pop('other_path', None)
699
719
700 # case when we want to compare two independent repositories
720 # case when we want to compare two independent repositories
701 if other_path and other_path != wire["path"]:
721 if other_path and other_path != wire["path"]:
702 baseui = self._factory._create_config(wire["config"])
722 baseui = self._factory._create_config(wire["config"])
703 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
723 repo = unionrepo.makeunionrepository(baseui, other_path, wire["path"])
704 else:
724 else:
705 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
706 return list(repo.revs(rev_spec, *args))
726 return list(repo.revs(rev_spec, *args))
707
727
708 @reraise_safe_exceptions
728 @reraise_safe_exceptions
709 def verify(self, wire,):
729 def verify(self, wire,):
710 repo = self._factory.repo(wire)
730 repo = self._factory.repo(wire)
711 baseui = self._factory._create_config(wire['config'])
731 baseui = self._factory._create_config(wire['config'])
712
732
713 baseui, output = patch_ui_message_output(baseui)
733 baseui, output = patch_ui_message_output(baseui)
714
734
715 repo.ui = baseui
735 repo.ui = baseui
716 verify.verify(repo)
736 verify.verify(repo)
717 return output.getvalue()
737 return output.getvalue()
718
738
719 @reraise_safe_exceptions
739 @reraise_safe_exceptions
720 def hg_update_cache(self, wire,):
740 def hg_update_cache(self, wire,):
721 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
722 baseui = self._factory._create_config(wire['config'])
742 baseui = self._factory._create_config(wire['config'])
723 baseui, output = patch_ui_message_output(baseui)
743 baseui, output = patch_ui_message_output(baseui)
724
744
725 repo.ui = baseui
745 repo.ui = baseui
726 with repo.wlock(), repo.lock():
746 with repo.wlock(), repo.lock():
727 repo.updatecaches(full=True)
747 repo.updatecaches(full=True)
728
748
729 return output.getvalue()
749 return output.getvalue()
730
750
731 @reraise_safe_exceptions
751 @reraise_safe_exceptions
732 def hg_rebuild_fn_cache(self, wire,):
752 def hg_rebuild_fn_cache(self, wire,):
733 repo = self._factory.repo(wire)
753 repo = self._factory.repo(wire)
734 baseui = self._factory._create_config(wire['config'])
754 baseui = self._factory._create_config(wire['config'])
735 baseui, output = patch_ui_message_output(baseui)
755 baseui, output = patch_ui_message_output(baseui)
736
756
737 repo.ui = baseui
757 repo.ui = baseui
738
758
739 repair.rebuildfncache(baseui, repo)
759 repair.rebuildfncache(baseui, repo)
740
760
741 return output.getvalue()
761 return output.getvalue()
742
762
743 @reraise_safe_exceptions
763 @reraise_safe_exceptions
744 def tags(self, wire):
764 def tags(self, wire):
745 cache_on, context_uid, repo_id = self._cache_on(wire)
765 cache_on, context_uid, repo_id = self._cache_on(wire)
746 @self.region.conditional_cache_on_arguments(condition=cache_on)
766 region = self._region(wire)
767 @region.conditional_cache_on_arguments(condition=cache_on)
747 def _tags(_context_uid, _repo_id):
768 def _tags(_context_uid, _repo_id):
748 repo = self._factory.repo(wire)
769 repo = self._factory.repo(wire)
749 return repo.tags()
770 return repo.tags()
750
771
751 return _tags(context_uid, repo_id)
772 return _tags(context_uid, repo_id)
752
773
753 @reraise_safe_exceptions
774 @reraise_safe_exceptions
754 def update(self, wire, node=None, clean=False):
775 def update(self, wire, node=None, clean=False):
755 repo = self._factory.repo(wire)
776 repo = self._factory.repo(wire)
756 baseui = self._factory._create_config(wire['config'])
777 baseui = self._factory._create_config(wire['config'])
757 commands.update(baseui, repo, node=node, clean=clean)
778 commands.update(baseui, repo, node=node, clean=clean)
758
779
759 @reraise_safe_exceptions
780 @reraise_safe_exceptions
760 def identify(self, wire):
781 def identify(self, wire):
761 repo = self._factory.repo(wire)
782 repo = self._factory.repo(wire)
762 baseui = self._factory._create_config(wire['config'])
783 baseui = self._factory._create_config(wire['config'])
763 output = io.BytesIO()
784 output = io.BytesIO()
764 baseui.write = output.write
785 baseui.write = output.write
765 # This is required to get a full node id
786 # This is required to get a full node id
766 baseui.debugflag = True
787 baseui.debugflag = True
767 commands.identify(baseui, repo, id=True)
788 commands.identify(baseui, repo, id=True)
768
789
769 return output.getvalue()
790 return output.getvalue()
770
791
771 @reraise_safe_exceptions
792 @reraise_safe_exceptions
772 def heads(self, wire, branch=None):
793 def heads(self, wire, branch=None):
773 repo = self._factory.repo(wire)
794 repo = self._factory.repo(wire)
774 baseui = self._factory._create_config(wire['config'])
795 baseui = self._factory._create_config(wire['config'])
775 output = io.BytesIO()
796 output = io.BytesIO()
776
797
777 def write(data, **unused_kwargs):
798 def write(data, **unused_kwargs):
778 output.write(data)
799 output.write(data)
779
800
780 baseui.write = write
801 baseui.write = write
781 if branch:
802 if branch:
782 args = [branch]
803 args = [branch]
783 else:
804 else:
784 args = []
805 args = []
785 commands.heads(baseui, repo, template='{node} ', *args)
806 commands.heads(baseui, repo, template='{node} ', *args)
786
807
787 return output.getvalue()
808 return output.getvalue()
788
809
789 @reraise_safe_exceptions
810 @reraise_safe_exceptions
790 def ancestor(self, wire, revision1, revision2):
811 def ancestor(self, wire, revision1, revision2):
791 repo = self._factory.repo(wire)
812 repo = self._factory.repo(wire)
792 changelog = repo.changelog
813 changelog = repo.changelog
793 lookup = repo.lookup
814 lookup = repo.lookup
794 a = changelog.ancestor(lookup(revision1), lookup(revision2))
815 a = changelog.ancestor(lookup(revision1), lookup(revision2))
795 return hex(a)
816 return hex(a)
796
817
797 @reraise_safe_exceptions
818 @reraise_safe_exceptions
798 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
819 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
799 baseui = self._factory._create_config(wire["config"], hooks=hooks)
820 baseui = self._factory._create_config(wire["config"], hooks=hooks)
800 clone(baseui, source, dest, noupdate=not update_after_clone)
821 clone(baseui, source, dest, noupdate=not update_after_clone)
801
822
802 @reraise_safe_exceptions
823 @reraise_safe_exceptions
803 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
824 def commitctx(self, wire, message, parents, commit_time, commit_timezone, user, files, extra, removed, updated):
804
825
805 repo = self._factory.repo(wire)
826 repo = self._factory.repo(wire)
806 baseui = self._factory._create_config(wire['config'])
827 baseui = self._factory._create_config(wire['config'])
807 publishing = baseui.configbool('phases', 'publish')
828 publishing = baseui.configbool('phases', 'publish')
808 if publishing:
829 if publishing:
809 new_commit = 'public'
830 new_commit = 'public'
810 else:
831 else:
811 new_commit = 'draft'
832 new_commit = 'draft'
812
833
813 def _filectxfn(_repo, ctx, path):
834 def _filectxfn(_repo, ctx, path):
814 """
835 """
815 Marks given path as added/changed/removed in a given _repo. This is
836 Marks given path as added/changed/removed in a given _repo. This is
816 for internal mercurial commit function.
837 for internal mercurial commit function.
817 """
838 """
818
839
819 # check if this path is removed
840 # check if this path is removed
820 if path in removed:
841 if path in removed:
821 # returning None is a way to mark node for removal
842 # returning None is a way to mark node for removal
822 return None
843 return None
823
844
824 # check if this path is added
845 # check if this path is added
825 for node in updated:
846 for node in updated:
826 if node['path'] == path:
847 if node['path'] == path:
827 return memfilectx(
848 return memfilectx(
828 _repo,
849 _repo,
829 changectx=ctx,
850 changectx=ctx,
830 path=node['path'],
851 path=node['path'],
831 data=node['content'],
852 data=node['content'],
832 islink=False,
853 islink=False,
833 isexec=bool(node['mode'] & stat.S_IXUSR),
854 isexec=bool(node['mode'] & stat.S_IXUSR),
834 copysource=False)
855 copysource=False)
835
856
836 raise exceptions.AbortException()(
857 raise exceptions.AbortException()(
837 "Given path haven't been marked as added, "
858 "Given path haven't been marked as added, "
838 "changed or removed (%s)" % path)
859 "changed or removed (%s)" % path)
839
860
840 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
861 with repo.ui.configoverride({('phases', 'new-commit'): new_commit}):
841
862
842 commit_ctx = memctx(
863 commit_ctx = memctx(
843 repo=repo,
864 repo=repo,
844 parents=parents,
865 parents=parents,
845 text=message,
866 text=message,
846 files=files,
867 files=files,
847 filectxfn=_filectxfn,
868 filectxfn=_filectxfn,
848 user=user,
869 user=user,
849 date=(commit_time, commit_timezone),
870 date=(commit_time, commit_timezone),
850 extra=extra)
871 extra=extra)
851
872
852 n = repo.commitctx(commit_ctx)
873 n = repo.commitctx(commit_ctx)
853 new_id = hex(n)
874 new_id = hex(n)
854
875
855 return new_id
876 return new_id
856
877
857 @reraise_safe_exceptions
878 @reraise_safe_exceptions
858 def pull(self, wire, url, commit_ids=None):
879 def pull(self, wire, url, commit_ids=None):
859 repo = self._factory.repo(wire)
880 repo = self._factory.repo(wire)
860 # Disable any prompts for this repo
881 # Disable any prompts for this repo
861 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
882 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
862
883
863 remote = peer(repo, {}, url)
884 remote = peer(repo, {}, url)
864 # Disable any prompts for this remote
885 # Disable any prompts for this remote
865 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
886 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
866
887
867 if commit_ids:
888 if commit_ids:
868 commit_ids = [bin(commit_id) for commit_id in commit_ids]
889 commit_ids = [bin(commit_id) for commit_id in commit_ids]
869
890
870 return exchange.pull(
891 return exchange.pull(
871 repo, remote, heads=commit_ids, force=None).cgresult
892 repo, remote, heads=commit_ids, force=None).cgresult
872
893
873 @reraise_safe_exceptions
894 @reraise_safe_exceptions
874 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
895 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None, hooks=True):
875 repo = self._factory.repo(wire)
896 repo = self._factory.repo(wire)
876 baseui = self._factory._create_config(wire['config'], hooks=hooks)
897 baseui = self._factory._create_config(wire['config'], hooks=hooks)
877
898
878 # Mercurial internally has a lot of logic that checks ONLY if
899 # Mercurial internally has a lot of logic that checks ONLY if
879 # option is defined, we just pass those if they are defined then
900 # option is defined, we just pass those if they are defined then
880 opts = {}
901 opts = {}
881 if bookmark:
902 if bookmark:
882 opts['bookmark'] = bookmark
903 opts['bookmark'] = bookmark
883 if branch:
904 if branch:
884 opts['branch'] = branch
905 opts['branch'] = branch
885 if revision:
906 if revision:
886 opts['rev'] = revision
907 opts['rev'] = revision
887
908
888 commands.pull(baseui, repo, source, **opts)
909 commands.pull(baseui, repo, source, **opts)
889
910
890 @reraise_safe_exceptions
911 @reraise_safe_exceptions
891 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
912 def push(self, wire, revisions, dest_path, hooks=True, push_branches=False):
892 repo = self._factory.repo(wire)
913 repo = self._factory.repo(wire)
893 baseui = self._factory._create_config(wire['config'], hooks=hooks)
914 baseui = self._factory._create_config(wire['config'], hooks=hooks)
894 commands.push(baseui, repo, dest=dest_path, rev=revisions,
915 commands.push(baseui, repo, dest=dest_path, rev=revisions,
895 new_branch=push_branches)
916 new_branch=push_branches)
896
917
897 @reraise_safe_exceptions
918 @reraise_safe_exceptions
898 def strip(self, wire, revision, update, backup):
919 def strip(self, wire, revision, update, backup):
899 repo = self._factory.repo(wire)
920 repo = self._factory.repo(wire)
900 ctx = self._get_ctx(repo, revision)
921 ctx = self._get_ctx(repo, revision)
901 hgext_strip(
922 hgext_strip(
902 repo.baseui, repo, ctx.node(), update=update, backup=backup)
923 repo.baseui, repo, ctx.node(), update=update, backup=backup)
903
924
904 @reraise_safe_exceptions
925 @reraise_safe_exceptions
905 def get_unresolved_files(self, wire):
926 def get_unresolved_files(self, wire):
906 repo = self._factory.repo(wire)
927 repo = self._factory.repo(wire)
907
928
908 log.debug('Calculating unresolved files for repo: %s', repo)
929 log.debug('Calculating unresolved files for repo: %s', repo)
909 output = io.BytesIO()
930 output = io.BytesIO()
910
931
911 def write(data, **unused_kwargs):
932 def write(data, **unused_kwargs):
912 output.write(data)
933 output.write(data)
913
934
914 baseui = self._factory._create_config(wire['config'])
935 baseui = self._factory._create_config(wire['config'])
915 baseui.write = write
936 baseui.write = write
916
937
917 commands.resolve(baseui, repo, list=True)
938 commands.resolve(baseui, repo, list=True)
918 unresolved = output.getvalue().splitlines(0)
939 unresolved = output.getvalue().splitlines(0)
919 return unresolved
940 return unresolved
920
941
921 @reraise_safe_exceptions
942 @reraise_safe_exceptions
922 def merge(self, wire, revision):
943 def merge(self, wire, revision):
923 repo = self._factory.repo(wire)
944 repo = self._factory.repo(wire)
924 baseui = self._factory._create_config(wire['config'])
945 baseui = self._factory._create_config(wire['config'])
925 repo.ui.setconfig('ui', 'merge', 'internal:dump')
946 repo.ui.setconfig('ui', 'merge', 'internal:dump')
926
947
927 # In case of sub repositories are used mercurial prompts the user in
948 # In case of sub repositories are used mercurial prompts the user in
928 # case of merge conflicts or different sub repository sources. By
949 # case of merge conflicts or different sub repository sources. By
929 # setting the interactive flag to `False` mercurial doesn't prompt the
950 # setting the interactive flag to `False` mercurial doesn't prompt the
930 # used but instead uses a default value.
951 # used but instead uses a default value.
931 repo.ui.setconfig('ui', 'interactive', False)
952 repo.ui.setconfig('ui', 'interactive', False)
932 commands.merge(baseui, repo, rev=revision)
953 commands.merge(baseui, repo, rev=revision)
933
954
934 @reraise_safe_exceptions
955 @reraise_safe_exceptions
935 def merge_state(self, wire):
956 def merge_state(self, wire):
936 repo = self._factory.repo(wire)
957 repo = self._factory.repo(wire)
937 repo.ui.setconfig('ui', 'merge', 'internal:dump')
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
938
959
939 # In case of sub repositories are used mercurial prompts the user in
960 # In case of sub repositories are used mercurial prompts the user in
940 # case of merge conflicts or different sub repository sources. By
961 # case of merge conflicts or different sub repository sources. By
941 # setting the interactive flag to `False` mercurial doesn't prompt the
962 # setting the interactive flag to `False` mercurial doesn't prompt the
942 # used but instead uses a default value.
963 # used but instead uses a default value.
943 repo.ui.setconfig('ui', 'interactive', False)
964 repo.ui.setconfig('ui', 'interactive', False)
944 ms = hg_merge.mergestate(repo)
965 ms = hg_merge.mergestate(repo)
945 return [x for x in ms.unresolved()]
966 return [x for x in ms.unresolved()]
946
967
947 @reraise_safe_exceptions
968 @reraise_safe_exceptions
948 def commit(self, wire, message, username, close_branch=False):
969 def commit(self, wire, message, username, close_branch=False):
949 repo = self._factory.repo(wire)
970 repo = self._factory.repo(wire)
950 baseui = self._factory._create_config(wire['config'])
971 baseui = self._factory._create_config(wire['config'])
951 repo.ui.setconfig('ui', 'username', username)
972 repo.ui.setconfig('ui', 'username', username)
952 commands.commit(baseui, repo, message=message, close_branch=close_branch)
973 commands.commit(baseui, repo, message=message, close_branch=close_branch)
953
974
954 @reraise_safe_exceptions
975 @reraise_safe_exceptions
955 def rebase(self, wire, source=None, dest=None, abort=False):
976 def rebase(self, wire, source=None, dest=None, abort=False):
956 repo = self._factory.repo(wire)
977 repo = self._factory.repo(wire)
957 baseui = self._factory._create_config(wire['config'])
978 baseui = self._factory._create_config(wire['config'])
958 repo.ui.setconfig('ui', 'merge', 'internal:dump')
979 repo.ui.setconfig('ui', 'merge', 'internal:dump')
959 # In case of sub repositories are used mercurial prompts the user in
980 # In case of sub repositories are used mercurial prompts the user in
960 # case of merge conflicts or different sub repository sources. By
981 # case of merge conflicts or different sub repository sources. By
961 # setting the interactive flag to `False` mercurial doesn't prompt the
982 # setting the interactive flag to `False` mercurial doesn't prompt the
962 # used but instead uses a default value.
983 # used but instead uses a default value.
963 repo.ui.setconfig('ui', 'interactive', False)
984 repo.ui.setconfig('ui', 'interactive', False)
964 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
985 rebase.rebase(baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
965
986
966 @reraise_safe_exceptions
987 @reraise_safe_exceptions
967 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
988 def tag(self, wire, name, revision, message, local, user, tag_time, tag_timezone):
968 repo = self._factory.repo(wire)
989 repo = self._factory.repo(wire)
969 ctx = self._get_ctx(repo, revision)
990 ctx = self._get_ctx(repo, revision)
970 node = ctx.node()
991 node = ctx.node()
971
992
972 date = (tag_time, tag_timezone)
993 date = (tag_time, tag_timezone)
973 try:
994 try:
974 hg_tag.tag(repo, name, node, message, local, user, date)
995 hg_tag.tag(repo, name, node, message, local, user, date)
975 except Abort as e:
996 except Abort as e:
976 log.exception("Tag operation aborted")
997 log.exception("Tag operation aborted")
977 # Exception can contain unicode which we convert
998 # Exception can contain unicode which we convert
978 raise exceptions.AbortException(e)(repr(e))
999 raise exceptions.AbortException(e)(repr(e))
979
1000
980 @reraise_safe_exceptions
1001 @reraise_safe_exceptions
981 def bookmark(self, wire, bookmark, revision=None):
1002 def bookmark(self, wire, bookmark, revision=None):
982 repo = self._factory.repo(wire)
1003 repo = self._factory.repo(wire)
983 baseui = self._factory._create_config(wire['config'])
1004 baseui = self._factory._create_config(wire['config'])
984 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
1005 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
985
1006
986 @reraise_safe_exceptions
1007 @reraise_safe_exceptions
987 def install_hooks(self, wire, force=False):
1008 def install_hooks(self, wire, force=False):
988 # we don't need any special hooks for Mercurial
1009 # we don't need any special hooks for Mercurial
989 pass
1010 pass
990
1011
991 @reraise_safe_exceptions
1012 @reraise_safe_exceptions
992 def get_hooks_info(self, wire):
1013 def get_hooks_info(self, wire):
993 return {
1014 return {
994 'pre_version': vcsserver.__version__,
1015 'pre_version': vcsserver.__version__,
995 'post_version': vcsserver.__version__,
1016 'post_version': vcsserver.__version__,
996 }
1017 }
997
1018
998 @reraise_safe_exceptions
1019 @reraise_safe_exceptions
1020 def set_head_ref(self, wire, head_name):
1021 pass
1022
1023 @reraise_safe_exceptions
999 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1024 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
1000 archive_dir_name, commit_id):
1025 archive_dir_name, commit_id):
1001
1026
1002 def file_walker(_commit_id, path):
1027 def file_walker(_commit_id, path):
1003 repo = self._factory.repo(wire)
1028 repo = self._factory.repo(wire)
1004 ctx = repo[_commit_id]
1029 ctx = repo[_commit_id]
1005 is_root = path in ['', '/']
1030 is_root = path in ['', '/']
1006 if is_root:
1031 if is_root:
1007 matcher = alwaysmatcher(badfn=None)
1032 matcher = alwaysmatcher(badfn=None)
1008 else:
1033 else:
1009 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1034 matcher = patternmatcher('', [(b'glob', path+'/**', b'')], badfn=None)
1010 file_iter = ctx.manifest().walk(matcher)
1035 file_iter = ctx.manifest().walk(matcher)
1011
1036
1012 for fn in file_iter:
1037 for fn in file_iter:
1013 file_path = fn
1038 file_path = fn
1014 flags = ctx.flags(fn)
1039 flags = ctx.flags(fn)
1015 mode = b'x' in flags and 0o755 or 0o644
1040 mode = b'x' in flags and 0o755 or 0o644
1016 is_link = b'l' in flags
1041 is_link = b'l' in flags
1017
1042
1018 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1043 yield ArchiveNode(file_path, mode, is_link, ctx[fn].data)
1019
1044
1020 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1045 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
1021 archive_dir_name, commit_id)
1046 archive_dir_name, commit_id)
1022
1047
@@ -1,729 +1,729 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2020 RhodeCode GmbH
4 # Copyright (C) 2014-2020 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Hooks calling Connection failed on %s', connection.__dict__)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55
55
56 response_data = response.read()
56 response_data = response.read()
57
57
58 try:
58 try:
59 return json.loads(response_data)
59 return json.loads(response_data)
60 except Exception:
60 except Exception:
61 log.exception('Failed to decode hook response json data. '
61 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
62 'response_code:%s, raw_data:%s',
63 response.status, response_data)
63 response.status, response_data)
64 raise
64 raise
65
65
66 def _serialize(self, hook_name, extras):
66 def _serialize(self, hook_name, extras):
67 data = {
67 data = {
68 'method': hook_name,
68 'method': hook_name,
69 'extras': extras
69 'extras': extras
70 }
70 }
71 return json.dumps(data)
71 return json.dumps(data)
72
72
73
73
74 class HooksDummyClient(object):
74 class HooksDummyClient(object):
75 def __init__(self, hooks_module):
75 def __init__(self, hooks_module):
76 self._hooks_module = importlib.import_module(hooks_module)
76 self._hooks_module = importlib.import_module(hooks_module)
77
77
78 def __call__(self, hook_name, extras):
78 def __call__(self, hook_name, extras):
79 with self._hooks_module.Hooks() as hooks:
79 with self._hooks_module.Hooks() as hooks:
80 return getattr(hooks, hook_name)(extras)
80 return getattr(hooks, hook_name)(extras)
81
81
82
82
83 class HooksShadowRepoClient(object):
83 class HooksShadowRepoClient(object):
84
84
85 def __call__(self, hook_name, extras):
85 def __call__(self, hook_name, extras):
86 return {'output': '', 'status': 0}
86 return {'output': '', 'status': 0}
87
87
88
88
89 class RemoteMessageWriter(object):
89 class RemoteMessageWriter(object):
90 """Writer base class."""
90 """Writer base class."""
91 def write(self, message):
91 def write(self, message):
92 raise NotImplementedError()
92 raise NotImplementedError()
93
93
94
94
95 class HgMessageWriter(RemoteMessageWriter):
95 class HgMessageWriter(RemoteMessageWriter):
96 """Writer that knows how to send messages to mercurial clients."""
96 """Writer that knows how to send messages to mercurial clients."""
97
97
98 def __init__(self, ui):
98 def __init__(self, ui):
99 self.ui = ui
99 self.ui = ui
100
100
101 def write(self, message):
101 def write(self, message):
102 # TODO: Check why the quiet flag is set by default.
102 # TODO: Check why the quiet flag is set by default.
103 old = self.ui.quiet
103 old = self.ui.quiet
104 self.ui.quiet = False
104 self.ui.quiet = False
105 self.ui.status(message.encode('utf-8'))
105 self.ui.status(message.encode('utf-8'))
106 self.ui.quiet = old
106 self.ui.quiet = old
107
107
108
108
109 class GitMessageWriter(RemoteMessageWriter):
109 class GitMessageWriter(RemoteMessageWriter):
110 """Writer that knows how to send messages to git clients."""
110 """Writer that knows how to send messages to git clients."""
111
111
112 def __init__(self, stdout=None):
112 def __init__(self, stdout=None):
113 self.stdout = stdout or sys.stdout
113 self.stdout = stdout or sys.stdout
114
114
115 def write(self, message):
115 def write(self, message):
116 self.stdout.write(message.encode('utf-8'))
116 self.stdout.write(message.encode('utf-8'))
117
117
118
118
119 class SvnMessageWriter(RemoteMessageWriter):
119 class SvnMessageWriter(RemoteMessageWriter):
120 """Writer that knows how to send messages to svn clients."""
120 """Writer that knows how to send messages to svn clients."""
121
121
122 def __init__(self, stderr=None):
122 def __init__(self, stderr=None):
123 # SVN needs data sent to stderr for back-to-client messaging
123 # SVN needs data sent to stderr for back-to-client messaging
124 self.stderr = stderr or sys.stderr
124 self.stderr = stderr or sys.stderr
125
125
126 def write(self, message):
126 def write(self, message):
127 self.stderr.write(message.encode('utf-8'))
127 self.stderr.write(message.encode('utf-8'))
128
128
129
129
130 def _handle_exception(result):
130 def _handle_exception(result):
131 exception_class = result.get('exception')
131 exception_class = result.get('exception')
132 exception_traceback = result.get('exception_traceback')
132 exception_traceback = result.get('exception_traceback')
133
133
134 if exception_traceback:
134 if exception_traceback:
135 log.error('Got traceback from remote call:%s', exception_traceback)
135 log.error('Got traceback from remote call:%s', exception_traceback)
136
136
137 if exception_class == 'HTTPLockedRC':
137 if exception_class == 'HTTPLockedRC':
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
138 raise exceptions.RepositoryLockedException()(*result['exception_args'])
139 elif exception_class == 'HTTPBranchProtected':
139 elif exception_class == 'HTTPBranchProtected':
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
140 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
141 elif exception_class == 'RepositoryError':
141 elif exception_class == 'RepositoryError':
142 raise exceptions.VcsException()(*result['exception_args'])
142 raise exceptions.VcsException()(*result['exception_args'])
143 elif exception_class:
143 elif exception_class:
144 raise Exception('Got remote exception "%s" with args "%s"' %
144 raise Exception('Got remote exception "%s" with args "%s"' %
145 (exception_class, result['exception_args']))
145 (exception_class, result['exception_args']))
146
146
147
147
148 def _get_hooks_client(extras):
148 def _get_hooks_client(extras):
149 hooks_uri = extras.get('hooks_uri')
149 hooks_uri = extras.get('hooks_uri')
150 is_shadow_repo = extras.get('is_shadow_repo')
150 is_shadow_repo = extras.get('is_shadow_repo')
151 if hooks_uri:
151 if hooks_uri:
152 return HooksHttpClient(extras['hooks_uri'])
152 return HooksHttpClient(extras['hooks_uri'])
153 elif is_shadow_repo:
153 elif is_shadow_repo:
154 return HooksShadowRepoClient()
154 return HooksShadowRepoClient()
155 else:
155 else:
156 return HooksDummyClient(extras['hooks_module'])
156 return HooksDummyClient(extras['hooks_module'])
157
157
158
158
159 def _call_hook(hook_name, extras, writer):
159 def _call_hook(hook_name, extras, writer):
160 hooks_client = _get_hooks_client(extras)
160 hooks_client = _get_hooks_client(extras)
161 log.debug('Hooks, using client:%s', hooks_client)
161 log.debug('Hooks, using client:%s', hooks_client)
162 result = hooks_client(hook_name, extras)
162 result = hooks_client(hook_name, extras)
163 log.debug('Hooks got result: %s', result)
163 log.debug('Hooks got result: %s', result)
164
164
165 _handle_exception(result)
165 _handle_exception(result)
166 writer.write(result['output'])
166 writer.write(result['output'])
167
167
168 return result['status']
168 return result['status']
169
169
170
170
171 def _extras_from_ui(ui):
171 def _extras_from_ui(ui):
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
172 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
173 if not hook_data:
173 if not hook_data:
174 # maybe it's inside environ ?
174 # maybe it's inside environ ?
175 env_hook_data = os.environ.get('RC_SCM_DATA')
175 env_hook_data = os.environ.get('RC_SCM_DATA')
176 if env_hook_data:
176 if env_hook_data:
177 hook_data = env_hook_data
177 hook_data = env_hook_data
178
178
179 extras = {}
179 extras = {}
180 if hook_data:
180 if hook_data:
181 extras = json.loads(hook_data)
181 extras = json.loads(hook_data)
182 return extras
182 return extras
183
183
184
184
185 def _rev_range_hash(repo, node, check_heads=False):
185 def _rev_range_hash(repo, node, check_heads=False):
186 from vcsserver.hgcompat import get_ctx
186 from vcsserver.hgcompat import get_ctx
187
187
188 commits = []
188 commits = []
189 revs = []
189 revs = []
190 start = get_ctx(repo, node).rev()
190 start = get_ctx(repo, node).rev()
191 end = len(repo)
191 end = len(repo)
192 for rev in range(start, end):
192 for rev in range(start, end):
193 revs.append(rev)
193 revs.append(rev)
194 ctx = get_ctx(repo, rev)
194 ctx = get_ctx(repo, rev)
195 commit_id = mercurial.node.hex(ctx.node())
195 commit_id = mercurial.node.hex(ctx.node())
196 branch = ctx.branch()
196 branch = ctx.branch()
197 commits.append((commit_id, branch))
197 commits.append((commit_id, branch))
198
198
199 parent_heads = []
199 parent_heads = []
200 if check_heads:
200 if check_heads:
201 parent_heads = _check_heads(repo, start, end, revs)
201 parent_heads = _check_heads(repo, start, end, revs)
202 return commits, parent_heads
202 return commits, parent_heads
203
203
204
204
205 def _check_heads(repo, start, end, commits):
205 def _check_heads(repo, start, end, commits):
206 from vcsserver.hgcompat import get_ctx
206 from vcsserver.hgcompat import get_ctx
207 changelog = repo.changelog
207 changelog = repo.changelog
208 parents = set()
208 parents = set()
209
209
210 for new_rev in commits:
210 for new_rev in commits:
211 for p in changelog.parentrevs(new_rev):
211 for p in changelog.parentrevs(new_rev):
212 if p == mercurial.node.nullrev:
212 if p == mercurial.node.nullrev:
213 continue
213 continue
214 if p < start:
214 if p < start:
215 parents.add(p)
215 parents.add(p)
216
216
217 for p in parents:
217 for p in parents:
218 branch = get_ctx(repo, p).branch()
218 branch = get_ctx(repo, p).branch()
219 # The heads descending from that parent, on the same branch
219 # The heads descending from that parent, on the same branch
220 parent_heads = set([p])
220 parent_heads = set([p])
221 reachable = set([p])
221 reachable = set([p])
222 for x in xrange(p + 1, end):
222 for x in xrange(p + 1, end):
223 if get_ctx(repo, x).branch() != branch:
223 if get_ctx(repo, x).branch() != branch:
224 continue
224 continue
225 for pp in changelog.parentrevs(x):
225 for pp in changelog.parentrevs(x):
226 if pp in reachable:
226 if pp in reachable:
227 reachable.add(x)
227 reachable.add(x)
228 parent_heads.discard(pp)
228 parent_heads.discard(pp)
229 parent_heads.add(x)
229 parent_heads.add(x)
230 # More than one head? Suggest merging
230 # More than one head? Suggest merging
231 if len(parent_heads) > 1:
231 if len(parent_heads) > 1:
232 return list(parent_heads)
232 return list(parent_heads)
233
233
234 return []
234 return []
235
235
236
236
237 def _get_git_env():
237 def _get_git_env():
238 env = {}
238 env = {}
239 for k, v in os.environ.items():
239 for k, v in os.environ.items():
240 if k.startswith('GIT'):
240 if k.startswith('GIT'):
241 env[k] = v
241 env[k] = v
242
242
243 # serialized version
243 # serialized version
244 return [(k, v) for k, v in env.items()]
244 return [(k, v) for k, v in env.items()]
245
245
246
246
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
247 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
248 env = {}
248 env = {}
249 for k, v in os.environ.items():
249 for k, v in os.environ.items():
250 if k.startswith('HG'):
250 if k.startswith('HG'):
251 env[k] = v
251 env[k] = v
252
252
253 env['HG_NODE'] = old_rev
253 env['HG_NODE'] = old_rev
254 env['HG_NODE_LAST'] = new_rev
254 env['HG_NODE_LAST'] = new_rev
255 env['HG_TXNID'] = txnid
255 env['HG_TXNID'] = txnid
256 env['HG_PENDING'] = repo_path
256 env['HG_PENDING'] = repo_path
257
257
258 return [(k, v) for k, v in env.items()]
258 return [(k, v) for k, v in env.items()]
259
259
260
260
261 def repo_size(ui, repo, **kwargs):
261 def repo_size(ui, repo, **kwargs):
262 extras = _extras_from_ui(ui)
262 extras = _extras_from_ui(ui)
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
263 return _call_hook('repo_size', extras, HgMessageWriter(ui))
264
264
265
265
266 def pre_pull(ui, repo, **kwargs):
266 def pre_pull(ui, repo, **kwargs):
267 extras = _extras_from_ui(ui)
267 extras = _extras_from_ui(ui)
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
268 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
269
269
270
270
271 def pre_pull_ssh(ui, repo, **kwargs):
271 def pre_pull_ssh(ui, repo, **kwargs):
272 extras = _extras_from_ui(ui)
272 extras = _extras_from_ui(ui)
273 if extras and extras.get('SSH'):
273 if extras and extras.get('SSH'):
274 return pre_pull(ui, repo, **kwargs)
274 return pre_pull(ui, repo, **kwargs)
275 return 0
275 return 0
276
276
277
277
278 def post_pull(ui, repo, **kwargs):
278 def post_pull(ui, repo, **kwargs):
279 extras = _extras_from_ui(ui)
279 extras = _extras_from_ui(ui)
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
280 return _call_hook('post_pull', extras, HgMessageWriter(ui))
281
281
282
282
283 def post_pull_ssh(ui, repo, **kwargs):
283 def post_pull_ssh(ui, repo, **kwargs):
284 extras = _extras_from_ui(ui)
284 extras = _extras_from_ui(ui)
285 if extras and extras.get('SSH'):
285 if extras and extras.get('SSH'):
286 return post_pull(ui, repo, **kwargs)
286 return post_pull(ui, repo, **kwargs)
287 return 0
287 return 0
288
288
289
289
290 def pre_push(ui, repo, node=None, **kwargs):
290 def pre_push(ui, repo, node=None, **kwargs):
291 """
291 """
292 Mercurial pre_push hook
292 Mercurial pre_push hook
293 """
293 """
294 extras = _extras_from_ui(ui)
294 extras = _extras_from_ui(ui)
295 detect_force_push = extras.get('detect_force_push')
295 detect_force_push = extras.get('detect_force_push')
296
296
297 rev_data = []
297 rev_data = []
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
298 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
299 branches = collections.defaultdict(list)
299 branches = collections.defaultdict(list)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
300 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
301 for commit_id, branch in commits:
301 for commit_id, branch in commits:
302 branches[branch].append(commit_id)
302 branches[branch].append(commit_id)
303
303
304 for branch, commits in branches.items():
304 for branch, commits in branches.items():
305 old_rev = kwargs.get('node_last') or commits[0]
305 old_rev = kwargs.get('node_last') or commits[0]
306 rev_data.append({
306 rev_data.append({
307 'total_commits': len(commits),
307 'total_commits': len(commits),
308 'old_rev': old_rev,
308 'old_rev': old_rev,
309 'new_rev': commits[-1],
309 'new_rev': commits[-1],
310 'ref': '',
310 'ref': '',
311 'type': 'branch',
311 'type': 'branch',
312 'name': branch,
312 'name': branch,
313 })
313 })
314
314
315 for push_ref in rev_data:
315 for push_ref in rev_data:
316 push_ref['multiple_heads'] = _heads
316 push_ref['multiple_heads'] = _heads
317
317
318 repo_path = os.path.join(
318 repo_path = os.path.join(
319 extras.get('repo_store', ''), extras.get('repository', ''))
319 extras.get('repo_store', ''), extras.get('repository', ''))
320 push_ref['hg_env'] = _get_hg_env(
320 push_ref['hg_env'] = _get_hg_env(
321 old_rev=push_ref['old_rev'],
321 old_rev=push_ref['old_rev'],
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
322 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
323 repo_path=repo_path)
323 repo_path=repo_path)
324
324
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
325 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
326 extras['commit_ids'] = rev_data
326 extras['commit_ids'] = rev_data
327
327
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
328 return _call_hook('pre_push', extras, HgMessageWriter(ui))
329
329
330
330
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
331 def pre_push_ssh(ui, repo, node=None, **kwargs):
332 extras = _extras_from_ui(ui)
332 extras = _extras_from_ui(ui)
333 if extras.get('SSH'):
333 if extras.get('SSH'):
334 return pre_push(ui, repo, node, **kwargs)
334 return pre_push(ui, repo, node, **kwargs)
335
335
336 return 0
336 return 0
337
337
338
338
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
339 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
340 """
340 """
341 Mercurial pre_push hook for SSH
341 Mercurial pre_push hook for SSH
342 """
342 """
343 extras = _extras_from_ui(ui)
343 extras = _extras_from_ui(ui)
344 if extras.get('SSH'):
344 if extras.get('SSH'):
345 permission = extras['SSH_PERMISSIONS']
345 permission = extras['SSH_PERMISSIONS']
346
346
347 if 'repository.write' == permission or 'repository.admin' == permission:
347 if 'repository.write' == permission or 'repository.admin' == permission:
348 return 0
348 return 0
349
349
350 # non-zero ret code
350 # non-zero ret code
351 return 1
351 return 1
352
352
353 return 0
353 return 0
354
354
355
355
def post_push(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook.

    Collects the pushed commit ids and the affected branches, bookmarks
    and tags, then forwards everything to the RhodeCode `post_push` hook.
    """
    extras = _extras_from_ui(ui)

    commits, _heads = _rev_range_hash(repo, node)

    commit_ids = [commit_id for commit_id, _branch in commits]
    branches = []
    for _commit_id, branch in commits:
        # preserve first-seen order while de-duplicating
        if branch not in branches:
            branches.append(branch)

    # bookmarks are recorded by the pushkey hook on the ui object, if it ran
    bookmarks = getattr(ui, '_rc_pushkey_branches', [])
    tags = []

    extras['hook_type'] = kwargs.get('hooktype', 'post_push')
    extras['commit_ids'] = commit_ids
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': bookmarks,
        'tags': tags
    }

    return _call_hook('post_push', extras, HgMessageWriter(ui))
385
385
386
386
def post_push_ssh(ui, repo, node, **kwargs):
    """
    Mercurial post_push hook for SSH.

    Delegates to :func:`post_push` only for SSH-driven pushes; otherwise
    reports success (0) without doing anything.
    """
    if not _extras_from_ui(ui).get('SSH'):
        return 0
    return post_push(ui, repo, node, **kwargs)
394
394
395
395
def key_push(ui, repo, **kwargs):
    """
    Mercurial pushkey hook.

    For bookmark pushkey updates where ``new != '0'``, remember the
    bookmarks of the pushed changeset on the ui object so the later
    post_push hook can report them.
    """
    from vcsserver.hgcompat import get_ctx
    if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
        # store new bookmarks in our UI object propagated later to post_push
        ctx = get_ctx(repo, kwargs['key'])
        ui._rc_pushkey_branches = ctx.bookmarks()
    return
402
402
403
403
# backward compat alias: old hook name, kept so previously installed
# hook scripts keep working
log_pull_action = post_pull

# backward compat alias: old hook name, kept so previously installed
# hook scripts keep working
log_push_action = post_push
409
409
410
410
def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    It intentionally does nothing.
    """
418
418
419
419
def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
    """
    Old hook name: keep here for backward compatibility.

    This is only required when the installed git hooks are not upgraded.
    It intentionally does nothing.
    """
427
427
428
428
# Result of a git pull hook call: exit status plus the captured output
# that gets relayed back to the client.
HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
430
430
431
431
def git_pre_pull(extras):
    """
    Pre pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        # pull hook not enabled -> succeed with no output
        return HookResponse(0, '')

    buf = io.BytesIO()
    try:
        status = _call_hook('pre_pull', extras, GitMessageWriter(buf))
    except Exception as error:
        # 128 mirrors git's fatal-error convention
        buf.write('ERROR: %s\n' % str(error))
        status = 128

    return HookResponse(status, buf.getvalue())
453
453
454
454
def git_post_pull(extras):
    """
    Post pull hook.

    :param extras: dictionary containing the keys defined in simplevcs
    :type extras: dict

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    if 'pull' not in extras['hooks']:
        # pull hook not enabled -> succeed with no output
        return HookResponse(0, '')

    buf = io.BytesIO()
    try:
        status = _call_hook('post_pull', extras, GitMessageWriter(buf))
    except Exception as error:
        # 128 mirrors git's fatal-error convention
        buf.write('ERROR: %s\n' % error)
        status = 128

    return HookResponse(status, buf.getvalue())
476
476
477
477
478 def _parse_git_ref_lines(revision_lines):
478 def _parse_git_ref_lines(revision_lines):
479 rev_data = []
479 rev_data = []
480 for revision_line in revision_lines or []:
480 for revision_line in revision_lines or []:
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
481 old_rev, new_rev, ref = revision_line.strip().split(' ')
482 ref_data = ref.split('/', 2)
482 ref_data = ref.split('/', 2)
483 if ref_data[1] in ('tags', 'heads'):
483 if ref_data[1] in ('tags', 'heads'):
484 rev_data.append({
484 rev_data.append({
485 # NOTE(marcink):
485 # NOTE(marcink):
486 # we're unable to tell total_commits for git at this point
486 # we're unable to tell total_commits for git at this point
487 # but we set the variable for consistency with GIT
487 # but we set the variable for consistency with GIT
488 'total_commits': -1,
488 'total_commits': -1,
489 'old_rev': old_rev,
489 'old_rev': old_rev,
490 'new_rev': new_rev,
490 'new_rev': new_rev,
491 'ref': ref,
491 'ref': ref,
492 'type': ref_data[1],
492 'type': ref_data[1],
493 'name': ref_data[2],
493 'name': ref_data[2],
494 })
494 })
495 return rev_data
495 return rev_data
496
496
497
497
def git_pre_receive(unused_repo_path, revision_lines, env):
    """
    Pre push hook.

    Parses the ref lines git hands to pre-receive, optionally detects
    force pushes, and forwards everything to the RhodeCode `pre_push`
    hook. Extras are read from the RC_SCM_DATA env variable.

    :param env: hook environment; must contain RC_SCM_DATA (JSON extras)

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    rev_data = _parse_git_ref_lines(revision_lines)
    if 'push' not in extras['hooks']:
        # push hook not enabled for this repo -> allow
        return 0
    empty_commit_id = '0' * 40

    detect_force_push = extras.get('detect_force_push')

    for push_ref in rev_data:
        # store our git-env which holds the temp store
        push_ref['git_env'] = _get_git_env()
        push_ref['pruned_sha'] = ''
        if not detect_force_push:
            # don't check for forced-push when we don't need to
            continue

        type_ = push_ref['type']
        # all-zero sha marks branch creation (old) / deletion (new)
        new_branch = push_ref['old_rev'] == empty_commit_id
        delete_branch = push_ref['new_rev'] == empty_commit_id
        if type_ == 'heads' and not (new_branch or delete_branch):
            old_rev = push_ref['old_rev']
            new_rev = push_ref['new_rev']
            # commits reachable from old_rev but NOT from new_rev: any
            # output here means history would be discarded by this push
            cmd = [settings.GIT_EXECUTABLE, 'rev-list', old_rev, '^{}'.format(new_rev)]
            stdout, stderr = subprocessio.run_command(
                cmd, env=os.environ.copy())
            # means we're having some non-reachable objects, this forced push was used
            if stdout:
                push_ref['pruned_sha'] = stdout.splitlines()

    extras['hook_type'] = 'pre_receive'
    extras['commit_ids'] = rev_data
    return _call_hook('pre_push', extras, GitMessageWriter())
540
540
541
541
def git_post_receive(unused_repo_path, revision_lines, env):
    """
    Post push hook.

    Translates the ref lines git hands to post-receive into commit ids,
    branch/tag names and synthetic markers (``delete_branch=>``,
    ``tag=>``), fixes up HEAD for freshly created repositories, and
    forwards everything to the RhodeCode `post_push` hook.

    :param env: hook environment; must contain RC_SCM_DATA (JSON extras)

    :return: status code of the hook. 0 for success.
    :rtype: int
    """
    extras = json.loads(env['RC_SCM_DATA'])
    if 'push' not in extras['hooks']:
        # push hook not enabled for this repo -> nothing to report
        return 0

    rev_data = _parse_git_ref_lines(revision_lines)

    git_revs = []

    # N.B.(skreft): it is ok to just call git, as git before calling a
    # subcommand sets the PATH environment variable so that it point to the
    # correct version of the git executable.
    empty_commit_id = '0' * 40
    branches = []
    tags = []
    for push_ref in rev_data:
        type_ = push_ref['type']

        if type_ == 'heads':
            if push_ref['old_rev'] == empty_commit_id:
                # starting new branch case
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                # Fix up head revision if needed: `git show HEAD` fails in
                # a repository whose HEAD points at a not-yet-existing ref
                cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
                try:
                    subprocessio.run_command(cmd, env=os.environ.copy())
                except Exception:
                    cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
                           'refs/heads/%s' % push_ref['name']]
                    print("Setting default branch to %s" % push_ref['name'])
                    subprocessio.run_command(cmd, env=os.environ.copy())

                # list commits of the new branch that are not reachable
                # from any other head; '.' is a harmless placeholder when
                # the pushed branch is the only head
                cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
                       '--format=%(refname)', 'refs/heads/*']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                heads = stdout
                heads = heads.replace(push_ref['ref'], '')
                heads = ' '.join(head for head
                                 in heads.splitlines() if head) or '.'
                cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
                       '--pretty=format:%H', '--', push_ref['new_rev'],
                       '--not', heads]
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
            elif push_ref['new_rev'] == empty_commit_id:
                # delete branch case
                git_revs.append('delete_branch=>%s' % push_ref['name'])
            else:
                if push_ref['name'] not in branches:
                    branches.append(push_ref['name'])

                cmd = [settings.GIT_EXECUTABLE, 'log',
                       '{old_rev}..{new_rev}'.format(**push_ref),
                       '--reverse', '--pretty=format:%H']
                stdout, stderr = subprocessio.run_command(
                    cmd, env=os.environ.copy())
                git_revs.extend(stdout.splitlines())
        elif type_ == 'tags':
            if push_ref['name'] not in tags:
                tags.append(push_ref['name'])
            git_revs.append('tag=>%s' % push_ref['name'])

    extras['hook_type'] = 'post_receive'
    extras['commit_ids'] = git_revs
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
    }

    if 'repo_size' in extras['hooks']:
        try:
            # repo_size is best-effort and must never block the push.
            # NOTE: was a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt; narrowed to Exception to
            # match the identical guard in svn_post_commit.
            _call_hook('repo_size', extras, GitMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, GitMessageWriter())
632
632
633
633
def _get_extras_from_txn_id(path, txn_id):
    """
    Recover the extras dict stored in the ``rc-scm-extras`` revprop of an
    open svn transaction (base64-encoded JSON).

    Returns an empty dict when the property cannot be read or decoded.
    """
    extras = {}
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-t', txn_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, _stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from txn_id')

    return extras
647
647
648
648
def _get_extras_from_commit_id(commit_id, path):
    """
    Recover the extras dict stored in the ``rc-scm-extras`` revprop of a
    committed svn revision (base64-encoded JSON).

    Returns an empty dict when the property cannot be read or decoded.
    """
    extras = {}
    cmd = [settings.SVNLOOK_EXECUTABLE, 'pget',
           '-r', commit_id,
           '--revprop', path, 'rc-scm-extras']
    try:
        stdout, _stderr = subprocessio.run_command(
            cmd, env=os.environ.copy())
        extras = json.loads(base64.urlsafe_b64decode(stdout))
    except Exception:
        log.exception('Failed to extract extras info from commit_id')

    return extras
662
662
663
663
def svn_pre_commit(repo_path, commit_data, env):
    """
    Subversion pre-commit hook dispatcher.

    Extras come from the RC_SCM_DATA env variable, falling back to the
    revprops stored on the open transaction; when neither is available
    the commit is allowed (0).
    """
    path, txn_id = commit_data

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_txn_id(path, txn_id)
        if not extras:
            return 0

    extras.update({
        'hook_type': 'pre_commit',
        'commit_ids': [txn_id],
        'txn_id': txn_id,
        'new_refs': {
            'total_commits': 1,
            'branches': [],
            'bookmarks': [],
            'tags': [],
        },
    })

    return _call_hook('pre_push', extras, SvnMessageWriter())
688
688
689
689
def svn_post_commit(repo_path, commit_data, env):
    """
    Subversion post-commit hook dispatcher.

    ``commit_data`` is ``(path, rev, txn_id)`` with modern hook scripts,
    or the legacy ``(path, rev)`` pair when installed hooks were not
    upgraded. Extras come from the RC_SCM_DATA env variable, falling back
    to the revprops stored on the committed revision.

    :raises ValueError: when ``commit_data`` has an unexpected shape.
    """
    if len(commit_data) == 3:
        path, commit_id, txn_id = commit_data
    elif len(commit_data) == 2:
        log.error('Failed to extract txn_id from commit_data using legacy method. '
                  'Some functionality might be limited')
        path, commit_id = commit_data
        txn_id = None
    else:
        # previously this fell through with path/commit_id unbound and
        # crashed later with a confusing NameError; fail fast instead
        raise ValueError('Unexpected commit_data shape: %r' % (commit_data,))

    branches = []
    tags = []

    if env.get('RC_SCM_DATA'):
        extras = json.loads(env['RC_SCM_DATA'])
    else:
        # fallback method to read from TXN-ID stored data
        extras = _get_extras_from_commit_id(commit_id, path)
        if not extras:
            return 0

    extras['hook_type'] = 'post_commit'
    extras['commit_ids'] = [commit_id]
    extras['txn_id'] = txn_id
    extras['new_refs'] = {
        'branches': branches,
        'bookmarks': [],
        'tags': tags,
        'total_commits': 1,
    }

    if 'repo_size' in extras['hooks']:
        try:
            # repo_size is best-effort; failures must not block the commit
            _call_hook('repo_size', extras, SvnMessageWriter())
        except Exception:
            pass

    return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,705 +1,705 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import base64
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import wsgiref.util
24 import wsgiref.util
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 import psutil
27 import psutil
28 from itertools import chain
28 from itertools import chain
29 from cStringIO import StringIO
29 from cStringIO import StringIO
30
30
31 import simplejson as json
31 import simplejson as json
32 import msgpack
32 import msgpack
33 from pyramid.config import Configurator
33 from pyramid.config import Configurator
34 from pyramid.settings import asbool, aslist
34 from pyramid.settings import asbool, aslist
35 from pyramid.wsgi import wsgiapp
35 from pyramid.wsgi import wsgiapp
36 from pyramid.compat import configparser
36 from pyramid.compat import configparser
37 from pyramid.response import Response
37 from pyramid.response import Response
38
38
39 from vcsserver.utils import safe_int
39 from vcsserver.utils import safe_int
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
43 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
44 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
44 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
45
45
46 try:
46 try:
47 locale.setlocale(locale.LC_ALL, '')
47 locale.setlocale(locale.LC_ALL, '')
48 except locale.Error as e:
48 except locale.Error as e:
49 log.error(
49 log.error(
50 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
50 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
51 os.environ['LC_ALL'] = 'C'
51 os.environ['LC_ALL'] = 'C'
52
52
53 import vcsserver
53 import vcsserver
54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
54 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
55 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
56 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
57 from vcsserver.echo_stub.echo_app import EchoApp
57 from vcsserver.echo_stub.echo_app import EchoApp
58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
58 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
59 from vcsserver.lib.exc_tracking import store_exception
59 from vcsserver.lib.exc_tracking import store_exception
60 from vcsserver.server import VcsServer
60 from vcsserver.server import VcsServer
61
61
62 try:
62 try:
63 from vcsserver.git import GitFactory, GitRemote
63 from vcsserver.git import GitFactory, GitRemote
64 except ImportError:
64 except ImportError:
65 GitFactory = None
65 GitFactory = None
66 GitRemote = None
66 GitRemote = None
67
67
68 try:
68 try:
69 from vcsserver.hg import MercurialFactory, HgRemote
69 from vcsserver.hg import MercurialFactory, HgRemote
70 except ImportError:
70 except ImportError:
71 MercurialFactory = None
71 MercurialFactory = None
72 HgRemote = None
72 HgRemote = None
73
73
74 try:
74 try:
75 from vcsserver.svn import SubversionFactory, SvnRemote
75 from vcsserver.svn import SubversionFactory, SvnRemote
76 except ImportError:
76 except ImportError:
77 SubversionFactory = None
77 SubversionFactory = None
78 SvnRemote = None
78 SvnRemote = None
79
79
80
80
81 def _is_request_chunked(environ):
81 def _is_request_chunked(environ):
82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
82 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
83 return stream
83 return stream
84
84
85
85
86 def _int_setting(settings, name, default):
86 def _int_setting(settings, name, default):
87 settings[name] = int(settings.get(name, default))
87 settings[name] = int(settings.get(name, default))
88 return settings[name]
88 return settings[name]
89
89
90
90
def _bool_setting(settings, name, default):
    """Coerce settings[name] (or *default*) to a bool via pyramid's
    ``asbool``, store the result back into *settings* and return it."""
    input_val = settings.get(name, default)
    # Python 2: normalize unicode to a byte string before asbool
    if isinstance(input_val, unicode):
        input_val = input_val.encode('utf8')
    settings[name] = asbool(input_val)
    return settings[name]
97
97
98
98
def _list_setting(settings, name, default):
    """Parse settings[name] (or *default*) as a pyramid space/newline
    separated list, store the result back into *settings* and return it."""
    raw_value = settings.get(name, default)

    # pyramid's aslist handles the space/newline separated format
    settings[name] = aslist(raw_value)
    return settings[name]
105
105
106
106
107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
107 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
108 value = settings.get(name, default)
108 value = settings.get(name, default)
109
109
110 if default_when_empty and not value:
110 if default_when_empty and not value:
111 # use default value when value is empty
111 # use default value when value is empty
112 value = default
112 value = default
113
113
114 if lower:
114 if lower:
115 value = value.lower()
115 value = value.lower()
116 settings[name] = value
116 settings[name] = value
117 return settings[name]
117 return settings[name]
118
118
119
119
def log_max_fd():
    """Log the process hard limit on open file descriptors (best effort)."""
    try:
        _soft, hard = psutil.Process().rlimit(psutil.RLIMIT_NOFILE)
        log.info('Max file descriptors value: %s', hard)
    except Exception:
        # purely informational; never fail startup over this
        pass
126
126
127
127
class VCS(object):
    """
    Wires up the per-backend remote objects (git/hg/svn) plus the generic
    VcsServer. A backend whose import failed at module load (its factory
    or remote is None) is skipped with an informational log message.
    """
    def __init__(self, locale_conf=None, cache_config=None):
        # locale string to force, or falsy to derive from the environment
        self.locale = locale_conf
        self.cache_config = cache_config
        self._configure_locale()

        log_max_fd()

        if GitFactory and GitRemote:
            git_factory = GitFactory()
            self._git_remote = GitRemote(git_factory)
        else:
            log.info("Git client import failed")

        if MercurialFactory and HgRemote:
            hg_factory = MercurialFactory()
            self._hg_remote = HgRemote(hg_factory)
        else:
            log.info("Mercurial client import failed")

        if SubversionFactory and SvnRemote:
            svn_factory = SubversionFactory()

            # hg factory is used for svn url validation
            hg_factory = MercurialFactory()
            self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
        else:
            log.info("Subversion client import failed")

        self._vcsserver = VcsServer()

    def _configure_locale(self):
        """Apply ``self.locale`` (or the environment) via locale.setlocale."""
        if self.locale:
            log.info('Settings locale: `LC_ALL` to %s', self.locale)
        else:
            log.info(
                'Configuring locale subsystem based on environment variables')
        try:
            # If self.locale is the empty string, then the locale
            # module will use the environment variables. See the
            # documentation of the package `locale`.
            locale.setlocale(locale.LC_ALL, self.locale)

            language_code, encoding = locale.getlocale()
            log.info(
                'Locale set to language code "%s" with encoding "%s".',
                language_code, encoding)
        except locale.Error:
            log.exception(
                'Cannot set locale, not configuring the locale system')
178
178
179
179
class WsgiProxy(object):
    """
    WSGI shim that reads a msgpack-encoded payload from the request body,
    delegates to the wrapped handler, and streams back msgpack-encoded
    frames: (error, status, headers) metadata first, then the data chunks.
    """
    def __init__(self, wsgi):
        self.wsgi = wsgi

    def __call__(self, environ, start_response):
        # payload is a msgpack dict: environment, input_data, args, kwargs
        input_data = environ['wsgi.input'].read()
        input_data = msgpack.unpackb(input_data)

        error = None
        try:
            data, status, headers = self.wsgi.handle(
                input_data['environment'], input_data['input_data'],
                *input_data['args'], **input_data['kwargs'])
        except Exception as e:
            # report the failure in-band; _vcs_kind lets the caller map it
            # back to a vcs-specific exception type
            data, status, headers = [], None, None
            error = {
                'message': str(e),
                '_vcs_kind': getattr(e, '_vcs_kind', None)
            }

        start_response(200, {})
        return self._iterator(error, status, headers, data)

    def _iterator(self, error, status, headers, data):
        # first three frames carry call metadata, the rest is the payload
        initial_data = [
            error,
            status,
            headers,
        ]

        for d in chain(initial_data, data):
            yield msgpack.packb(d)
212
212
213
213
def not_found(request):
    """Pyramid notfound view: reply with a plain 404 status payload."""
    return dict(status='404 NOT FOUND')
216
216
217
217
class VCSViewPredicate(object):
    """Custom Pyramid view predicate matching backends we have remotes for."""

    def __init__(self, val, config):
        # `val` is the mapping of backend name -> remote object supplied
        # via the `vcs_view=` argument of config.add_view().
        self.remotes = val

    def text(self):
        return 'vcs view method = %s' % (self.remotes.keys(),)

    phash = text

    def __call__(self, context, request):
        """
        Return True when the `backend` route segment names a backend
        that is present in the configured remotes.
        """
        requested = request.matchdict.get('backend')
        return requested in self.remotes
234
234
235
235
class HTTPApplication(object):
    """Pyramid application wiring vcsserver's RPC, proxy and stream views.

    RPC requests arrive as msgpack-encoded payloads on ``/{backend}`` (and
    ``/{backend}/stream`` for chunked replies) and are dispatched to the
    per-backend remote objects (hg/git/svn) or to the server remote.
    Raw hg/git wire-protocol traffic is served on the ``/stream/...`` routes.
    """

    # Exception type names reported back to callers by name; any other
    # exception type is masked to None in the error payload (see vcs_view).
    ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')

    # Module providing HgRemoteWsgi/GitRemoteWsgi; swapped for the echo
    # stub when `dev.use_echo_app` is enabled in __init__.
    remote_wsgi = remote_wsgi
    _use_echo_app = False

    def __init__(self, settings=None, global_config=None):
        """Build the Pyramid Configurator and register all routes/views."""
        self._sanitize_settings_and_apply_defaults(settings)

        self.config = Configurator(settings=settings)
        self.global_config = global_config
        self.config.include('vcsserver.lib.rc_cache')

        settings_locale = settings.get('locale', '') or 'en_US.UTF-8'
        vcs = VCS(locale_conf=settings_locale, cache_config=settings)
        # Backend name (as used in the /{backend} route) -> remote object.
        self._remotes = {
            'hg': vcs._hg_remote,
            'git': vcs._git_remote,
            'svn': vcs._svn_remote,
            'server': vcs._vcsserver,
        }
        if settings.get('dev.use_echo_app', 'false').lower() == 'true':
            self._use_echo_app = True
            log.warning("Using EchoApp for VCS operations.")
            self.remote_wsgi = remote_wsgi_stub

        self._configure_settings(global_config, settings)

        self._configure()

    def _configure_settings(self, global_config, app_settings):
        """
        Configure the settings module.

        Merges the [DEFAULT]/global config with the app settings, applies
        the optional git/binary path overrides, and publishes the merged
        dict on the vcsserver package for other modules to read.
        """
        settings_merged = global_config.copy()
        settings_merged.update(app_settings)

        git_path = app_settings.get('git_path', None)
        if git_path:
            settings.GIT_EXECUTABLE = git_path
        binary_dir = app_settings.get('core.binary_dir', None)
        if binary_dir:
            settings.BINARY_DIR = binary_dir

        # Store the settings to make them available to other modules.
        vcsserver.PYRAMID_SETTINGS = settings_merged
        vcsserver.CONFIG = settings_merged

    def _sanitize_settings_and_apply_defaults(self, settings):
        """Fill in defaults for cache/exception-store settings, in place."""
        temp_store = tempfile.gettempdir()
        default_cache_dir = os.path.join(temp_store, 'rc_cache')

        # save default, cache dir, and use it for all backends later.
        default_cache_dir = _string_setting(
            settings,
            'cache_dir',
            default_cache_dir, lower=False, default_when_empty=True)

        # ensure we have our dir created
        if not os.path.isdir(default_cache_dir):
            os.makedirs(default_cache_dir, mode=0o755)

        # exception store cache
        _string_setting(
            settings,
            'exception_tracker.store_path',
            temp_store, lower=False, default_when_empty=True)

        # repo_object cache
        _string_setting(
            settings,
            'rc_cache.repo_object.backend',
            'dogpile.cache.rc.file_namespace', lower=False)
        _int_setting(
            settings,
            'rc_cache.repo_object.expiration_time',
            30 * 24 * 60 * 60)  # 30 days, in seconds
        _string_setting(
            settings,
            'rc_cache.repo_object.arguments.filename',
            os.path.join(default_cache_dir, 'vcsserver_cache_1'), lower=False)

    def _configure(self):
        """Register renderers, routes, views, predicates and tweens."""
        self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)

        self.config.add_route('service', '/_service')
        self.config.add_route('status', '/status')
        self.config.add_route('hg_proxy', '/proxy/hg')
        self.config.add_route('git_proxy', '/proxy/git')

        # rpc methods
        self.config.add_route('vcs', '/{backend}')

        # streaming rpc remote methods
        self.config.add_route('vcs_stream', '/{backend}/stream')

        # vcs operations clone/push as streaming
        self.config.add_route('stream_git', '/stream/git/*repo_name')
        self.config.add_route('stream_hg', '/stream/hg/*repo_name')

        self.config.add_view(self.status_view, route_name='status', renderer='json')
        self.config.add_view(self.service_view, route_name='service', renderer='msgpack')

        self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
        self.config.add_view(self.git_proxy(), route_name='git_proxy')
        # `vcs_view=` triggers the VCSViewPredicate so only configured
        # backends match these routes.
        self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
                             vcs_view=self._remotes)
        self.config.add_view(self.vcs_stream_view, route_name='vcs_stream',
                             vcs_view=self._remotes)

        self.config.add_view(self.hg_stream(), route_name='stream_hg')
        self.config.add_view(self.git_stream(), route_name='stream_git')

        self.config.add_view_predicate('vcs_view', VCSViewPredicate)

        self.config.add_notfound_view(not_found, renderer='json')

        # catch-all exception view: translates known vcs errors into HTTP
        # responses and re-raises everything else (see handle_vcs_exception).
        self.config.add_view(self.handle_vcs_exception, context=Exception)

        self.config.add_tween(
            'vcsserver.tweens.request_wrapper.RequestWrapperTween',
        )
        self.config.add_request_method(
            'vcsserver.lib.request_counter.get_request_counter',
            'request_count')

        self.config.add_request_method(
            'vcsserver.lib._vendor.statsd.get_statsd_client',
            'statsd', reify=True)

    def wsgi_app(self):
        """Return the fully configured WSGI application."""
        return self.config.make_wsgi_app()

    def _vcs_view_params(self, request):
        """Decode an RPC request into (payload, remote, method, args, kwargs).

        The msgpack body carries ``method`` plus ``params`` holding optional
        ``wire`` (connection metadata), positional ``args`` and ``kwargs``.
        When present, ``wire`` is prepended to ``args`` and its ``context``
        value is normalized to a uuid.UUID.
        """
        remote = self._remotes[request.matchdict['backend']]
        payload = msgpack.unpackb(request.body, use_list=True)
        method = payload.get('method')
        params = payload['params']
        wire = params.get('wire')
        args = params.get('args')
        kwargs = params.get('kwargs')
        context_uid = None

        if wire:
            try:
                wire['context'] = context_uid = uuid.UUID(wire['context'])
            except KeyError:
                # wire without a context is allowed; leave context_uid None
                pass
            args.insert(0, wire)
        repo_state_uid = wire.get('repo_state_uid') if wire else None

        # NOTE(marcink): trading complexity for slight performance
        if log.isEnabledFor(logging.DEBUG):
            # method names listed here get their arguments elided from the
            # debug log (currently none).
            no_args_methods = [

            ]
            if method in no_args_methods:
                call_args = ''
            else:
                # args[0] is the wire dict (when present); skip it in logs
                call_args = args[1:]

            log.debug('Method requested:`%s` with args:%s kwargs:%s context_uid: %s, repo_state_uid:%s',
                      method, call_args, kwargs, context_uid, repo_state_uid)

        return payload, remote, method, args, kwargs

    def vcs_view(self, request):
        """Execute one RPC call and return a msgpack-renderable dict.

        On success: ``{'id': ..., 'result': ...}``. On failure:
        ``{'id': ..., 'error': {...}}`` carrying message, formatted
        traceback, original (unwrapped) exception info and a type name
        restricted to ALLOWED_EXCEPTIONS.
        """

        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        payload_id = payload.get('id')

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            exc_info = list(sys.exc_info())
            exc_type, exc_value, exc_traceback = exc_info

            # remotes may wrap the real exception; unwrap it for reporting
            org_exc = getattr(e, '_org_exc', None)
            org_exc_name = None
            org_exc_tb = ''
            if org_exc:
                org_exc_name = org_exc.__class__.__name__
                org_exc_tb = getattr(e, '_org_exc_tb', '')
                # replace our "faked" exception with our org
                exc_info[0] = org_exc.__class__
                exc_info[1] = org_exc

            should_store_exc = True
            if org_exc:
                def get_exc_fqn(_exc_obj):
                    # fully-qualified name of the original exception class
                    module_name = getattr(org_exc.__class__, '__module__', 'UNKNOWN')
                    return module_name + '.' + org_exc_name

                exc_fqn = get_exc_fqn(org_exc)

                # expected lookup misses are not persisted to the
                # exception store
                if exc_fqn in ['mercurial.error.RepoLookupError',
                               'vcsserver.exceptions.RefNotFoundException']:
                    should_store_exc = False

            if should_store_exc:
                store_exception(id(exc_info), exc_info, request_path=request.path)

            tb_info = ''.join(
                traceback.format_exception(exc_type, exc_value, exc_traceback))

            type_ = e.__class__.__name__
            if type_ not in self.ALLOWED_EXCEPTIONS:
                type_ = None

            resp = {
                'id': payload_id,
                'error': {
                    # NOTE(review): e.message is a Python2-only Exception
                    # attribute — confirm before any Python3 migration.
                    'message': e.message,
                    'traceback': tb_info,
                    'org_exc': org_exc_name,
                    'org_exc_tb': org_exc_tb,
                    'type': type_
                }
            }

            try:
                resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
            except AttributeError:
                pass
        else:
            resp = {
                'id': payload_id,
                'result': resp
            }

        return resp

    def vcs_stream_view(self, request):
        """Execute an RPC call whose result is streamed back in chunks.

        The requested method name carries a ``stream:`` prefix which is
        stripped before dispatch; ``chunk_size`` from the payload controls
        the chunking (default 4096).
        """
        payload, remote, method, args, kwargs = self._vcs_view_params(request)
        # this method has a stream: marker we remove it here
        method = method.split('stream:')[-1]
        chunk_size = safe_int(payload.get('chunk_size')) or 4096

        try:
            resp = getattr(remote, method)(*args, **kwargs)
        except Exception as e:
            # no error envelope for streams; let the exception view handle it
            raise

        def get_chunked_data(method_resp):
            # NOTE(review): wraps the full response in StringIO and re-reads
            # it in chunk_size pieces; assumes method_resp is an in-memory
            # string — confirm for large responses.
            stream = StringIO(method_resp)
            while 1:
                chunk = stream.read(chunk_size)
                if not chunk:
                    break
                yield chunk

        response = Response(app_iter=get_chunked_data(resp))
        response.content_type = 'application/octet-stream'

        return response

    def status_view(self, request):
        """Health-check endpoint: report OK, the server version and PID."""
        import vcsserver
        return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
                'pid': os.getpid()}

    def service_view(self, request):
        """Introspection endpoint: echo version, parsed .ini config and env."""
        import vcsserver

        payload = msgpack.unpackb(request.body, use_list=True)
        server_config, app_config = {}, {}

        try:
            path = self.global_config['__file__']
            config = configparser.RawConfigParser()

            config.read(path)

            if config.has_section('server:main'):
                server_config = dict(config.items('server:main'))
            if config.has_section('app:main'):
                app_config = dict(config.items('app:main'))

        except Exception:
            # best-effort: display whatever we could read
            log.exception('Failed to read .ini file for display')

        environ = os.environ.items()

        resp = {
            'id': payload.get('id'),
            'result': dict(
                version=vcsserver.__version__,
                config=server_config,
                app_config=app_config,
                environ=environ,
                payload=payload,
            )
        }
        return resp

    def _msgpack_renderer_factory(self, info):
        """Pyramid renderer factory serializing view results with msgpack."""
        def _render(value, system):
            request = system.get('request')
            if request is not None:
                response = request.response
                ct = response.content_type
                # only override the content type if the view didn't set one
                if ct == response.default_content_type:
                    response.content_type = 'application/x-msgpack'
            return msgpack.packb(value)
        return _render

    def set_env_from_config(self, environ, config):
        """Copy RhodeCode-provided metadata from the scm config into WSGI env.

        Looks for a ('rhodecode', _, json) entry in ``config`` and exports
        username/ip plus the chunked-request compatibility flag.
        """
        dict_conf = {}
        try:
            for elem in config:
                if elem[0] == 'rhodecode':
                    dict_conf = json.loads(elem[2])
                    break
        except Exception:
            log.exception('Failed to fetch SCM CONFIG')
            return

        username = dict_conf.get('username')
        if username:
            environ['REMOTE_USER'] = username
            # mercurial specific, some extension api rely on this
            environ['HGUSER'] = username

        ip = dict_conf.get('ip')
        if ip:
            environ['REMOTE_HOST'] = ip

        if _is_request_chunked(environ):
            # set the compatibility flag for webob
            environ['wsgi.input_terminated'] = True

    def hg_proxy(self):
        """Return a WSGI app proxying msgpack-framed calls to HgRemoteWsgi."""
        @wsgiapp
        def _hg_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
            return app(environ, start_response)
        return _hg_proxy

    def git_proxy(self):
        """Return a WSGI app proxying msgpack-framed calls to GitRemoteWsgi."""
        @wsgiapp
        def _git_proxy(environ, start_response):
            app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
            return app(environ, start_response)
        return _git_proxy

    def hg_stream(self):
        """Return the WSGI app serving the mercurial wire protocol.

        Repo path/name/config are delivered via X-RC-* request headers set
        by the RhodeCode frontend; the config arrives base64+msgpack encoded.
        """
        if self._use_echo_app:
            @wsgiapp
            def _hg_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _hg_stream
        else:
            @wsgiapp
            def _hg_stream(environ, start_response):
                log.debug('http-app: handling hg stream')
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)
                app = scm_app.create_hg_wsgi_app(
                    repo_path, repo_name, config)

                # Consistent path information for hgweb
                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                environ['REPO_NAME'] = repo_name
                self.set_env_from_config(environ, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)
                # hg responses pass through ResponseFilter to drop
                # hop-by-hop headers
                return app(environ, ResponseFilter(start_response))
            return _hg_stream

    def git_stream(self):
        """Return the WSGI app serving the git wire protocol (incl. LFS).

        LFS requests are detected by content type, falling back to a path
        pattern match, and routed to the dedicated LFS app.
        """
        if self._use_echo_app:
            @wsgiapp
            def _git_stream(environ, start_response):
                app = EchoApp('fake_path', 'fake_name', None)
                return app(environ, start_response)
            return _git_stream
        else:
            @wsgiapp
            def _git_stream(environ, start_response):
                log.debug('http-app: handling git stream')
                repo_path = environ['HTTP_X_RC_REPO_PATH']
                repo_name = environ['HTTP_X_RC_REPO_NAME']
                packed_config = base64.b64decode(
                    environ['HTTP_X_RC_REPO_CONFIG'])
                config = msgpack.unpackb(packed_config)

                environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
                self.set_env_from_config(environ, config)

                content_type = environ.get('CONTENT_TYPE', '')

                path = environ['PATH_INFO']
                is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
                log.debug(
                    'LFS: Detecting if request `%s` is LFS server path based '
                    'on content type:`%s`, is_lfs:%s',
                    path, content_type, is_lfs_request)

                if not is_lfs_request:
                    # fallback detection by path
                    if GIT_LFS_PROTO_PAT.match(path):
                        is_lfs_request = True
                    log.debug(
                        'LFS: fallback detection by path of: `%s`, is_lfs:%s',
                        path, is_lfs_request)

                if is_lfs_request:
                    app = scm_app.create_git_lfs_wsgi_app(
                        repo_path, repo_name, config)
                else:
                    app = scm_app.create_git_wsgi_app(
                        repo_path, repo_name, config)

                log.debug('http-app: starting app handler '
                          'with %s and process request', app)

                return app(environ, start_response)

        return _git_stream

    def handle_vcs_exception(self, exception, request):
        """Exception view: map known vcs errors to HTTP, re-raise the rest.

        ``repo_locked`` and ``repo_branch_protected`` markers become
        dedicated HTTP responses; any other exception is stored to the
        exception tracker, logged with its traceback, and re-raised.
        """
        _vcs_kind = getattr(exception, '_vcs_kind', '')
        if _vcs_kind == 'repo_locked':
            # Get custom repo-locked status code if present.
            status_code = request.headers.get('X-RC-Locked-Status-Code')
            # NOTE(review): exception.message is a Python2-only attribute —
            # confirm before any Python3 migration.
            return HTTPRepoLocked(
                title=exception.message, status_code=status_code)

        elif _vcs_kind == 'repo_branch_protected':
            # Get custom repo-branch-protected status code if present.
            return HTTPRepoBranchProtected(title=exception.message)

        exc_info = request.exc_info
        store_exception(id(exc_info), exc_info)

        traceback_info = 'unavailable'
        if request.exc_info:
            exc_type, exc_value, exc_tb = request.exc_info
            traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))

        log.error(
            'error occurred handling this request for path: %s, \n tb: %s',
            request.path, traceback_info)
        raise exception
685
685
686
686
class ResponseFilter(object):
    """``start_response`` wrapper that drops hop-by-hop headers.

    Hop-by-hop headers (Connection, Keep-Alive, ...) must not be forwarded
    by a gateway; this filter removes them before calling the real
    ``start_response``.
    """

    def __init__(self, start_response):
        self._start_response = start_response

    def __call__(self, status, response_headers, exc_info=None):
        kept = []
        for name, value in response_headers:
            if wsgiref.util.is_hop_by_hop(name):
                continue
            kept.append((name, value))
        return self._start_response(status, tuple(kept), exc_info)
697
697
698
698
def main(global_config, **settings):
    """PasteDeploy entry point: build and return the vcsserver WSGI app."""
    if MercurialFactory:
        # Only patch mercurial internals when the hg backend is importable.
        hgpatches.patch_largefiles_capabilities()
        hgpatches.patch_subrepo_type_mapping()

    app = HTTPApplication(settings=settings, global_config=global_config)
    return app.wsgi_app()
@@ -1,77 +1,79 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 from dogpile.cache import register_backend
19 from dogpile.cache import register_backend
20
20
21 register_backend(
21 register_backend(
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
22 "dogpile.cache.rc.memory_lru", "vcsserver.lib.rc_cache.backends",
23 "LRUMemoryBackend")
23 "LRUMemoryBackend")
24
24
25 register_backend(
25 register_backend(
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
26 "dogpile.cache.rc.file_namespace", "vcsserver.lib.rc_cache.backends",
27 "FileNamespaceBackend")
27 "FileNamespaceBackend")
28
28
29 register_backend(
29 register_backend(
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
30 "dogpile.cache.rc.redis", "vcsserver.lib.rc_cache.backends",
31 "RedisPickleBackend")
31 "RedisPickleBackend")
32
32
33 register_backend(
33 register_backend(
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
34 "dogpile.cache.rc.redis_msgpack", "vcsserver.lib.rc_cache.backends",
35 "RedisMsgPackBackend")
35 "RedisMsgPackBackend")
36
36
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40 from . import region_meta
40 from . import region_meta
41 from .utils import (get_default_cache_settings, backend_key_generator, make_region)
41 from .utils import (
42 get_default_cache_settings, backend_key_generator, get_or_create_region,
43 clear_cache_namespace, make_region)
42
44
43
45
44 def configure_dogpile_cache(settings):
46 def configure_dogpile_cache(settings):
45 cache_dir = settings.get('cache_dir')
47 cache_dir = settings.get('cache_dir')
46 if cache_dir:
48 if cache_dir:
47 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
49 region_meta.dogpile_config_defaults['cache_dir'] = cache_dir
48
50
49 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
51 rc_cache_data = get_default_cache_settings(settings, prefixes=['rc_cache.'])
50
52
51 # inspect available namespaces
53 # inspect available namespaces
52 avail_regions = set()
54 avail_regions = set()
53 for key in rc_cache_data.keys():
55 for key in rc_cache_data.keys():
54 namespace_name = key.split('.', 1)[0]
56 namespace_name = key.split('.', 1)[0]
55 if namespace_name in avail_regions:
57 if namespace_name in avail_regions:
56 continue
58 continue
57
59
58 avail_regions.add(namespace_name)
60 avail_regions.add(namespace_name)
59 log.debug('dogpile: found following cache regions: %s', namespace_name)
61 log.debug('dogpile: found following cache regions: %s', namespace_name)
60
62
61 new_region = make_region(
63 new_region = make_region(
62 name=namespace_name,
64 name=namespace_name,
63 function_key_generator=None
65 function_key_generator=None
64 )
66 )
65
67
66 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(namespace_name))
68 new_region.configure_from_config(settings, 'rc_cache.{}.'.format(namespace_name))
67 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
69 new_region.function_key_generator = backend_key_generator(new_region.actual_backend)
68 if log.isEnabledFor(logging.DEBUG):
70 if log.isEnabledFor(logging.DEBUG):
69 region_args = dict(backend=new_region.actual_backend.__class__,
71 region_args = dict(backend=new_region.actual_backend.__class__,
70 region_invalidator=new_region.region_invalidator.__class__)
72 region_invalidator=new_region.region_invalidator.__class__)
71 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
73 log.debug('dogpile: registering a new region `%s` %s', namespace_name, region_args)
72
74
73 region_meta.dogpile_cache_regions[namespace_name] = new_region
75 region_meta.dogpile_cache_regions[namespace_name] = new_region
74
76
75
77
76 def includeme(config):
78 def includeme(config):
77 configure_dogpile_cache(config.registry.settings)
79 configure_dogpile_cache(config.registry.settings)
@@ -1,328 +1,329 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import time
18 import time
19 import errno
19 import errno
20 import logging
20 import logging
21
21
22 import msgpack
22 import msgpack
23 import redis
23 import redis
24
24
25 from dogpile.cache.api import CachedValue
25 from dogpile.cache.api import CachedValue
26 from dogpile.cache.backends import memory as memory_backend
26 from dogpile.cache.backends import memory as memory_backend
27 from dogpile.cache.backends import file as file_backend
27 from dogpile.cache.backends import file as file_backend
28 from dogpile.cache.backends import redis as redis_backend
28 from dogpile.cache.backends import redis as redis_backend
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
29 from dogpile.cache.backends.file import NO_VALUE, compat, FileLock
30 from dogpile.cache.util import memoized_property
30 from dogpile.cache.util import memoized_property
31
31
32 from pyramid.settings import asbool
32 from pyramid.settings import asbool
33
33
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
34 from vcsserver.lib.memory_lru_dict import LRUDict, LRUDictDebug
35 from vcsserver.utils import safe_str
35
36
36
37
37 _default_max_size = 1024
38 _default_max_size = 1024
38
39
39 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
40
41
41
42
42 class LRUMemoryBackend(memory_backend.MemoryBackend):
43 class LRUMemoryBackend(memory_backend.MemoryBackend):
43 key_prefix = 'lru_mem_backend'
44 key_prefix = 'lru_mem_backend'
44 pickle_values = False
45 pickle_values = False
45
46
46 def __init__(self, arguments):
47 def __init__(self, arguments):
47 max_size = arguments.pop('max_size', _default_max_size)
48 max_size = arguments.pop('max_size', _default_max_size)
48
49
49 LRUDictClass = LRUDict
50 LRUDictClass = LRUDict
50 if arguments.pop('log_key_count', None):
51 if arguments.pop('log_key_count', None):
51 LRUDictClass = LRUDictDebug
52 LRUDictClass = LRUDictDebug
52
53
53 arguments['cache_dict'] = LRUDictClass(max_size)
54 arguments['cache_dict'] = LRUDictClass(max_size)
54 super(LRUMemoryBackend, self).__init__(arguments)
55 super(LRUMemoryBackend, self).__init__(arguments)
55
56
56 def delete(self, key):
57 def delete(self, key):
57 try:
58 try:
58 del self._cache[key]
59 del self._cache[key]
59 except KeyError:
60 except KeyError:
60 # we don't care if key isn't there at deletion
61 # we don't care if key isn't there at deletion
61 pass
62 pass
62
63
63 def delete_multi(self, keys):
64 def delete_multi(self, keys):
64 for key in keys:
65 for key in keys:
65 self.delete(key)
66 self.delete(key)
66
67
67
68
68 class PickleSerializer(object):
69 class PickleSerializer(object):
69
70
70 def _dumps(self, value, safe=False):
71 def _dumps(self, value, safe=False):
71 try:
72 try:
72 return compat.pickle.dumps(value)
73 return compat.pickle.dumps(value)
73 except Exception:
74 except Exception:
74 if safe:
75 if safe:
75 return NO_VALUE
76 return NO_VALUE
76 else:
77 else:
77 raise
78 raise
78
79
79 def _loads(self, value, safe=True):
80 def _loads(self, value, safe=True):
80 try:
81 try:
81 return compat.pickle.loads(value)
82 return compat.pickle.loads(value)
82 except Exception:
83 except Exception:
83 if safe:
84 if safe:
84 return NO_VALUE
85 return NO_VALUE
85 else:
86 else:
86 raise
87 raise
87
88
88
89
89 class MsgPackSerializer(object):
90 class MsgPackSerializer(object):
90
91
91 def _dumps(self, value, safe=False):
92 def _dumps(self, value, safe=False):
92 try:
93 try:
93 return msgpack.packb(value)
94 return msgpack.packb(value)
94 except Exception:
95 except Exception:
95 if safe:
96 if safe:
96 return NO_VALUE
97 return NO_VALUE
97 else:
98 else:
98 raise
99 raise
99
100
100 def _loads(self, value, safe=True):
101 def _loads(self, value, safe=True):
101 """
102 """
102 pickle maintained the `CachedValue` wrapper of the tuple
103 pickle maintained the `CachedValue` wrapper of the tuple
103 msgpack does not, so it must be added back in.
104 msgpack does not, so it must be added back in.
104 """
105 """
105 try:
106 try:
106 value = msgpack.unpackb(value, use_list=False)
107 value = msgpack.unpackb(value, use_list=False)
107 return CachedValue(*value)
108 return CachedValue(*value)
108 except Exception:
109 except Exception:
109 if safe:
110 if safe:
110 return NO_VALUE
111 return NO_VALUE
111 else:
112 else:
112 raise
113 raise
113
114
114
115
115 import fcntl
116 import fcntl
116 flock_org = fcntl.flock
117 flock_org = fcntl.flock
117
118
118
119
119 class CustomLockFactory(FileLock):
120 class CustomLockFactory(FileLock):
120
121
121 pass
122 pass
122
123
123
124
124 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
125 class FileNamespaceBackend(PickleSerializer, file_backend.DBMBackend):
125 key_prefix = 'file_backend'
126 key_prefix = 'file_backend'
126
127
127 def __init__(self, arguments):
128 def __init__(self, arguments):
128 arguments['lock_factory'] = CustomLockFactory
129 arguments['lock_factory'] = CustomLockFactory
129 db_file = arguments.get('filename')
130 db_file = arguments.get('filename')
130
131
131 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
132 log.debug('initialing %s DB in %s', self.__class__.__name__, db_file)
132 try:
133 try:
133 super(FileNamespaceBackend, self).__init__(arguments)
134 super(FileNamespaceBackend, self).__init__(arguments)
134 except Exception:
135 except Exception:
135 log.error('Failed to initialize db at: %s', db_file)
136 log.error('Failed to initialize db at: %s', db_file)
136 raise
137 raise
137
138
138 def __repr__(self):
139 def __repr__(self):
139 return '{} `{}`'.format(self.__class__, self.filename)
140 return '{} `{}`'.format(self.__class__, self.filename)
140
141
141 def list_keys(self, prefix=''):
142 def list_keys(self, prefix=''):
142 prefix = '{}:{}'.format(self.key_prefix, prefix)
143 prefix = '{}:{}'.format(self.key_prefix, prefix)
143
144
144 def cond(v):
145 def cond(v):
145 if not prefix:
146 if not prefix:
146 return True
147 return True
147
148
148 if v.startswith(prefix):
149 if v.startswith(prefix):
149 return True
150 return True
150 return False
151 return False
151
152
152 with self._dbm_file(True) as dbm:
153 with self._dbm_file(True) as dbm:
153 try:
154 try:
154 return filter(cond, dbm.keys())
155 return filter(cond, dbm.keys())
155 except Exception:
156 except Exception:
156 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
157 log.error('Failed to fetch DBM keys from DB: %s', self.get_store())
157 raise
158 raise
158
159
159 def get_store(self):
160 def get_store(self):
160 return self.filename
161 return self.filename
161
162
162 def _dbm_get(self, key):
163 def _dbm_get(self, key):
163 with self._dbm_file(False) as dbm:
164 with self._dbm_file(False) as dbm:
164 if hasattr(dbm, 'get'):
165 if hasattr(dbm, 'get'):
165 value = dbm.get(key, NO_VALUE)
166 value = dbm.get(key, NO_VALUE)
166 else:
167 else:
167 # gdbm objects lack a .get method
168 # gdbm objects lack a .get method
168 try:
169 try:
169 value = dbm[key]
170 value = dbm[key]
170 except KeyError:
171 except KeyError:
171 value = NO_VALUE
172 value = NO_VALUE
172 if value is not NO_VALUE:
173 if value is not NO_VALUE:
173 value = self._loads(value)
174 value = self._loads(value)
174 return value
175 return value
175
176
176 def get(self, key):
177 def get(self, key):
177 try:
178 try:
178 return self._dbm_get(key)
179 return self._dbm_get(key)
179 except Exception:
180 except Exception:
180 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
181 log.error('Failed to fetch DBM key %s from DB: %s', key, self.get_store())
181 raise
182 raise
182
183
183 def set(self, key, value):
184 def set(self, key, value):
184 with self._dbm_file(True) as dbm:
185 with self._dbm_file(True) as dbm:
185 dbm[key] = self._dumps(value)
186 dbm[key] = self._dumps(value)
186
187
187 def set_multi(self, mapping):
188 def set_multi(self, mapping):
188 with self._dbm_file(True) as dbm:
189 with self._dbm_file(True) as dbm:
189 for key, value in mapping.items():
190 for key, value in mapping.items():
190 dbm[key] = self._dumps(value)
191 dbm[key] = self._dumps(value)
191
192
192
193
193 class BaseRedisBackend(redis_backend.RedisBackend):
194 class BaseRedisBackend(redis_backend.RedisBackend):
194 key_prefix = ''
195 key_prefix = ''
195
196
196 def __init__(self, arguments):
197 def __init__(self, arguments):
197 super(BaseRedisBackend, self).__init__(arguments)
198 super(BaseRedisBackend, self).__init__(arguments)
198 self._lock_timeout = self.lock_timeout
199 self._lock_timeout = self.lock_timeout
199 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
200 self._lock_auto_renewal = asbool(arguments.pop("lock_auto_renewal", True))
200
201
201 if self._lock_auto_renewal and not self._lock_timeout:
202 if self._lock_auto_renewal and not self._lock_timeout:
202 # set default timeout for auto_renewal
203 # set default timeout for auto_renewal
203 self._lock_timeout = 30
204 self._lock_timeout = 30
204
205
205 def _create_client(self):
206 def _create_client(self):
206 args = {}
207 args = {}
207
208
208 if self.url is not None:
209 if self.url is not None:
209 args.update(url=self.url)
210 args.update(url=self.url)
210
211
211 else:
212 else:
212 args.update(
213 args.update(
213 host=self.host, password=self.password,
214 host=self.host, password=self.password,
214 port=self.port, db=self.db
215 port=self.port, db=self.db
215 )
216 )
216
217
217 connection_pool = redis.ConnectionPool(**args)
218 connection_pool = redis.ConnectionPool(**args)
218
219
219 return redis.StrictRedis(connection_pool=connection_pool)
220 return redis.StrictRedis(connection_pool=connection_pool)
220
221
221 def list_keys(self, prefix=''):
222 def list_keys(self, prefix=''):
222 prefix = '{}:{}*'.format(self.key_prefix, prefix)
223 prefix = '{}:{}*'.format(self.key_prefix, prefix)
223 return self.client.keys(prefix)
224 return self.client.keys(prefix)
224
225
225 def get_store(self):
226 def get_store(self):
226 return self.client.connection_pool
227 return self.client.connection_pool
227
228
228 def get(self, key):
229 def get(self, key):
229 value = self.client.get(key)
230 value = self.client.get(key)
230 if value is None:
231 if value is None:
231 return NO_VALUE
232 return NO_VALUE
232 return self._loads(value)
233 return self._loads(value)
233
234
234 def get_multi(self, keys):
235 def get_multi(self, keys):
235 if not keys:
236 if not keys:
236 return []
237 return []
237 values = self.client.mget(keys)
238 values = self.client.mget(keys)
238 loads = self._loads
239 loads = self._loads
239 return [
240 return [
240 loads(v) if v is not None else NO_VALUE
241 loads(v) if v is not None else NO_VALUE
241 for v in values]
242 for v in values]
242
243
243 def set(self, key, value):
244 def set(self, key, value):
244 if self.redis_expiration_time:
245 if self.redis_expiration_time:
245 self.client.setex(key, self.redis_expiration_time,
246 self.client.setex(key, self.redis_expiration_time,
246 self._dumps(value))
247 self._dumps(value))
247 else:
248 else:
248 self.client.set(key, self._dumps(value))
249 self.client.set(key, self._dumps(value))
249
250
250 def set_multi(self, mapping):
251 def set_multi(self, mapping):
251 dumps = self._dumps
252 dumps = self._dumps
252 mapping = dict(
253 mapping = dict(
253 (k, dumps(v))
254 (k, dumps(v))
254 for k, v in mapping.items()
255 for k, v in mapping.items()
255 )
256 )
256
257
257 if not self.redis_expiration_time:
258 if not self.redis_expiration_time:
258 self.client.mset(mapping)
259 self.client.mset(mapping)
259 else:
260 else:
260 pipe = self.client.pipeline()
261 pipe = self.client.pipeline()
261 for key, value in mapping.items():
262 for key, value in mapping.items():
262 pipe.setex(key, self.redis_expiration_time, value)
263 pipe.setex(key, self.redis_expiration_time, value)
263 pipe.execute()
264 pipe.execute()
264
265
265 def get_mutex(self, key):
266 def get_mutex(self, key):
266 if self.distributed_lock:
267 if self.distributed_lock:
267 lock_key = redis_backend.u('_lock_{0}').format(key)
268 lock_key = redis_backend.u('_lock_{0}').format(safe_str(key))
268 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
269 return get_mutex_lock(self.client, lock_key, self._lock_timeout,
269 auto_renewal=self._lock_auto_renewal)
270 auto_renewal=self._lock_auto_renewal)
270 else:
271 else:
271 return None
272 return None
272
273
273
274
274 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
275 class RedisPickleBackend(PickleSerializer, BaseRedisBackend):
275 key_prefix = 'redis_pickle_backend'
276 key_prefix = 'redis_pickle_backend'
276 pass
277 pass
277
278
278
279
279 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
280 class RedisMsgPackBackend(MsgPackSerializer, BaseRedisBackend):
280 key_prefix = 'redis_msgpack_backend'
281 key_prefix = 'redis_msgpack_backend'
281 pass
282 pass
282
283
283
284
284 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
285 def get_mutex_lock(client, lock_key, lock_timeout, auto_renewal=False):
285 import redis_lock
286 import redis_lock
286
287
287 class _RedisLockWrapper(object):
288 class _RedisLockWrapper(object):
288 """LockWrapper for redis_lock"""
289 """LockWrapper for redis_lock"""
289
290
290 @classmethod
291 @classmethod
291 def get_lock(cls):
292 def get_lock(cls):
292 return redis_lock.Lock(
293 return redis_lock.Lock(
293 redis_client=client,
294 redis_client=client,
294 name=lock_key,
295 name=lock_key,
295 expire=lock_timeout,
296 expire=lock_timeout,
296 auto_renewal=auto_renewal,
297 auto_renewal=auto_renewal,
297 strict=True,
298 strict=True,
298 )
299 )
299
300
300 def __repr__(self):
301 def __repr__(self):
301 return "{}:{}".format(self.__class__.__name__, lock_key)
302 return "{}:{}".format(self.__class__.__name__, lock_key)
302
303
303 def __str__(self):
304 def __str__(self):
304 return "{}:{}".format(self.__class__.__name__, lock_key)
305 return "{}:{}".format(self.__class__.__name__, lock_key)
305
306
306 def __init__(self):
307 def __init__(self):
307 self.lock = self.get_lock()
308 self.lock = self.get_lock()
308 self.lock_key = lock_key
309 self.lock_key = lock_key
309
310
310 def acquire(self, wait=True):
311 def acquire(self, wait=True):
311 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
312 log.debug('Trying to acquire Redis lock for key %s', self.lock_key)
312 try:
313 try:
313 acquired = self.lock.acquire(wait)
314 acquired = self.lock.acquire(wait)
314 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
315 log.debug('Got lock for key %s, %s', self.lock_key, acquired)
315 return acquired
316 return acquired
316 except redis_lock.AlreadyAcquired:
317 except redis_lock.AlreadyAcquired:
317 return False
318 return False
318 except redis_lock.AlreadyStarted:
319 except redis_lock.AlreadyStarted:
319 # refresh thread exists, but it also means we acquired the lock
320 # refresh thread exists, but it also means we acquired the lock
320 return True
321 return True
321
322
322 def release(self):
323 def release(self):
323 try:
324 try:
324 self.lock.release()
325 self.lock.release()
325 except redis_lock.NotAcquired:
326 except redis_lock.NotAcquired:
326 pass
327 pass
327
328
328 return _RedisLockWrapper()
329 return _RedisLockWrapper()
@@ -1,158 +1,263 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import time
19 import logging
20 import logging
20 import functools
21 import functools
21 import time
22
23 from decorator import decorate
24
22
25 from dogpile.cache import CacheRegion
23 from dogpile.cache import CacheRegion
26 from dogpile.cache.util import compat
24 from dogpile.cache.util import compat
27
25
28 from vcsserver.utils import safe_str, sha1
26 from vcsserver.utils import safe_str, sha1
29
27
28 from vcsserver.lib.rc_cache import region_meta
30
29
31 log = logging.getLogger(__name__)
30 log = logging.getLogger(__name__)
32
31
33
32
34 class RhodeCodeCacheRegion(CacheRegion):
33 class RhodeCodeCacheRegion(CacheRegion):
35
34
36 def conditional_cache_on_arguments(
35 def conditional_cache_on_arguments(
37 self, namespace=None,
36 self, namespace=None,
38 expiration_time=None,
37 expiration_time=None,
39 should_cache_fn=None,
38 should_cache_fn=None,
40 to_str=compat.string_type,
39 to_str=compat.string_type,
41 function_key_generator=None,
40 function_key_generator=None,
42 condition=True):
41 condition=True):
43 """
42 """
44 Custom conditional decorator, that will not touch any dogpile internals if
43 Custom conditional decorator, that will not touch any dogpile internals if
45 condition isn't meet. This works a bit different than should_cache_fn
44 condition isn't meet. This works a bit different than should_cache_fn
46 And it's faster in cases we don't ever want to compute cached values
45 And it's faster in cases we don't ever want to compute cached values
47 """
46 """
48 expiration_time_is_callable = compat.callable(expiration_time)
47 expiration_time_is_callable = compat.callable(expiration_time)
49
48
50 if function_key_generator is None:
49 if function_key_generator is None:
51 function_key_generator = self.function_key_generator
50 function_key_generator = self.function_key_generator
52
51
52 # workaround for py2 and cython problems, this block should be removed
53 # once we've migrated to py3
54 if 'cython' == 'cython':
55 def decorator(fn):
56 if to_str is compat.string_type:
57 # backwards compatible
58 key_generator = function_key_generator(namespace, fn)
59 else:
60 key_generator = function_key_generator(namespace, fn, to_str=to_str)
61
62 @functools.wraps(fn)
63 def decorate(*arg, **kw):
64 key = key_generator(*arg, **kw)
65
66 @functools.wraps(fn)
67 def creator():
68 return fn(*arg, **kw)
69
70 if not condition:
71 return creator()
72
73 timeout = expiration_time() if expiration_time_is_callable \
74 else expiration_time
75
76 return self.get_or_create(key, creator, timeout, should_cache_fn)
77
78 def invalidate(*arg, **kw):
79 key = key_generator(*arg, **kw)
80 self.delete(key)
81
82 def set_(value, *arg, **kw):
83 key = key_generator(*arg, **kw)
84 self.set(key, value)
85
86 def get(*arg, **kw):
87 key = key_generator(*arg, **kw)
88 return self.get(key)
89
90 def refresh(*arg, **kw):
91 key = key_generator(*arg, **kw)
92 value = fn(*arg, **kw)
93 self.set(key, value)
94 return value
95
96 decorate.set = set_
97 decorate.invalidate = invalidate
98 decorate.refresh = refresh
99 decorate.get = get
100 decorate.original = fn
101 decorate.key_generator = key_generator
102 decorate.__wrapped__ = fn
103
104 return decorate
105 return decorator
106
53 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
107 def get_or_create_for_user_func(key_generator, user_func, *arg, **kw):
54
108
55 if not condition:
109 if not condition:
56 log.debug('Calling un-cached func:%s', user_func.func_name)
110 log.debug('Calling un-cached method:%s', user_func.func_name)
57 start = time.time()
111 start = time.time()
58 result = user_func(*arg, **kw)
112 result = user_func(*arg, **kw)
59 total = time.time() - start
113 total = time.time() - start
60 log.debug('un-cached func:%s took %.4fs', user_func.func_name, total)
114 log.debug('un-cached method:%s took %.4fs', user_func.func_name, total)
61 return result
115 return result
62
116
63 key = key_generator(*arg, **kw)
117 key = key_generator(*arg, **kw)
64
118
65 timeout = expiration_time() if expiration_time_is_callable \
119 timeout = expiration_time() if expiration_time_is_callable \
66 else expiration_time
120 else expiration_time
67
121
68 log.debug('Calling cached fn:%s', user_func.func_name)
122 log.debug('Calling cached method:`%s`', user_func.func_name)
69 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
123 return self.get_or_create(key, user_func, timeout, should_cache_fn, (arg, kw))
70
124
71 def cache_decorator(user_func):
125 def cache_decorator(user_func):
72 if to_str is compat.string_type:
126 if to_str is compat.string_type:
73 # backwards compatible
127 # backwards compatible
74 key_generator = function_key_generator(namespace, user_func)
128 key_generator = function_key_generator(namespace, user_func)
75 else:
129 else:
76 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
130 key_generator = function_key_generator(namespace, user_func, to_str=to_str)
77
131
78 def refresh(*arg, **kw):
132 def refresh(*arg, **kw):
79 """
133 """
80 Like invalidate, but regenerates the value instead
134 Like invalidate, but regenerates the value instead
81 """
135 """
82 key = key_generator(*arg, **kw)
136 key = key_generator(*arg, **kw)
83 value = user_func(*arg, **kw)
137 value = user_func(*arg, **kw)
84 self.set(key, value)
138 self.set(key, value)
85 return value
139 return value
86
140
87 def invalidate(*arg, **kw):
141 def invalidate(*arg, **kw):
88 key = key_generator(*arg, **kw)
142 key = key_generator(*arg, **kw)
89 self.delete(key)
143 self.delete(key)
90
144
91 def set_(value, *arg, **kw):
145 def set_(value, *arg, **kw):
92 key = key_generator(*arg, **kw)
146 key = key_generator(*arg, **kw)
93 self.set(key, value)
147 self.set(key, value)
94
148
95 def get(*arg, **kw):
149 def get(*arg, **kw):
96 key = key_generator(*arg, **kw)
150 key = key_generator(*arg, **kw)
97 return self.get(key)
151 return self.get(key)
98
152
99 user_func.set = set_
153 user_func.set = set_
100 user_func.invalidate = invalidate
154 user_func.invalidate = invalidate
101 user_func.get = get
155 user_func.get = get
102 user_func.refresh = refresh
156 user_func.refresh = refresh
103 user_func.key_generator = key_generator
157 user_func.key_generator = key_generator
104 user_func.original = user_func
158 user_func.original = user_func
105
159
106 # Use `decorate` to preserve the signature of :param:`user_func`.
160 # Use `decorate` to preserve the signature of :param:`user_func`.
107 return decorate(user_func, functools.partial(
161 return decorator.decorate(user_func, functools.partial(
108 get_or_create_for_user_func, key_generator))
162 get_or_create_for_user_func, key_generator))
109
163
110 return cache_decorator
164 return cache_decorator
111
165
112
166
113 def make_region(*arg, **kw):
167 def make_region(*arg, **kw):
114 return RhodeCodeCacheRegion(*arg, **kw)
168 return RhodeCodeCacheRegion(*arg, **kw)
115
169
116
170
117 def get_default_cache_settings(settings, prefixes=None):
171 def get_default_cache_settings(settings, prefixes=None):
118 prefixes = prefixes or []
172 prefixes = prefixes or []
119 cache_settings = {}
173 cache_settings = {}
120 for key in settings.keys():
174 for key in settings.keys():
121 for prefix in prefixes:
175 for prefix in prefixes:
122 if key.startswith(prefix):
176 if key.startswith(prefix):
123 name = key.split(prefix)[1].strip()
177 name = key.split(prefix)[1].strip()
124 val = settings[key]
178 val = settings[key]
125 if isinstance(val, compat.string_types):
179 if isinstance(val, compat.string_types):
126 val = val.strip()
180 val = val.strip()
127 cache_settings[name] = val
181 cache_settings[name] = val
128 return cache_settings
182 return cache_settings
129
183
130
184
131 def compute_key_from_params(*args):
185 def compute_key_from_params(*args):
132 """
186 """
133 Helper to compute key from given params to be used in cache manager
187 Helper to compute key from given params to be used in cache manager
134 """
188 """
135 return sha1("_".join(map(safe_str, args)))
189 return sha1("_".join(map(safe_str, args)))
136
190
137
191
138 def backend_key_generator(backend):
192 def backend_key_generator(backend):
139 """
193 """
140 Special wrapper that also sends over the backend to the key generator
194 Special wrapper that also sends over the backend to the key generator
141 """
195 """
142 def wrapper(namespace, fn):
196 def wrapper(namespace, fn):
143 return key_generator(backend, namespace, fn)
197 return key_generator(backend, namespace, fn)
144 return wrapper
198 return wrapper
145
199
146
200
147 def key_generator(backend, namespace, fn):
201 def key_generator(backend, namespace, fn):
148 fname = fn.__name__
202 fname = fn.__name__
149
203
150 def generate_key(*args):
204 def generate_key(*args):
151 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
205 backend_prefix = getattr(backend, 'key_prefix', None) or 'backend_prefix'
152 namespace_pref = namespace or 'default_namespace'
206 namespace_pref = namespace or 'default_namespace'
153 arg_key = compute_key_from_params(*args)
207 arg_key = compute_key_from_params(*args)
154 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
208 final_key = "{}:{}:{}_{}".format(backend_prefix, namespace_pref, fname, arg_key)
155
209
156 return final_key
210 return final_key
157
211
158 return generate_key
212 return generate_key
213
214
215 def get_or_create_region(region_name, region_namespace=None):
216 from vcsserver.lib.rc_cache.backends import FileNamespaceBackend
217 region_obj = region_meta.dogpile_cache_regions.get(region_name)
218 if not region_obj:
219 raise EnvironmentError(
220 'Region `{}` not in configured: {}.'.format(
221 region_name, region_meta.dogpile_cache_regions.keys()))
222
223 region_uid_name = '{}:{}'.format(region_name, region_namespace)
224 if isinstance(region_obj.actual_backend, FileNamespaceBackend):
225 region_exist = region_meta.dogpile_cache_regions.get(region_namespace)
226 if region_exist:
227 log.debug('Using already configured region: %s', region_namespace)
228 return region_exist
229 cache_dir = region_meta.dogpile_config_defaults['cache_dir']
230 expiration_time = region_obj.expiration_time
231
232 if not os.path.isdir(cache_dir):
233 os.makedirs(cache_dir)
234 new_region = make_region(
235 name=region_uid_name,
236 function_key_generator=backend_key_generator(region_obj.actual_backend)
237 )
238 namespace_filename = os.path.join(
239 cache_dir, "{}.cache.dbm".format(region_namespace))
240 # special type that allows 1db per namespace
241 new_region.configure(
242 backend='dogpile.cache.rc.file_namespace',
243 expiration_time=expiration_time,
244 arguments={"filename": namespace_filename}
245 )
246
247 # create and save in region caches
248 log.debug('configuring new region: %s', region_uid_name)
249 region_obj = region_meta.dogpile_cache_regions[region_namespace] = new_region
250
251 return region_obj
252
253
254 def clear_cache_namespace(cache_region, cache_namespace_uid, invalidate=False):
255 region = get_or_create_region(cache_region, cache_namespace_uid)
256 cache_keys = region.backend.list_keys(prefix=cache_namespace_uid)
257 num_delete_keys = len(cache_keys)
258 if invalidate:
259 region.invalidate(hard=False)
260 else:
261 if num_delete_keys:
262 region.delete_multi(cache_keys)
263 return num_delete_keys
@@ -1,855 +1,866 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 import subprocess
21 import subprocess
22 import time
22 import time
23 from urllib2 import URLError
23 from urllib2 import URLError
24 import urlparse
24 import urlparse
25 import logging
25 import logging
26 import posixpath as vcspath
26 import posixpath as vcspath
27 import StringIO
27 import StringIO
28 import urllib
28 import urllib
29 import traceback
29 import traceback
30
30
31 import svn.client
31 import svn.client
32 import svn.core
32 import svn.core
33 import svn.delta
33 import svn.delta
34 import svn.diff
34 import svn.diff
35 import svn.fs
35 import svn.fs
36 import svn.repos
36 import svn.repos
37
37
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
38 from vcsserver import svn_diff, exceptions, subprocessio, settings
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
39 from vcsserver.base import RepoFactory, raise_from_original, ArchiveNode, archive_repo
40 from vcsserver.exceptions import NoContentException
40 from vcsserver.exceptions import NoContentException
41 from vcsserver.utils import safe_str
41 from vcsserver.utils import safe_str
42 from vcsserver.vcs_base import RemoteBase
42 from vcsserver.vcs_base import RemoteBase
43
43
44 log = logging.getLogger(__name__)
44 log = logging.getLogger(__name__)
45
45
46
46
47 svn_compatible_versions_map = {
47 svn_compatible_versions_map = {
48 'pre-1.4-compatible': '1.3',
48 'pre-1.4-compatible': '1.3',
49 'pre-1.5-compatible': '1.4',
49 'pre-1.5-compatible': '1.4',
50 'pre-1.6-compatible': '1.5',
50 'pre-1.6-compatible': '1.5',
51 'pre-1.8-compatible': '1.7',
51 'pre-1.8-compatible': '1.7',
52 'pre-1.9-compatible': '1.8',
52 'pre-1.9-compatible': '1.8',
53 }
53 }
54
54
55 current_compatible_version = '1.12'
55 current_compatible_version = '1.12'
56
56
57
57
58 def reraise_safe_exceptions(func):
58 def reraise_safe_exceptions(func):
59 """Decorator for converting svn exceptions to something neutral."""
59 """Decorator for converting svn exceptions to something neutral."""
60 def wrapper(*args, **kwargs):
60 def wrapper(*args, **kwargs):
61 try:
61 try:
62 return func(*args, **kwargs)
62 return func(*args, **kwargs)
63 except Exception as e:
63 except Exception as e:
64 if not hasattr(e, '_vcs_kind'):
64 if not hasattr(e, '_vcs_kind'):
65 log.exception("Unhandled exception in svn remote call")
65 log.exception("Unhandled exception in svn remote call")
66 raise_from_original(exceptions.UnhandledException(e))
66 raise_from_original(exceptions.UnhandledException(e))
67 raise
67 raise
68 return wrapper
68 return wrapper
69
69
70
70
71 class SubversionFactory(RepoFactory):
71 class SubversionFactory(RepoFactory):
72 repo_type = 'svn'
72 repo_type = 'svn'
73
73
74 def _create_repo(self, wire, create, compatible_version):
74 def _create_repo(self, wire, create, compatible_version):
75 path = svn.core.svn_path_canonicalize(wire['path'])
75 path = svn.core.svn_path_canonicalize(wire['path'])
76 if create:
76 if create:
77 fs_config = {'compatible-version': current_compatible_version}
77 fs_config = {'compatible-version': current_compatible_version}
78 if compatible_version:
78 if compatible_version:
79
79
80 compatible_version_string = \
80 compatible_version_string = \
81 svn_compatible_versions_map.get(compatible_version) \
81 svn_compatible_versions_map.get(compatible_version) \
82 or compatible_version
82 or compatible_version
83 fs_config['compatible-version'] = compatible_version_string
83 fs_config['compatible-version'] = compatible_version_string
84
84
85 log.debug('Create SVN repo with config "%s"', fs_config)
85 log.debug('Create SVN repo with config "%s"', fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
86 repo = svn.repos.create(path, "", "", None, fs_config)
87 else:
87 else:
88 repo = svn.repos.open(path)
88 repo = svn.repos.open(path)
89
89
90 log.debug('Got SVN object: %s', repo)
90 log.debug('Got SVN object: %s', repo)
91 return repo
91 return repo
92
92
93 def repo(self, wire, create=False, compatible_version=None):
93 def repo(self, wire, create=False, compatible_version=None):
94 """
94 """
95 Get a repository instance for the given path.
95 Get a repository instance for the given path.
96 """
96 """
97 return self._create_repo(wire, create, compatible_version)
97 return self._create_repo(wire, create, compatible_version)
98
98
99
99
100 NODE_TYPE_MAPPING = {
100 NODE_TYPE_MAPPING = {
101 svn.core.svn_node_file: 'file',
101 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_dir: 'dir',
102 svn.core.svn_node_dir: 'dir',
103 }
103 }
104
104
105
105
106 class SvnRemote(RemoteBase):
106 class SvnRemote(RemoteBase):
107
107
108 def __init__(self, factory, hg_factory=None):
108 def __init__(self, factory, hg_factory=None):
109 self._factory = factory
109 self._factory = factory
110 # TODO: Remove once we do not use internal Mercurial objects anymore
110 # TODO: Remove once we do not use internal Mercurial objects anymore
111 # for subversion
111 # for subversion
112 self._hg_factory = hg_factory
112 self._hg_factory = hg_factory
113
113
114 @reraise_safe_exceptions
114 @reraise_safe_exceptions
115 def discover_svn_version(self):
115 def discover_svn_version(self):
116 try:
116 try:
117 import svn.core
117 import svn.core
118 svn_ver = svn.core.SVN_VERSION
118 svn_ver = svn.core.SVN_VERSION
119 except ImportError:
119 except ImportError:
120 svn_ver = None
120 svn_ver = None
121 return svn_ver
121 return svn_ver
122
122
123 @reraise_safe_exceptions
123 @reraise_safe_exceptions
124 def is_empty(self, wire):
124 def is_empty(self, wire):
125
125
126 try:
126 try:
127 return self.lookup(wire, -1) == 0
127 return self.lookup(wire, -1) == 0
128 except Exception:
128 except Exception:
129 log.exception("failed to read object_store")
129 log.exception("failed to read object_store")
130 return False
130 return False
131
131
132 def check_url(self, url, config_items):
132 def check_url(self, url, config_items):
133 # this can throw exception if not installed, but we detect this
133 # this can throw exception if not installed, but we detect this
134 from hgsubversion import svnrepo
134 from hgsubversion import svnrepo
135
135
136 baseui = self._hg_factory._create_config(config_items)
136 baseui = self._hg_factory._create_config(config_items)
137 # uuid function get's only valid UUID from proper repo, else
137 # uuid function get's only valid UUID from proper repo, else
138 # throws exception
138 # throws exception
139 try:
139 try:
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
140 svnrepo.svnremoterepo(baseui, url).svn.uuid
141 except Exception:
141 except Exception:
142 tb = traceback.format_exc()
142 tb = traceback.format_exc()
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
143 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
144 raise URLError(
144 raise URLError(
145 '"%s" is not a valid Subversion source url.' % (url, ))
145 '"%s" is not a valid Subversion source url.' % (url, ))
146 return True
146 return True
147
147
148 def is_path_valid_repository(self, wire, path):
148 def is_path_valid_repository(self, wire, path):
149
149
150 # NOTE(marcink): short circuit the check for SVN repo
150 # NOTE(marcink): short circuit the check for SVN repo
151 # the repos.open might be expensive to check, but we have one cheap
151 # the repos.open might be expensive to check, but we have one cheap
152 # pre condition that we can use, to check for 'format' file
152 # pre condition that we can use, to check for 'format' file
153
153
154 if not os.path.isfile(os.path.join(path, 'format')):
154 if not os.path.isfile(os.path.join(path, 'format')):
155 return False
155 return False
156
156
157 try:
157 try:
158 svn.repos.open(path)
158 svn.repos.open(path)
159 except svn.core.SubversionException:
159 except svn.core.SubversionException:
160 tb = traceback.format_exc()
160 tb = traceback.format_exc()
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
161 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
162 return False
162 return False
163 return True
163 return True
164
164
165 @reraise_safe_exceptions
165 @reraise_safe_exceptions
166 def verify(self, wire,):
166 def verify(self, wire,):
167 repo_path = wire['path']
167 repo_path = wire['path']
168 if not self.is_path_valid_repository(wire, repo_path):
168 if not self.is_path_valid_repository(wire, repo_path):
169 raise Exception(
169 raise Exception(
170 "Path %s is not a valid Subversion repository." % repo_path)
170 "Path %s is not a valid Subversion repository." % repo_path)
171
171
172 cmd = ['svnadmin', 'info', repo_path]
172 cmd = ['svnadmin', 'info', repo_path]
173 stdout, stderr = subprocessio.run_command(cmd)
173 stdout, stderr = subprocessio.run_command(cmd)
174 return stdout
174 return stdout
175
175
176 def lookup(self, wire, revision):
176 def lookup(self, wire, revision):
177 if revision not in [-1, None, 'HEAD']:
177 if revision not in [-1, None, 'HEAD']:
178 raise NotImplementedError
178 raise NotImplementedError
179 repo = self._factory.repo(wire)
179 repo = self._factory.repo(wire)
180 fs_ptr = svn.repos.fs(repo)
180 fs_ptr = svn.repos.fs(repo)
181 head = svn.fs.youngest_rev(fs_ptr)
181 head = svn.fs.youngest_rev(fs_ptr)
182 return head
182 return head
183
183
184 def lookup_interval(self, wire, start_ts, end_ts):
184 def lookup_interval(self, wire, start_ts, end_ts):
185 repo = self._factory.repo(wire)
185 repo = self._factory.repo(wire)
186 fsobj = svn.repos.fs(repo)
186 fsobj = svn.repos.fs(repo)
187 start_rev = None
187 start_rev = None
188 end_rev = None
188 end_rev = None
189 if start_ts:
189 if start_ts:
190 start_ts_svn = apr_time_t(start_ts)
190 start_ts_svn = apr_time_t(start_ts)
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
191 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
192 else:
192 else:
193 start_rev = 1
193 start_rev = 1
194 if end_ts:
194 if end_ts:
195 end_ts_svn = apr_time_t(end_ts)
195 end_ts_svn = apr_time_t(end_ts)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
196 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
197 else:
197 else:
198 end_rev = svn.fs.youngest_rev(fsobj)
198 end_rev = svn.fs.youngest_rev(fsobj)
199 return start_rev, end_rev
199 return start_rev, end_rev
200
200
201 def revision_properties(self, wire, revision):
201 def revision_properties(self, wire, revision):
202
202
203 cache_on, context_uid, repo_id = self._cache_on(wire)
203 cache_on, context_uid, repo_id = self._cache_on(wire)
204 @self.region.conditional_cache_on_arguments(condition=cache_on)
204 region = self._region(wire)
205 @region.conditional_cache_on_arguments(condition=cache_on)
205 def _revision_properties(_repo_id, _revision):
206 def _revision_properties(_repo_id, _revision):
206 repo = self._factory.repo(wire)
207 repo = self._factory.repo(wire)
207 fs_ptr = svn.repos.fs(repo)
208 fs_ptr = svn.repos.fs(repo)
208 return svn.fs.revision_proplist(fs_ptr, revision)
209 return svn.fs.revision_proplist(fs_ptr, revision)
209 return _revision_properties(repo_id, revision)
210 return _revision_properties(repo_id, revision)
210
211
211 def revision_changes(self, wire, revision):
212 def revision_changes(self, wire, revision):
212
213
213 repo = self._factory.repo(wire)
214 repo = self._factory.repo(wire)
214 fsobj = svn.repos.fs(repo)
215 fsobj = svn.repos.fs(repo)
215 rev_root = svn.fs.revision_root(fsobj, revision)
216 rev_root = svn.fs.revision_root(fsobj, revision)
216
217
217 editor = svn.repos.ChangeCollector(fsobj, rev_root)
218 editor = svn.repos.ChangeCollector(fsobj, rev_root)
218 editor_ptr, editor_baton = svn.delta.make_editor(editor)
219 editor_ptr, editor_baton = svn.delta.make_editor(editor)
219 base_dir = ""
220 base_dir = ""
220 send_deltas = False
221 send_deltas = False
221 svn.repos.replay2(
222 svn.repos.replay2(
222 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
223 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
223 editor_ptr, editor_baton, None)
224 editor_ptr, editor_baton, None)
224
225
225 added = []
226 added = []
226 changed = []
227 changed = []
227 removed = []
228 removed = []
228
229
229 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
230 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
230 for path, change in editor.changes.iteritems():
231 for path, change in editor.changes.iteritems():
231 # TODO: Decide what to do with directory nodes. Subversion can add
232 # TODO: Decide what to do with directory nodes. Subversion can add
232 # empty directories.
233 # empty directories.
233
234
234 if change.item_kind == svn.core.svn_node_dir:
235 if change.item_kind == svn.core.svn_node_dir:
235 continue
236 continue
236 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
237 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
237 added.append(path)
238 added.append(path)
238 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
239 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
239 svn.repos.CHANGE_ACTION_REPLACE]:
240 svn.repos.CHANGE_ACTION_REPLACE]:
240 changed.append(path)
241 changed.append(path)
241 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
242 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
242 removed.append(path)
243 removed.append(path)
243 else:
244 else:
244 raise NotImplementedError(
245 raise NotImplementedError(
245 "Action %s not supported on path %s" % (
246 "Action %s not supported on path %s" % (
246 change.action, path))
247 change.action, path))
247
248
248 changes = {
249 changes = {
249 'added': added,
250 'added': added,
250 'changed': changed,
251 'changed': changed,
251 'removed': removed,
252 'removed': removed,
252 }
253 }
253 return changes
254 return changes
254
255
255 @reraise_safe_exceptions
256 @reraise_safe_exceptions
256 def node_history(self, wire, path, revision, limit):
257 def node_history(self, wire, path, revision, limit):
257 cache_on, context_uid, repo_id = self._cache_on(wire)
258 cache_on, context_uid, repo_id = self._cache_on(wire)
258 @self.region.conditional_cache_on_arguments(condition=cache_on)
259 region = self._region(wire)
260 @region.conditional_cache_on_arguments(condition=cache_on)
259 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
261 def _assert_correct_path(_context_uid, _repo_id, _path, _revision, _limit):
260 cross_copies = False
262 cross_copies = False
261 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
262 fsobj = svn.repos.fs(repo)
264 fsobj = svn.repos.fs(repo)
263 rev_root = svn.fs.revision_root(fsobj, revision)
265 rev_root = svn.fs.revision_root(fsobj, revision)
264
266
265 history_revisions = []
267 history_revisions = []
266 history = svn.fs.node_history(rev_root, path)
268 history = svn.fs.node_history(rev_root, path)
267 history = svn.fs.history_prev(history, cross_copies)
269 history = svn.fs.history_prev(history, cross_copies)
268 while history:
270 while history:
269 __, node_revision = svn.fs.history_location(history)
271 __, node_revision = svn.fs.history_location(history)
270 history_revisions.append(node_revision)
272 history_revisions.append(node_revision)
271 if limit and len(history_revisions) >= limit:
273 if limit and len(history_revisions) >= limit:
272 break
274 break
273 history = svn.fs.history_prev(history, cross_copies)
275 history = svn.fs.history_prev(history, cross_copies)
274 return history_revisions
276 return history_revisions
275 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
277 return _assert_correct_path(context_uid, repo_id, path, revision, limit)
276
278
277 def node_properties(self, wire, path, revision):
279 def node_properties(self, wire, path, revision):
278 cache_on, context_uid, repo_id = self._cache_on(wire)
280 cache_on, context_uid, repo_id = self._cache_on(wire)
279 @self.region.conditional_cache_on_arguments(condition=cache_on)
281 region = self._region(wire)
282 @region.conditional_cache_on_arguments(condition=cache_on)
280 def _node_properties(_repo_id, _path, _revision):
283 def _node_properties(_repo_id, _path, _revision):
281 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
282 fsobj = svn.repos.fs(repo)
285 fsobj = svn.repos.fs(repo)
283 rev_root = svn.fs.revision_root(fsobj, revision)
286 rev_root = svn.fs.revision_root(fsobj, revision)
284 return svn.fs.node_proplist(rev_root, path)
287 return svn.fs.node_proplist(rev_root, path)
285 return _node_properties(repo_id, path, revision)
288 return _node_properties(repo_id, path, revision)
286
289
287 def file_annotate(self, wire, path, revision):
290 def file_annotate(self, wire, path, revision):
288 abs_path = 'file://' + urllib.pathname2url(
291 abs_path = 'file://' + urllib.pathname2url(
289 vcspath.join(wire['path'], path))
292 vcspath.join(wire['path'], path))
290 file_uri = svn.core.svn_path_canonicalize(abs_path)
293 file_uri = svn.core.svn_path_canonicalize(abs_path)
291
294
292 start_rev = svn_opt_revision_value_t(0)
295 start_rev = svn_opt_revision_value_t(0)
293 peg_rev = svn_opt_revision_value_t(revision)
296 peg_rev = svn_opt_revision_value_t(revision)
294 end_rev = peg_rev
297 end_rev = peg_rev
295
298
296 annotations = []
299 annotations = []
297
300
298 def receiver(line_no, revision, author, date, line, pool):
301 def receiver(line_no, revision, author, date, line, pool):
299 annotations.append((line_no, revision, line))
302 annotations.append((line_no, revision, line))
300
303
301 # TODO: Cannot use blame5, missing typemap function in the swig code
304 # TODO: Cannot use blame5, missing typemap function in the swig code
302 try:
305 try:
303 svn.client.blame2(
306 svn.client.blame2(
304 file_uri, peg_rev, start_rev, end_rev,
307 file_uri, peg_rev, start_rev, end_rev,
305 receiver, svn.client.create_context())
308 receiver, svn.client.create_context())
306 except svn.core.SubversionException as exc:
309 except svn.core.SubversionException as exc:
307 log.exception("Error during blame operation.")
310 log.exception("Error during blame operation.")
308 raise Exception(
311 raise Exception(
309 "Blame not supported or file does not exist at path %s. "
312 "Blame not supported or file does not exist at path %s. "
310 "Error %s." % (path, exc))
313 "Error %s." % (path, exc))
311
314
312 return annotations
315 return annotations
313
316
314 def get_node_type(self, wire, path, revision=None):
317 def get_node_type(self, wire, path, revision=None):
315
318
316 cache_on, context_uid, repo_id = self._cache_on(wire)
319 cache_on, context_uid, repo_id = self._cache_on(wire)
317 @self.region.conditional_cache_on_arguments(condition=cache_on)
320 region = self._region(wire)
321 @region.conditional_cache_on_arguments(condition=cache_on)
318 def _get_node_type(_repo_id, _path, _revision):
322 def _get_node_type(_repo_id, _path, _revision):
319 repo = self._factory.repo(wire)
323 repo = self._factory.repo(wire)
320 fs_ptr = svn.repos.fs(repo)
324 fs_ptr = svn.repos.fs(repo)
321 if _revision is None:
325 if _revision is None:
322 _revision = svn.fs.youngest_rev(fs_ptr)
326 _revision = svn.fs.youngest_rev(fs_ptr)
323 root = svn.fs.revision_root(fs_ptr, _revision)
327 root = svn.fs.revision_root(fs_ptr, _revision)
324 node = svn.fs.check_path(root, path)
328 node = svn.fs.check_path(root, path)
325 return NODE_TYPE_MAPPING.get(node, None)
329 return NODE_TYPE_MAPPING.get(node, None)
326 return _get_node_type(repo_id, path, revision)
330 return _get_node_type(repo_id, path, revision)
327
331
328 def get_nodes(self, wire, path, revision=None):
332 def get_nodes(self, wire, path, revision=None):
329
333
330 cache_on, context_uid, repo_id = self._cache_on(wire)
334 cache_on, context_uid, repo_id = self._cache_on(wire)
331 @self.region.conditional_cache_on_arguments(condition=cache_on)
335 region = self._region(wire)
336 @region.conditional_cache_on_arguments(condition=cache_on)
332 def _get_nodes(_repo_id, _path, _revision):
337 def _get_nodes(_repo_id, _path, _revision):
333 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
334 fsobj = svn.repos.fs(repo)
339 fsobj = svn.repos.fs(repo)
335 if _revision is None:
340 if _revision is None:
336 _revision = svn.fs.youngest_rev(fsobj)
341 _revision = svn.fs.youngest_rev(fsobj)
337 root = svn.fs.revision_root(fsobj, _revision)
342 root = svn.fs.revision_root(fsobj, _revision)
338 entries = svn.fs.dir_entries(root, path)
343 entries = svn.fs.dir_entries(root, path)
339 result = []
344 result = []
340 for entry_path, entry_info in entries.iteritems():
345 for entry_path, entry_info in entries.iteritems():
341 result.append(
346 result.append(
342 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
347 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
343 return result
348 return result
344 return _get_nodes(repo_id, path, revision)
349 return _get_nodes(repo_id, path, revision)
345
350
346 def get_file_content(self, wire, path, rev=None):
351 def get_file_content(self, wire, path, rev=None):
347 repo = self._factory.repo(wire)
352 repo = self._factory.repo(wire)
348 fsobj = svn.repos.fs(repo)
353 fsobj = svn.repos.fs(repo)
349 if rev is None:
354 if rev is None:
350 rev = svn.fs.youngest_revision(fsobj)
355 rev = svn.fs.youngest_revision(fsobj)
351 root = svn.fs.revision_root(fsobj, rev)
356 root = svn.fs.revision_root(fsobj, rev)
352 content = svn.core.Stream(svn.fs.file_contents(root, path))
357 content = svn.core.Stream(svn.fs.file_contents(root, path))
353 return content.read()
358 return content.read()
354
359
355 def get_file_size(self, wire, path, revision=None):
360 def get_file_size(self, wire, path, revision=None):
356
361
357 cache_on, context_uid, repo_id = self._cache_on(wire)
362 cache_on, context_uid, repo_id = self._cache_on(wire)
358 @self.region.conditional_cache_on_arguments(condition=cache_on)
363 region = self._region(wire)
364 @region.conditional_cache_on_arguments(condition=cache_on)
359 def _get_file_size(_repo_id, _path, _revision):
365 def _get_file_size(_repo_id, _path, _revision):
360 repo = self._factory.repo(wire)
366 repo = self._factory.repo(wire)
361 fsobj = svn.repos.fs(repo)
367 fsobj = svn.repos.fs(repo)
362 if _revision is None:
368 if _revision is None:
363 _revision = svn.fs.youngest_revision(fsobj)
369 _revision = svn.fs.youngest_revision(fsobj)
364 root = svn.fs.revision_root(fsobj, _revision)
370 root = svn.fs.revision_root(fsobj, _revision)
365 size = svn.fs.file_length(root, path)
371 size = svn.fs.file_length(root, path)
366 return size
372 return size
367 return _get_file_size(repo_id, path, revision)
373 return _get_file_size(repo_id, path, revision)
368
374
369 def create_repository(self, wire, compatible_version=None):
375 def create_repository(self, wire, compatible_version=None):
370 log.info('Creating Subversion repository in path "%s"', wire['path'])
376 log.info('Creating Subversion repository in path "%s"', wire['path'])
371 self._factory.repo(wire, create=True,
377 self._factory.repo(wire, create=True,
372 compatible_version=compatible_version)
378 compatible_version=compatible_version)
373
379
374 def get_url_and_credentials(self, src_url):
380 def get_url_and_credentials(self, src_url):
375 obj = urlparse.urlparse(src_url)
381 obj = urlparse.urlparse(src_url)
376 username = obj.username or None
382 username = obj.username or None
377 password = obj.password or None
383 password = obj.password or None
378 return username, password, src_url
384 return username, password, src_url
379
385
380 def import_remote_repository(self, wire, src_url):
386 def import_remote_repository(self, wire, src_url):
381 repo_path = wire['path']
387 repo_path = wire['path']
382 if not self.is_path_valid_repository(wire, repo_path):
388 if not self.is_path_valid_repository(wire, repo_path):
383 raise Exception(
389 raise Exception(
384 "Path %s is not a valid Subversion repository." % repo_path)
390 "Path %s is not a valid Subversion repository." % repo_path)
385
391
386 username, password, src_url = self.get_url_and_credentials(src_url)
392 username, password, src_url = self.get_url_and_credentials(src_url)
387 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
393 rdump_cmd = ['svnrdump', 'dump', '--non-interactive',
388 '--trust-server-cert-failures=unknown-ca']
394 '--trust-server-cert-failures=unknown-ca']
389 if username and password:
395 if username and password:
390 rdump_cmd += ['--username', username, '--password', password]
396 rdump_cmd += ['--username', username, '--password', password]
391 rdump_cmd += [src_url]
397 rdump_cmd += [src_url]
392
398
393 rdump = subprocess.Popen(
399 rdump = subprocess.Popen(
394 rdump_cmd,
400 rdump_cmd,
395 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
401 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
396 load = subprocess.Popen(
402 load = subprocess.Popen(
397 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
403 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
398
404
399 # TODO: johbo: This can be a very long operation, might be better
405 # TODO: johbo: This can be a very long operation, might be better
400 # to track some kind of status and provide an api to check if the
406 # to track some kind of status and provide an api to check if the
401 # import is done.
407 # import is done.
402 rdump.wait()
408 rdump.wait()
403 load.wait()
409 load.wait()
404
410
405 log.debug('Return process ended with code: %s', rdump.returncode)
411 log.debug('Return process ended with code: %s', rdump.returncode)
406 if rdump.returncode != 0:
412 if rdump.returncode != 0:
407 errors = rdump.stderr.read()
413 errors = rdump.stderr.read()
408 log.error('svnrdump dump failed: statuscode %s: message: %s',
414 log.error('svnrdump dump failed: statuscode %s: message: %s',
409 rdump.returncode, errors)
415 rdump.returncode, errors)
410 reason = 'UNKNOWN'
416 reason = 'UNKNOWN'
411 if 'svnrdump: E230001:' in errors:
417 if 'svnrdump: E230001:' in errors:
412 reason = 'INVALID_CERTIFICATE'
418 reason = 'INVALID_CERTIFICATE'
413
419
414 if reason == 'UNKNOWN':
420 if reason == 'UNKNOWN':
415 reason = 'UNKNOWN:{}'.format(errors)
421 reason = 'UNKNOWN:{}'.format(errors)
416 raise Exception(
422 raise Exception(
417 'Failed to dump the remote repository from %s. Reason:%s' % (
423 'Failed to dump the remote repository from %s. Reason:%s' % (
418 src_url, reason))
424 src_url, reason))
419 if load.returncode != 0:
425 if load.returncode != 0:
420 raise Exception(
426 raise Exception(
421 'Failed to load the dump of remote repository from %s.' %
427 'Failed to load the dump of remote repository from %s.' %
422 (src_url, ))
428 (src_url, ))
423
429
424 def commit(self, wire, message, author, timestamp, updated, removed):
430 def commit(self, wire, message, author, timestamp, updated, removed):
425 assert isinstance(message, str)
431 assert isinstance(message, str)
426 assert isinstance(author, str)
432 assert isinstance(author, str)
427
433
428 repo = self._factory.repo(wire)
434 repo = self._factory.repo(wire)
429 fsobj = svn.repos.fs(repo)
435 fsobj = svn.repos.fs(repo)
430
436
431 rev = svn.fs.youngest_rev(fsobj)
437 rev = svn.fs.youngest_rev(fsobj)
432 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
438 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
433 txn_root = svn.fs.txn_root(txn)
439 txn_root = svn.fs.txn_root(txn)
434
440
435 for node in updated:
441 for node in updated:
436 TxnNodeProcessor(node, txn_root).update()
442 TxnNodeProcessor(node, txn_root).update()
437 for node in removed:
443 for node in removed:
438 TxnNodeProcessor(node, txn_root).remove()
444 TxnNodeProcessor(node, txn_root).remove()
439
445
440 commit_id = svn.repos.fs_commit_txn(repo, txn)
446 commit_id = svn.repos.fs_commit_txn(repo, txn)
441
447
442 if timestamp:
448 if timestamp:
443 apr_time = apr_time_t(timestamp)
449 apr_time = apr_time_t(timestamp)
444 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
450 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
445 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
451 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
446
452
447 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
453 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
448 return commit_id
454 return commit_id
449
455
450 def diff(self, wire, rev1, rev2, path1=None, path2=None,
456 def diff(self, wire, rev1, rev2, path1=None, path2=None,
451 ignore_whitespace=False, context=3):
457 ignore_whitespace=False, context=3):
452
458
453 wire.update(cache=False)
459 wire.update(cache=False)
454 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
455 diff_creator = SvnDiffer(
461 diff_creator = SvnDiffer(
456 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
462 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
457 try:
463 try:
458 return diff_creator.generate_diff()
464 return diff_creator.generate_diff()
459 except svn.core.SubversionException as e:
465 except svn.core.SubversionException as e:
460 log.exception(
466 log.exception(
461 "Error during diff operation operation. "
467 "Error during diff operation operation. "
462 "Path might not exist %s, %s" % (path1, path2))
468 "Path might not exist %s, %s" % (path1, path2))
463 return ""
469 return ""
464
470
465 @reraise_safe_exceptions
471 @reraise_safe_exceptions
466 def is_large_file(self, wire, path):
472 def is_large_file(self, wire, path):
467 return False
473 return False
468
474
469 @reraise_safe_exceptions
475 @reraise_safe_exceptions
470 def is_binary(self, wire, rev, path):
476 def is_binary(self, wire, rev, path):
471 cache_on, context_uid, repo_id = self._cache_on(wire)
477 cache_on, context_uid, repo_id = self._cache_on(wire)
472
478
473 @self.region.conditional_cache_on_arguments(condition=cache_on)
479 region = self._region(wire)
480 @region.conditional_cache_on_arguments(condition=cache_on)
474 def _is_binary(_repo_id, _rev, _path):
481 def _is_binary(_repo_id, _rev, _path):
475 raw_bytes = self.get_file_content(wire, path, rev)
482 raw_bytes = self.get_file_content(wire, path, rev)
476 return raw_bytes and '\0' in raw_bytes
483 return raw_bytes and '\0' in raw_bytes
477
484
478 return _is_binary(repo_id, rev, path)
485 return _is_binary(repo_id, rev, path)
479
486
480 @reraise_safe_exceptions
487 @reraise_safe_exceptions
481 def run_svn_command(self, wire, cmd, **opts):
488 def run_svn_command(self, wire, cmd, **opts):
482 path = wire.get('path', None)
489 path = wire.get('path', None)
483
490
484 if path and os.path.isdir(path):
491 if path and os.path.isdir(path):
485 opts['cwd'] = path
492 opts['cwd'] = path
486
493
487 safe_call = opts.pop('_safe', False)
494 safe_call = opts.pop('_safe', False)
488
495
489 svnenv = os.environ.copy()
496 svnenv = os.environ.copy()
490 svnenv.update(opts.pop('extra_env', {}))
497 svnenv.update(opts.pop('extra_env', {}))
491
498
492 _opts = {'env': svnenv, 'shell': False}
499 _opts = {'env': svnenv, 'shell': False}
493
500
494 try:
501 try:
495 _opts.update(opts)
502 _opts.update(opts)
496 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
503 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
497
504
498 return ''.join(p), ''.join(p.error)
505 return ''.join(p), ''.join(p.error)
499 except (EnvironmentError, OSError) as err:
506 except (EnvironmentError, OSError) as err:
500 if safe_call:
507 if safe_call:
501 return '', safe_str(err).strip()
508 return '', safe_str(err).strip()
502 else:
509 else:
503 cmd = ' '.join(cmd) # human friendly CMD
510 cmd = ' '.join(cmd) # human friendly CMD
504 tb_err = ("Couldn't run svn command (%s).\n"
511 tb_err = ("Couldn't run svn command (%s).\n"
505 "Original error was:%s\n"
512 "Original error was:%s\n"
506 "Call options:%s\n"
513 "Call options:%s\n"
507 % (cmd, err, _opts))
514 % (cmd, err, _opts))
508 log.exception(tb_err)
515 log.exception(tb_err)
509 raise exceptions.VcsException()(tb_err)
516 raise exceptions.VcsException()(tb_err)
510
517
511 @reraise_safe_exceptions
518 @reraise_safe_exceptions
512 def install_hooks(self, wire, force=False):
519 def install_hooks(self, wire, force=False):
513 from vcsserver.hook_utils import install_svn_hooks
520 from vcsserver.hook_utils import install_svn_hooks
514 repo_path = wire['path']
521 repo_path = wire['path']
515 binary_dir = settings.BINARY_DIR
522 binary_dir = settings.BINARY_DIR
516 executable = None
523 executable = None
517 if binary_dir:
524 if binary_dir:
518 executable = os.path.join(binary_dir, 'python')
525 executable = os.path.join(binary_dir, 'python')
519 return install_svn_hooks(
526 return install_svn_hooks(
520 repo_path, executable=executable, force_create=force)
527 repo_path, executable=executable, force_create=force)
521
528
522 @reraise_safe_exceptions
529 @reraise_safe_exceptions
523 def get_hooks_info(self, wire):
530 def get_hooks_info(self, wire):
524 from vcsserver.hook_utils import (
531 from vcsserver.hook_utils import (
525 get_svn_pre_hook_version, get_svn_post_hook_version)
532 get_svn_pre_hook_version, get_svn_post_hook_version)
526 repo_path = wire['path']
533 repo_path = wire['path']
527 return {
534 return {
528 'pre_version': get_svn_pre_hook_version(repo_path),
535 'pre_version': get_svn_pre_hook_version(repo_path),
529 'post_version': get_svn_post_hook_version(repo_path),
536 'post_version': get_svn_post_hook_version(repo_path),
530 }
537 }
531
538
532 @reraise_safe_exceptions
539 @reraise_safe_exceptions
540 def set_head_ref(self, wire, head_name):
541 pass
542
543 @reraise_safe_exceptions
533 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
544 def archive_repo(self, wire, archive_dest_path, kind, mtime, archive_at_path,
534 archive_dir_name, commit_id):
545 archive_dir_name, commit_id):
535
546
536 def walk_tree(root, root_dir, _commit_id):
547 def walk_tree(root, root_dir, _commit_id):
537 """
548 """
538 Special recursive svn repo walker
549 Special recursive svn repo walker
539 """
550 """
540
551
541 filemode_default = 0o100644
552 filemode_default = 0o100644
542 filemode_executable = 0o100755
553 filemode_executable = 0o100755
543
554
544 file_iter = svn.fs.dir_entries(root, root_dir)
555 file_iter = svn.fs.dir_entries(root, root_dir)
545 for f_name in file_iter:
556 for f_name in file_iter:
546 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
557 f_type = NODE_TYPE_MAPPING.get(file_iter[f_name].kind, None)
547
558
548 if f_type == 'dir':
559 if f_type == 'dir':
549 # return only DIR, and then all entries in that dir
560 # return only DIR, and then all entries in that dir
550 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
561 yield os.path.join(root_dir, f_name), {'mode': filemode_default}, f_type
551 new_root = os.path.join(root_dir, f_name)
562 new_root = os.path.join(root_dir, f_name)
552 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
563 for _f_name, _f_data, _f_type in walk_tree(root, new_root, _commit_id):
553 yield _f_name, _f_data, _f_type
564 yield _f_name, _f_data, _f_type
554 else:
565 else:
555 f_path = os.path.join(root_dir, f_name).rstrip('/')
566 f_path = os.path.join(root_dir, f_name).rstrip('/')
556 prop_list = svn.fs.node_proplist(root, f_path)
567 prop_list = svn.fs.node_proplist(root, f_path)
557
568
558 f_mode = filemode_default
569 f_mode = filemode_default
559 if prop_list.get('svn:executable'):
570 if prop_list.get('svn:executable'):
560 f_mode = filemode_executable
571 f_mode = filemode_executable
561
572
562 f_is_link = False
573 f_is_link = False
563 if prop_list.get('svn:special'):
574 if prop_list.get('svn:special'):
564 f_is_link = True
575 f_is_link = True
565
576
566 data = {
577 data = {
567 'is_link': f_is_link,
578 'is_link': f_is_link,
568 'mode': f_mode,
579 'mode': f_mode,
569 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
580 'content_stream': svn.core.Stream(svn.fs.file_contents(root, f_path)).read
570 }
581 }
571
582
572 yield f_path, data, f_type
583 yield f_path, data, f_type
573
584
574 def file_walker(_commit_id, path):
585 def file_walker(_commit_id, path):
575 repo = self._factory.repo(wire)
586 repo = self._factory.repo(wire)
576 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
587 root = svn.fs.revision_root(svn.repos.fs(repo), int(commit_id))
577
588
578 def no_content():
589 def no_content():
579 raise NoContentException()
590 raise NoContentException()
580
591
581 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
592 for f_name, f_data, f_type in walk_tree(root, path, _commit_id):
582 file_path = f_name
593 file_path = f_name
583
594
584 if f_type == 'dir':
595 if f_type == 'dir':
585 mode = f_data['mode']
596 mode = f_data['mode']
586 yield ArchiveNode(file_path, mode, False, no_content)
597 yield ArchiveNode(file_path, mode, False, no_content)
587 else:
598 else:
588 mode = f_data['mode']
599 mode = f_data['mode']
589 is_link = f_data['is_link']
600 is_link = f_data['is_link']
590 data_stream = f_data['content_stream']
601 data_stream = f_data['content_stream']
591 yield ArchiveNode(file_path, mode, is_link, data_stream)
602 yield ArchiveNode(file_path, mode, is_link, data_stream)
592
603
593 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
604 return archive_repo(file_walker, archive_dest_path, kind, mtime, archive_at_path,
594 archive_dir_name, commit_id)
605 archive_dir_name, commit_id)
595
606
596
607
597 class SvnDiffer(object):
608 class SvnDiffer(object):
598 """
609 """
599 Utility to create diffs based on difflib and the Subversion api
610 Utility to create diffs based on difflib and the Subversion api
600 """
611 """
601
612
602 binary_content = False
613 binary_content = False
603
614
604 def __init__(
615 def __init__(
605 self, repo, src_rev, src_path, tgt_rev, tgt_path,
616 self, repo, src_rev, src_path, tgt_rev, tgt_path,
606 ignore_whitespace, context):
617 ignore_whitespace, context):
607 self.repo = repo
618 self.repo = repo
608 self.ignore_whitespace = ignore_whitespace
619 self.ignore_whitespace = ignore_whitespace
609 self.context = context
620 self.context = context
610
621
611 fsobj = svn.repos.fs(repo)
622 fsobj = svn.repos.fs(repo)
612
623
613 self.tgt_rev = tgt_rev
624 self.tgt_rev = tgt_rev
614 self.tgt_path = tgt_path or ''
625 self.tgt_path = tgt_path or ''
615 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
626 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
616 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
627 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
617
628
618 self.src_rev = src_rev
629 self.src_rev = src_rev
619 self.src_path = src_path or self.tgt_path
630 self.src_path = src_path or self.tgt_path
620 self.src_root = svn.fs.revision_root(fsobj, src_rev)
631 self.src_root = svn.fs.revision_root(fsobj, src_rev)
621 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
632 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
622
633
623 self._validate()
634 self._validate()
624
635
625 def _validate(self):
636 def _validate(self):
626 if (self.tgt_kind != svn.core.svn_node_none and
637 if (self.tgt_kind != svn.core.svn_node_none and
627 self.src_kind != svn.core.svn_node_none and
638 self.src_kind != svn.core.svn_node_none and
628 self.src_kind != self.tgt_kind):
639 self.src_kind != self.tgt_kind):
629 # TODO: johbo: proper error handling
640 # TODO: johbo: proper error handling
630 raise Exception(
641 raise Exception(
631 "Source and target are not compatible for diff generation. "
642 "Source and target are not compatible for diff generation. "
632 "Source type: %s, target type: %s" %
643 "Source type: %s, target type: %s" %
633 (self.src_kind, self.tgt_kind))
644 (self.src_kind, self.tgt_kind))
634
645
635 def generate_diff(self):
646 def generate_diff(self):
636 buf = StringIO.StringIO()
647 buf = StringIO.StringIO()
637 if self.tgt_kind == svn.core.svn_node_dir:
648 if self.tgt_kind == svn.core.svn_node_dir:
638 self._generate_dir_diff(buf)
649 self._generate_dir_diff(buf)
639 else:
650 else:
640 self._generate_file_diff(buf)
651 self._generate_file_diff(buf)
641 return buf.getvalue()
652 return buf.getvalue()
642
653
643 def _generate_dir_diff(self, buf):
654 def _generate_dir_diff(self, buf):
644 editor = DiffChangeEditor()
655 editor = DiffChangeEditor()
645 editor_ptr, editor_baton = svn.delta.make_editor(editor)
656 editor_ptr, editor_baton = svn.delta.make_editor(editor)
646 svn.repos.dir_delta2(
657 svn.repos.dir_delta2(
647 self.src_root,
658 self.src_root,
648 self.src_path,
659 self.src_path,
649 '', # src_entry
660 '', # src_entry
650 self.tgt_root,
661 self.tgt_root,
651 self.tgt_path,
662 self.tgt_path,
652 editor_ptr, editor_baton,
663 editor_ptr, editor_baton,
653 authorization_callback_allow_all,
664 authorization_callback_allow_all,
654 False, # text_deltas
665 False, # text_deltas
655 svn.core.svn_depth_infinity, # depth
666 svn.core.svn_depth_infinity, # depth
656 False, # entry_props
667 False, # entry_props
657 False, # ignore_ancestry
668 False, # ignore_ancestry
658 )
669 )
659
670
660 for path, __, change in sorted(editor.changes):
671 for path, __, change in sorted(editor.changes):
661 self._generate_node_diff(
672 self._generate_node_diff(
662 buf, change, path, self.tgt_path, path, self.src_path)
673 buf, change, path, self.tgt_path, path, self.src_path)
663
674
664 def _generate_file_diff(self, buf):
675 def _generate_file_diff(self, buf):
665 change = None
676 change = None
666 if self.src_kind == svn.core.svn_node_none:
677 if self.src_kind == svn.core.svn_node_none:
667 change = "add"
678 change = "add"
668 elif self.tgt_kind == svn.core.svn_node_none:
679 elif self.tgt_kind == svn.core.svn_node_none:
669 change = "delete"
680 change = "delete"
670 tgt_base, tgt_path = vcspath.split(self.tgt_path)
681 tgt_base, tgt_path = vcspath.split(self.tgt_path)
671 src_base, src_path = vcspath.split(self.src_path)
682 src_base, src_path = vcspath.split(self.src_path)
672 self._generate_node_diff(
683 self._generate_node_diff(
673 buf, change, tgt_path, tgt_base, src_path, src_base)
684 buf, change, tgt_path, tgt_base, src_path, src_base)
674
685
675 def _generate_node_diff(
686 def _generate_node_diff(
676 self, buf, change, tgt_path, tgt_base, src_path, src_base):
687 self, buf, change, tgt_path, tgt_base, src_path, src_base):
677
688
678 if self.src_rev == self.tgt_rev and tgt_base == src_base:
689 if self.src_rev == self.tgt_rev and tgt_base == src_base:
679 # makes consistent behaviour with git/hg to return empty diff if
690 # makes consistent behaviour with git/hg to return empty diff if
680 # we compare same revisions
691 # we compare same revisions
681 return
692 return
682
693
683 tgt_full_path = vcspath.join(tgt_base, tgt_path)
694 tgt_full_path = vcspath.join(tgt_base, tgt_path)
684 src_full_path = vcspath.join(src_base, src_path)
695 src_full_path = vcspath.join(src_base, src_path)
685
696
686 self.binary_content = False
697 self.binary_content = False
687 mime_type = self._get_mime_type(tgt_full_path)
698 mime_type = self._get_mime_type(tgt_full_path)
688
699
689 if mime_type and not mime_type.startswith('text'):
700 if mime_type and not mime_type.startswith('text'):
690 self.binary_content = True
701 self.binary_content = True
691 buf.write("=" * 67 + '\n')
702 buf.write("=" * 67 + '\n')
692 buf.write("Cannot display: file marked as a binary type.\n")
703 buf.write("Cannot display: file marked as a binary type.\n")
693 buf.write("svn:mime-type = %s\n" % mime_type)
704 buf.write("svn:mime-type = %s\n" % mime_type)
694 buf.write("Index: %s\n" % (tgt_path, ))
705 buf.write("Index: %s\n" % (tgt_path, ))
695 buf.write("=" * 67 + '\n')
706 buf.write("=" * 67 + '\n')
696 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
707 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
697 'tgt_path': tgt_path})
708 'tgt_path': tgt_path})
698
709
699 if change == 'add':
710 if change == 'add':
700 # TODO: johbo: SVN is missing a zero here compared to git
711 # TODO: johbo: SVN is missing a zero here compared to git
701 buf.write("new file mode 10644\n")
712 buf.write("new file mode 10644\n")
702
713
703 #TODO(marcink): intro to binary detection of svn patches
714 #TODO(marcink): intro to binary detection of svn patches
704 # if self.binary_content:
715 # if self.binary_content:
705 # buf.write('GIT binary patch\n')
716 # buf.write('GIT binary patch\n')
706
717
707 buf.write("--- /dev/null\t(revision 0)\n")
718 buf.write("--- /dev/null\t(revision 0)\n")
708 src_lines = []
719 src_lines = []
709 else:
720 else:
710 if change == 'delete':
721 if change == 'delete':
711 buf.write("deleted file mode 10644\n")
722 buf.write("deleted file mode 10644\n")
712
723
713 #TODO(marcink): intro to binary detection of svn patches
724 #TODO(marcink): intro to binary detection of svn patches
714 # if self.binary_content:
725 # if self.binary_content:
715 # buf.write('GIT binary patch\n')
726 # buf.write('GIT binary patch\n')
716
727
717 buf.write("--- a/%s\t(revision %s)\n" % (
728 buf.write("--- a/%s\t(revision %s)\n" % (
718 src_path, self.src_rev))
729 src_path, self.src_rev))
719 src_lines = self._svn_readlines(self.src_root, src_full_path)
730 src_lines = self._svn_readlines(self.src_root, src_full_path)
720
731
721 if change == 'delete':
732 if change == 'delete':
722 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
733 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
723 tgt_lines = []
734 tgt_lines = []
724 else:
735 else:
725 buf.write("+++ b/%s\t(revision %s)\n" % (
736 buf.write("+++ b/%s\t(revision %s)\n" % (
726 tgt_path, self.tgt_rev))
737 tgt_path, self.tgt_rev))
727 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
738 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
728
739
729 if not self.binary_content:
740 if not self.binary_content:
730 udiff = svn_diff.unified_diff(
741 udiff = svn_diff.unified_diff(
731 src_lines, tgt_lines, context=self.context,
742 src_lines, tgt_lines, context=self.context,
732 ignore_blank_lines=self.ignore_whitespace,
743 ignore_blank_lines=self.ignore_whitespace,
733 ignore_case=False,
744 ignore_case=False,
734 ignore_space_changes=self.ignore_whitespace)
745 ignore_space_changes=self.ignore_whitespace)
735 buf.writelines(udiff)
746 buf.writelines(udiff)
736
747
737 def _get_mime_type(self, path):
748 def _get_mime_type(self, path):
738 try:
749 try:
739 mime_type = svn.fs.node_prop(
750 mime_type = svn.fs.node_prop(
740 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
751 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
741 except svn.core.SubversionException:
752 except svn.core.SubversionException:
742 mime_type = svn.fs.node_prop(
753 mime_type = svn.fs.node_prop(
743 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
754 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
744 return mime_type
755 return mime_type
745
756
746 def _svn_readlines(self, fs_root, node_path):
757 def _svn_readlines(self, fs_root, node_path):
747 if self.binary_content:
758 if self.binary_content:
748 return []
759 return []
749 node_kind = svn.fs.check_path(fs_root, node_path)
760 node_kind = svn.fs.check_path(fs_root, node_path)
750 if node_kind not in (
761 if node_kind not in (
751 svn.core.svn_node_file, svn.core.svn_node_symlink):
762 svn.core.svn_node_file, svn.core.svn_node_symlink):
752 return []
763 return []
753 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
764 content = svn.core.Stream(svn.fs.file_contents(fs_root, node_path)).read()
754 return content.splitlines(True)
765 return content.splitlines(True)
755
766
756
767
757 class DiffChangeEditor(svn.delta.Editor):
768 class DiffChangeEditor(svn.delta.Editor):
758 """
769 """
759 Records changes between two given revisions
770 Records changes between two given revisions
760 """
771 """
761
772
762 def __init__(self):
773 def __init__(self):
763 self.changes = []
774 self.changes = []
764
775
765 def delete_entry(self, path, revision, parent_baton, pool=None):
776 def delete_entry(self, path, revision, parent_baton, pool=None):
766 self.changes.append((path, None, 'delete'))
777 self.changes.append((path, None, 'delete'))
767
778
768 def add_file(
779 def add_file(
769 self, path, parent_baton, copyfrom_path, copyfrom_revision,
780 self, path, parent_baton, copyfrom_path, copyfrom_revision,
770 file_pool=None):
781 file_pool=None):
771 self.changes.append((path, 'file', 'add'))
782 self.changes.append((path, 'file', 'add'))
772
783
773 def open_file(self, path, parent_baton, base_revision, file_pool=None):
784 def open_file(self, path, parent_baton, base_revision, file_pool=None):
774 self.changes.append((path, 'file', 'change'))
785 self.changes.append((path, 'file', 'change'))
775
786
776
787
777 def authorization_callback_allow_all(root, path, pool):
788 def authorization_callback_allow_all(root, path, pool):
778 return True
789 return True
779
790
780
791
781 class TxnNodeProcessor(object):
792 class TxnNodeProcessor(object):
782 """
793 """
783 Utility to process the change of one node within a transaction root.
794 Utility to process the change of one node within a transaction root.
784
795
785 It encapsulates the knowledge of how to add, update or remove
796 It encapsulates the knowledge of how to add, update or remove
786 a node for a given transaction root. The purpose is to support the method
797 a node for a given transaction root. The purpose is to support the method
787 `SvnRemote.commit`.
798 `SvnRemote.commit`.
788 """
799 """
789
800
790 def __init__(self, node, txn_root):
801 def __init__(self, node, txn_root):
791 assert isinstance(node['path'], str)
802 assert isinstance(node['path'], str)
792
803
793 self.node = node
804 self.node = node
794 self.txn_root = txn_root
805 self.txn_root = txn_root
795
806
796 def update(self):
807 def update(self):
797 self._ensure_parent_dirs()
808 self._ensure_parent_dirs()
798 self._add_file_if_node_does_not_exist()
809 self._add_file_if_node_does_not_exist()
799 self._update_file_content()
810 self._update_file_content()
800 self._update_file_properties()
811 self._update_file_properties()
801
812
802 def remove(self):
813 def remove(self):
803 svn.fs.delete(self.txn_root, self.node['path'])
814 svn.fs.delete(self.txn_root, self.node['path'])
804 # TODO: Clean up directory if empty
815 # TODO: Clean up directory if empty
805
816
806 def _ensure_parent_dirs(self):
817 def _ensure_parent_dirs(self):
807 curdir = vcspath.dirname(self.node['path'])
818 curdir = vcspath.dirname(self.node['path'])
808 dirs_to_create = []
819 dirs_to_create = []
809 while not self._svn_path_exists(curdir):
820 while not self._svn_path_exists(curdir):
810 dirs_to_create.append(curdir)
821 dirs_to_create.append(curdir)
811 curdir = vcspath.dirname(curdir)
822 curdir = vcspath.dirname(curdir)
812
823
813 for curdir in reversed(dirs_to_create):
824 for curdir in reversed(dirs_to_create):
814 log.debug('Creating missing directory "%s"', curdir)
825 log.debug('Creating missing directory "%s"', curdir)
815 svn.fs.make_dir(self.txn_root, curdir)
826 svn.fs.make_dir(self.txn_root, curdir)
816
827
817 def _svn_path_exists(self, path):
828 def _svn_path_exists(self, path):
818 path_status = svn.fs.check_path(self.txn_root, path)
829 path_status = svn.fs.check_path(self.txn_root, path)
819 return path_status != svn.core.svn_node_none
830 return path_status != svn.core.svn_node_none
820
831
821 def _add_file_if_node_does_not_exist(self):
832 def _add_file_if_node_does_not_exist(self):
822 kind = svn.fs.check_path(self.txn_root, self.node['path'])
833 kind = svn.fs.check_path(self.txn_root, self.node['path'])
823 if kind == svn.core.svn_node_none:
834 if kind == svn.core.svn_node_none:
824 svn.fs.make_file(self.txn_root, self.node['path'])
835 svn.fs.make_file(self.txn_root, self.node['path'])
825
836
826 def _update_file_content(self):
837 def _update_file_content(self):
827 assert isinstance(self.node['content'], str)
838 assert isinstance(self.node['content'], str)
828 handler, baton = svn.fs.apply_textdelta(
839 handler, baton = svn.fs.apply_textdelta(
829 self.txn_root, self.node['path'], None, None)
840 self.txn_root, self.node['path'], None, None)
830 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
841 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
831
842
832 def _update_file_properties(self):
843 def _update_file_properties(self):
833 properties = self.node.get('properties', {})
844 properties = self.node.get('properties', {})
834 for key, value in properties.iteritems():
845 for key, value in properties.iteritems():
835 svn.fs.change_node_prop(
846 svn.fs.change_node_prop(
836 self.txn_root, self.node['path'], key, value)
847 self.txn_root, self.node['path'], key, value)
837
848
838
849
839 def apr_time_t(timestamp):
850 def apr_time_t(timestamp):
840 """
851 """
841 Convert a Python timestamp into APR timestamp type apr_time_t
852 Convert a Python timestamp into APR timestamp type apr_time_t
842 """
853 """
843 return timestamp * 1E6
854 return timestamp * 1E6
844
855
845
856
846 def svn_opt_revision_value_t(num):
857 def svn_opt_revision_value_t(num):
847 """
858 """
848 Put `num` into a `svn_opt_revision_value_t` structure.
859 Put `num` into a `svn_opt_revision_value_t` structure.
849 """
860 """
850 value = svn.core.svn_opt_revision_value_t()
861 value = svn.core.svn_opt_revision_value_t()
851 value.number = num
862 value.number = num
852 revision = svn.core.svn_opt_revision_t()
863 revision = svn.core.svn_opt_revision_t()
853 revision.kind = svn.core.svn_opt_revision_number
864 revision.kind = svn.core.svn_opt_revision_number
854 revision.value = value
865 revision.value = value
855 return revision
866 return revision
@@ -1,160 +1,160 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire={}, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.pull(
81 self.remote_git.pull(
82 wire={}, url='/tmp/', apply_refs=False,
82 wire={}, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
98 remote_refs = remote_git.get_remote_refs(wire={}, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102
102
103 class TestReraiseSafeExceptions(object):
103 class TestReraiseSafeExceptions(object):
104
104
105 def test_method_decorated_with_reraise_safe_exceptions(self):
105 def test_method_decorated_with_reraise_safe_exceptions(self):
106 factory = Mock()
106 factory = Mock()
107 git_remote = git.GitRemote(factory)
107 git_remote = git.GitRemote(factory)
108
108
109 def fake_function():
109 def fake_function():
110 return None
110 return None
111
111
112 decorator = git.reraise_safe_exceptions(fake_function)
112 decorator = git.reraise_safe_exceptions(fake_function)
113
113
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
114 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
115 for method_name, method in methods:
115 for method_name, method in methods:
116 if not method_name.startswith('_'):
116 if not method_name.startswith('_') and method_name not in ['vcsserver_invalidate_cache']:
117 assert method.im_func.__code__ == decorator.__code__
117 assert method.im_func.__code__ == decorator.__code__
118
118
119 @pytest.mark.parametrize('side_effect, expected_type', [
119 @pytest.mark.parametrize('side_effect, expected_type', [
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
120 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
121 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
122 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
123 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
124 (dulwich.errors.HangupException(), 'error'),
124 (dulwich.errors.HangupException(), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
125 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
126 ])
126 ])
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
127 def test_safe_exceptions_reraised(self, side_effect, expected_type):
128 @git.reraise_safe_exceptions
128 @git.reraise_safe_exceptions
129 def fake_method():
129 def fake_method():
130 raise side_effect
130 raise side_effect
131
131
132 with pytest.raises(Exception) as exc_info:
132 with pytest.raises(Exception) as exc_info:
133 fake_method()
133 fake_method()
134 assert type(exc_info.value) == Exception
134 assert type(exc_info.value) == Exception
135 assert exc_info.value._vcs_kind == expected_type
135 assert exc_info.value._vcs_kind == expected_type
136
136
137
137
138 class TestDulwichRepoWrapper(object):
138 class TestDulwichRepoWrapper(object):
139 def test_calls_close_on_delete(self):
139 def test_calls_close_on_delete(self):
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
140 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
141 with isdir_patcher:
141 with isdir_patcher:
142 repo = git.Repo('/tmp/abcde')
142 repo = git.Repo('/tmp/abcde')
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
143 with patch.object(git.DulwichRepo, 'close') as close_mock:
144 del repo
144 del repo
145 close_mock.assert_called_once_with()
145 close_mock.assert_called_once_with()
146
146
147
147
148 class TestGitFactory(object):
148 class TestGitFactory(object):
149 def test_create_repo_returns_dulwich_wrapper(self):
149 def test_create_repo_returns_dulwich_wrapper(self):
150
150
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
151 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
152 mock.side_effect = {'repo_objects': ''}
152 mock.side_effect = {'repo_objects': ''}
153 factory = git.GitFactory()
153 factory = git.GitFactory()
154 wire = {
154 wire = {
155 'path': '/tmp/abcde'
155 'path': '/tmp/abcde'
156 }
156 }
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
157 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
158 with isdir_patcher:
158 with isdir_patcher:
159 result = factory._create_repo(wire, True)
159 result = factory._create_repo(wire, True)
160 assert isinstance(result, git.Repo)
160 assert isinstance(result, git.Repo)
@@ -1,108 +1,108 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, MagicMock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hg, hgcompat
27
27
28
28
class TestDiff(object):
    """Error translation for HgRemote.diff."""

    def test_raising_safe_exception_when_lookup_failed(self):
        """A mercurial LookupError must surface as a wire-safe 'lookup' error."""
        remote = hg.HgRemote(Mock())
        with patch('mercurial.patch.diff') as diff_mock:
            diff_mock.side_effect = LookupError(
                'deadbeef', 'index', 'message')
            with pytest.raises(Exception) as exc_info:
                remote.diff(
                    wire={}, commit_id_1='deadbeef', commit_id_2='deadbee1',
                    file_filter=None, opt_git=True, opt_ignorews=True,
                    context=3)
            # The decorator re-raises a plain Exception tagged with the
            # vcs error kind instead of leaking mercurial internals.
            assert type(exc_info.value) == Exception
            assert exc_info.value._vcs_kind == 'lookup'
44
44
45
45
class TestReraiseSafeExceptions(object):
    """Behaviour of the reraise_safe_exceptions decorator on HgRemote."""

    def test_method_decorated_with_reraise_safe_exceptions(self):
        """Every public HgRemote method must carry the safety decorator."""
        remote = hg.HgRemote(Mock())
        public_methods = inspect.getmembers(remote, predicate=inspect.ismethod)
        decorator = hg.reraise_safe_exceptions(None)
        # vcsserver_invalidate_cache is the one public method that is
        # intentionally left undecorated.
        exempt = ['vcsserver_invalidate_cache']
        for method_name, method in public_methods:
            if not method_name.startswith('_') and method_name not in exempt:
                assert method.im_func.__code__ == decorator.__code__

    @pytest.mark.parametrize('side_effect, expected_type', [
        (hgcompat.Abort(), 'abort'),
        (hgcompat.InterventionRequired(), 'abort'),
        (hgcompat.RepoLookupError(), 'lookup'),
        (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
        (hgcompat.RepoError(), 'error'),
        (hgcompat.RequirementError(), 'requirement'),
    ])
    def test_safe_exceptions_reraised(self, side_effect, expected_type):
        """Known mercurial errors map onto their wire-safe vcs kinds."""
        @hg.reraise_safe_exceptions
        def fake_method():
            raise side_effect

        with pytest.raises(Exception) as exc_info:
            fake_method()
        assert type(exc_info.value) == Exception
        assert exc_info.value._vcs_kind == expected_type

    def test_keeps_original_traceback(self):
        """Re-raising must preserve the traceback of the original error."""
        @hg.reraise_safe_exceptions
        def fake_method():
            try:
                raise hgcompat.Abort()
            except:
                # Capture the traceback at the raise site for comparison.
                self.original_traceback = traceback.format_tb(
                    sys.exc_info()[2])
                raise

        try:
            fake_method()
        except Exception:
            new_traceback = traceback.format_tb(sys.exc_info()[2])

        # The original frames must appear as the tail of the new traceback.
        new_traceback_tail = new_traceback[-len(self.original_traceback):]
        assert new_traceback_tail == self.original_traceback

    def test_maps_unknow_exceptions_to_unhandled(self):
        """Unrecognised exception types fall back to the 'unhandled' kind."""
        @hg.reraise_safe_exceptions
        def stub_method():
            raise ValueError('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'unhandled'

    def test_does_not_map_known_exceptions(self):
        """Already-translated vcsserver exceptions keep their kind."""
        @hg.reraise_safe_exceptions
        def stub_method():
            raise exceptions.LookupException()('stub')

        with pytest.raises(Exception) as exc_info:
            stub_method()
        assert exc_info.value._vcs_kind == 'lookup'
@@ -1,32 +1,45 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2020 RhodeCode GmbH
2 # Copyright (C) 2014-2020 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver.lib import rc_cache
18
19
class RemoteBase(object):
    """Shared helpers for remote VCS backends: per-repository cache
    regions and cache-key derivation from the request ``wire`` dict."""

    # SHA of all zeros, conventionally marking an empty repository state.
    EMPTY_COMMIT = '0' * 40

    def _region(self, wire):
        """Return the dogpile cache region namespaced to this repo.

        The namespace is derived from ``wire['repo_id']`` so each
        repository gets its own isolated cache bucket.
        """
        namespace = 'cache_repo.{}'.format(wire.get('repo_id', ''))
        return rc_cache.get_or_create_region('repo_object', namespace)

    def _cache_on(self, wire):
        """Return ``(cache_on, context_uid, repo_id)`` derived from wire.

        Caching is enabled only when a call context is present and the
        caller did not explicitly disable it via ``wire['cache']``.
        """
        context = wire.get('context', '')
        repo_id = wire.get('repo_id', '')
        cache_on = context and wire.get('cache', True)
        return cache_on, '{}'.format(context), repo_id

    def vcsserver_invalidate_cache(self, wire, delete):
        """Clear this repo's cache namespace when ``delete`` is truthy.

        Returns a dict reporting which repo was touched and whether a
        delete was performed.
        """
        from vcsserver.lib import rc_cache
        repo_id = wire.get('repo_id', '')

        if delete:
            namespace = 'cache_repo.{}'.format(repo_id)
            rc_cache.clear_cache_namespace(
                'repo_object', namespace, invalidate=True)

        return {'invalidated': {'repo_id': repo_id, 'delete': delete}}
General Comments 0
You need to be logged in to leave comments. Login now