release: Merge default into stable for release preparation
marcink
r595:0d6e5c13 merge stable
@@ -0,0 +1,26 b''
1 diff --git a/Documentation/git-send-email.txt b/Documentation/git-send-email.txt
2 --- a/Documentation/git-send-email.txt
3 +++ b/Documentation/git-send-email.txt
4 @@ -208,8 +208,7 @@ a password is obtained using 'git-credential'.
5 specify a full pathname of a sendmail-like program instead;
6 the program must support the `-i` option. Default value can
7 be specified by the `sendemail.smtpServer` configuration
8 - option; the built-in default is to search for `sendmail` in
9 - `/usr/sbin`, `/usr/lib` and $PATH if such program is
10 + option; the built-in default is to search in $PATH if such program is
11 available, falling back to `localhost` otherwise.
12
13 --smtp-server-port=<port>::
14 diff --git a/git-send-email.perl b/git-send-email.perl
15 --- a/git-send-email.perl
16 +++ b/git-send-email.perl
17 @@ -944,8 +944,7 @@ if (defined $reply_to) {
18 }
19
20 if (!defined $smtp_server) {
21 - my @sendmail_paths = qw( /usr/sbin/sendmail /usr/lib/sendmail );
22 - push @sendmail_paths, map {"$_/sendmail"} split /:/, $ENV{PATH};
23 + my @sendmail_paths = map {"$_/sendmail"} split /:/, $ENV{PATH};
24 foreach (@sendmail_paths) {
25 if (-x $_) {
26 $smtp_server = $_;
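The patch above makes git-send-email look for a sendmail-like program only on $PATH, dropping the hard-coded /usr/sbin and /usr/lib locations. As a minimal sketch (illustration only; the real logic stays in git-send-email.perl as shown), the same lookup in Python would be:

import os

def find_sendmail(path_env=None):
    # Walk each PATH entry and return the first executable named "sendmail";
    # git-send-email falls back to "localhost" when nothing is found.
    path_env = path_env if path_env is not None else os.environ.get("PATH", "")
    for directory in path_env.split(os.pathsep):
        candidate = os.path.join(directory, "sendmail")
        if os.path.isfile(candidate) and os.access(candidate, os.X_OK):
            return candidate
    return None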
@@ -0,0 +1,12 b''
1 diff --git a/t/test-lib.sh b/t/test-lib.sh
2 --- a/t/test-lib.sh
3 +++ b/t/test-lib.sh
4 @@ -923,7 +923,7 @@
5 then
6 GIT_EXEC_PATH=$($GIT_TEST_INSTALLED/git --exec-path) ||
7 error "Cannot run git from $GIT_TEST_INSTALLED."
8 - PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR:$PATH
9 + PATH=$GIT_TEST_INSTALLED:$GIT_BUILD_DIR/t/helper:$GIT_BUILD_DIR:$PATH
10 GIT_EXEC_PATH=${GIT_TEST_EXEC_PATH:-$GIT_EXEC_PATH}
11 else # normal case, use ../bin-wrappers only unless $with_dashes:
12 git_bin_dir="$GIT_BUILD_DIR/bin-wrappers"
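The test-lib.sh hunk above prepends $GIT_BUILD_DIR/t/helper to PATH when the suite runs against an installed git, so the compiled test helpers are still found. A hedged sketch of the same idea (function and argument names here are illustrative, not taken from the patch):

import os
import subprocess

def run_with_helper_dir(cmd, helper_dir):
    # Give the child process a PATH that starts with the helper directory,
    # mirroring what the modified test-lib.sh does for the t/helper binaries.
    env = dict(os.environ)
    env["PATH"] = helper_dir + os.pathsep + env.get("PATH", "")
    return subprocess.check_call(cmd, env=env)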
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.14.1
2 current_version = 4.15.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.14.1
12 state = in_progress
13 version = 4.15.0
16 14
@@ -1,45 +1,47 b''
1 1 self: super: {
2 2 # bump GIT version
3 3 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 name = "git-2.17.2";
4 name = "git-2.19.1";
5 5 src = self.fetchurl {
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.17.2.tar.xz";
7 sha256 = "1ghljlxmyqphx13qspy382cpl2pbkbwbhqm7w7z57r9mkhswx668";
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.19.1.tar.xz";
7 sha256 = "1dfv43lmdnxz42504jc89sihbv1d4d6kgqcz3c5ji140kfm5cl1l";
8 8 };
9 9
10 # patches come from: https://github.com/NixOS/nixpkgs/tree/master/pkgs/applications/version-management/git-and-tools/git
10 11 patches = [
11 12 ./patches/git/docbook2texi.patch
12 ./patches/git/symlinks-in-bin.patch
13 13 ./patches/git/git-sh-i18n.patch
14 14 ./patches/git/ssh-path.patch
15 ./patches/git/git-send-email-honor-PATH.patch
16 ./patches/git/installCheck-path.patch
15 17 ];
16 18
17 19 });
18 20
19 21 # Override subversion derivation to
20 22 # - activate python bindings
21 23 subversion =
22 24 let
23 25 subversionWithPython = super.subversion.override {
24 26 httpSupport = true;
25 27 pythonBindings = true;
26 28 python = self.python27Packages.python;
27 29 };
28 30 in
29 31 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
30 32 name = "subversion-1.10.2";
31 33 src = self.fetchurl {
32 34 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
33 35 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
34 36 };
35 37
36 38 ## use internal lz4/utf8proc because it is stable and shipped with SVN
37 39 configureFlags = oldAttrs.configureFlags ++ [
38 40 " --with-lz4=internal"
39 41 " --with-utf8proc=internal"
40 42 ];
41 43
42 44
43 45 });
44 46
45 47 }
@@ -1,37 +1,37 b''
1 1 This patch does two things: (1) use the right name for `docbook2texi',
2 2 and (2) make sure `gitman.info' isn't produced since it's broken (duplicate
3 3 node names).
4 4
5 diff -ru git-1.8.4-orig/Documentation/Makefile git-1.8.4/Documentation/Makefile
6 --- git-1.8.4-orig/Documentation/Makefile 2013-08-23 21:38:43.000000000 +0200
7 +++ git-1.8.4/Documentation/Makefile 2013-09-30 14:48:51.532890378 +0200
8 @@ -101,7 +101,7 @@
5 diff --git a/Documentation/Makefile b/Documentation/Makefile
6 --- a/Documentation/Makefile
7 +++ b/Documentation/Makefile
8 @@ -122,7 +122,7 @@
9 9
10 10 MAKEINFO = makeinfo
11 11 INSTALL_INFO = install-info
12 12 -DOCBOOK2X_TEXI = docbook2x-texi
13 13 +DOCBOOK2X_TEXI = docbook2texi
14 14 DBLATEX = dblatex
15 ifndef PERL_PATH
16 PERL_PATH = /usr/bin/perl
17 @@ -205,7 +205,7 @@
15 ASCIIDOC_DBLATEX_DIR = /etc/asciidoc/dblatex
16 DBLATEX_COMMON = -p $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.xsl -s $(ASCIIDOC_DBLATEX_DIR)/asciidoc-dblatex.sty
17 @@ -240,7 +240,7 @@
18 18 man5: $(DOC_MAN5)
19 19 man7: $(DOC_MAN7)
20 20
21 21 -info: git.info gitman.info
22 22 +info: git.info
23 23
24 24 pdf: user-manual.pdf
25 25
26 @@ -221,10 +221,9 @@
26 @@ -256,10 +256,9 @@
27 27
28 28 install-info: info
29 29 $(INSTALL) -d -m 755 $(DESTDIR)$(infodir)
30 30 - $(INSTALL) -m 644 git.info gitman.info $(DESTDIR)$(infodir)
31 31 + $(INSTALL) -m 644 git.info $(DESTDIR)$(infodir)
32 32 if test -r $(DESTDIR)$(infodir)/dir; then \
33 33 $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) git.info ;\
34 34 - $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) gitman.info ;\
35 35 else \
36 36 echo "No directory found in $(DESTDIR)$(infodir)" >&2 ; \
37 37 fi
@@ -1,950 +1,950 b''
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 8 name = "atomicwrites-1.2.1";
9 9 doCheck = false;
10 10 src = fetchurl {
11 11 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
12 12 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 19 name = "attrs-18.2.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 22 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
23 23 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "configobj" = super.buildPythonPackage {
52 52 name = "configobj-5.0.6";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."six"
56 56 ];
57 57 src = fetchurl {
58 58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
59 59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.bsdOriginal ];
63 63 };
64 64 };
65 65 "cov-core" = super.buildPythonPackage {
66 66 name = "cov-core-1.15.0";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."coverage"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
73 73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.mit ];
77 77 };
78 78 };
79 79 "coverage" = super.buildPythonPackage {
80 80 name = "coverage-4.5.1";
81 81 doCheck = false;
82 82 src = fetchurl {
83 83 url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
84 84 sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
85 85 };
86 86 meta = {
87 87 license = [ pkgs.lib.licenses.asl20 ];
88 88 };
89 89 };
90 90 "decorator" = super.buildPythonPackage {
91 91 name = "decorator-4.1.2";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
95 95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
99 99 };
100 100 };
101 101 "dogpile.cache" = super.buildPythonPackage {
102 102 name = "dogpile.cache-0.6.7";
103 103 doCheck = false;
104 104 src = fetchurl {
105 105 url = "https://files.pythonhosted.org/packages/ee/bd/440da735a11c6087eed7cc8747fc4b995cbac2464168682f8ee1c8e43844/dogpile.cache-0.6.7.tar.gz";
106 106 sha256 = "1aw8rx8vhb75y7zc6gi67g21sw057jdx7i8m3jq7kf3nqavxx9zw";
107 107 };
108 108 meta = {
109 109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 110 };
111 111 };
112 112 "dogpile.core" = super.buildPythonPackage {
113 113 name = "dogpile.core-0.4.1";
114 114 doCheck = false;
115 115 src = fetchurl {
116 116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
117 117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 118 };
119 119 meta = {
120 120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 121 };
122 122 };
123 123 "dulwich" = super.buildPythonPackage {
124 124 name = "dulwich-0.13.0";
125 125 doCheck = false;
126 126 src = fetchurl {
127 127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
128 128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
129 129 };
130 130 meta = {
131 131 license = [ pkgs.lib.licenses.gpl2Plus ];
132 132 };
133 133 };
134 134 "enum34" = super.buildPythonPackage {
135 135 name = "enum34-1.1.6";
136 136 doCheck = false;
137 137 src = fetchurl {
138 138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
139 139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
140 140 };
141 141 meta = {
142 142 license = [ pkgs.lib.licenses.bsdOriginal ];
143 143 };
144 144 };
145 145 "funcsigs" = super.buildPythonPackage {
146 146 name = "funcsigs-1.0.2";
147 147 doCheck = false;
148 148 src = fetchurl {
149 149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
150 150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
151 151 };
152 152 meta = {
153 153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
154 154 };
155 155 };
156 156 "gevent" = super.buildPythonPackage {
157 name = "gevent-1.3.6";
157 name = "gevent-1.3.7";
158 158 doCheck = false;
159 159 propagatedBuildInputs = [
160 160 self."greenlet"
161 161 ];
162 162 src = fetchurl {
163 url = "https://files.pythonhosted.org/packages/49/13/aa4bb3640b5167fe58875d3d7e65390cdb14f9682a41a741a566bb560842/gevent-1.3.6.tar.gz";
164 sha256 = "1ih4k73dqz2zb561hda99vbanja3m6cdch3mgxxn1mla3qwkqhbv";
163 url = "https://files.pythonhosted.org/packages/10/c1/9499b146bfa43aa4f1e0ed1bab1bd3209a4861d25650c11725036c731cf5/gevent-1.3.7.tar.gz";
164 sha256 = "0b0fr04qdk1p4sniv87fh8z5psac60x01pv054kpgi94520g81iz";
165 165 };
166 166 meta = {
167 167 license = [ pkgs.lib.licenses.mit ];
168 168 };
169 169 };
170 170 "gprof2dot" = super.buildPythonPackage {
171 171 name = "gprof2dot-2017.9.19";
172 172 doCheck = false;
173 173 src = fetchurl {
174 174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
175 175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
176 176 };
177 177 meta = {
178 178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
179 179 };
180 180 };
181 181 "greenlet" = super.buildPythonPackage {
182 182 name = "greenlet-0.4.15";
183 183 doCheck = false;
184 184 src = fetchurl {
185 185 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
186 186 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
187 187 };
188 188 meta = {
189 189 license = [ pkgs.lib.licenses.mit ];
190 190 };
191 191 };
192 192 "gunicorn" = super.buildPythonPackage {
193 193 name = "gunicorn-19.9.0";
194 194 doCheck = false;
195 195 src = fetchurl {
196 196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
197 197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
198 198 };
199 199 meta = {
200 200 license = [ pkgs.lib.licenses.mit ];
201 201 };
202 202 };
203 203 "hg-evolve" = super.buildPythonPackage {
204 204 name = "hg-evolve-8.0.1";
205 205 doCheck = false;
206 206 src = fetchurl {
207 207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
208 208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
209 209 };
210 210 meta = {
211 211 license = [ { fullName = "GPLv2+"; } ];
212 212 };
213 213 };
214 214 "hgsubversion" = super.buildPythonPackage {
215 name = "hgsubversion-1.9.2";
215 name = "hgsubversion-1.9.3";
216 216 doCheck = false;
217 217 propagatedBuildInputs = [
218 218 self."mercurial"
219 219 self."subvertpy"
220 220 ];
221 221 src = fetchurl {
222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
222 url = "https://files.pythonhosted.org/packages/a3/53/6d205e641f3e09abcf1ddaed66e5e4b20da22d0145566d440a02c9e35f0d/hgsubversion-1.9.3.tar.gz";
223 sha256 = "0nymcjlch8c4zjbncrs30p2nrbylsf25g3h6mr0zzzxr141h3sig";
224 224 };
225 225 meta = {
226 226 license = [ pkgs.lib.licenses.gpl1 ];
227 227 };
228 228 };
229 229 "hupper" = super.buildPythonPackage {
230 name = "hupper-1.3.1";
230 name = "hupper-1.4.2";
231 231 doCheck = false;
232 232 src = fetchurl {
233 url = "https://files.pythonhosted.org/packages/cf/4b/467b826a84c8594b81f414b5ab6794e981951dac90ca40abaf9ea1cb36b0/hupper-1.3.1.tar.gz";
234 sha256 = "03mf13n6i4dd60wlb9m99ddl4m3lmly70cjp7f82vdkibfl1v6l9";
233 url = "https://files.pythonhosted.org/packages/f1/75/1915dc7650b4867fa3049256e24ca8eddb5989998fcec788cf52b9812dfc/hupper-1.4.2.tar.gz";
234 sha256 = "16vb9fkiaakdpcp6pn56h3w0dwvm67bxq2k2dv4i382qhqwphdzb";
235 235 };
236 236 meta = {
237 237 license = [ pkgs.lib.licenses.mit ];
238 238 };
239 239 };
240 240 "ipdb" = super.buildPythonPackage {
241 241 name = "ipdb-0.11";
242 242 doCheck = false;
243 243 propagatedBuildInputs = [
244 244 self."setuptools"
245 245 self."ipython"
246 246 ];
247 247 src = fetchurl {
248 248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
249 249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
250 250 };
251 251 meta = {
252 252 license = [ pkgs.lib.licenses.bsdOriginal ];
253 253 };
254 254 };
255 255 "ipython" = super.buildPythonPackage {
256 256 name = "ipython-5.1.0";
257 257 doCheck = false;
258 258 propagatedBuildInputs = [
259 259 self."setuptools"
260 260 self."decorator"
261 261 self."pickleshare"
262 262 self."simplegeneric"
263 263 self."traitlets"
264 264 self."prompt-toolkit"
265 265 self."pygments"
266 266 self."pexpect"
267 267 self."backports.shutil-get-terminal-size"
268 268 self."pathlib2"
269 269 self."pexpect"
270 270 ];
271 271 src = fetchurl {
272 272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
273 273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
274 274 };
275 275 meta = {
276 276 license = [ pkgs.lib.licenses.bsdOriginal ];
277 277 };
278 278 };
279 279 "ipython-genutils" = super.buildPythonPackage {
280 280 name = "ipython-genutils-0.2.0";
281 281 doCheck = false;
282 282 src = fetchurl {
283 283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
284 284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
285 285 };
286 286 meta = {
287 287 license = [ pkgs.lib.licenses.bsdOriginal ];
288 288 };
289 289 };
290 290 "mako" = super.buildPythonPackage {
291 291 name = "mako-1.0.7";
292 292 doCheck = false;
293 293 propagatedBuildInputs = [
294 294 self."markupsafe"
295 295 ];
296 296 src = fetchurl {
297 297 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
298 298 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 304 "markupsafe" = super.buildPythonPackage {
305 305 name = "markupsafe-1.0";
306 306 doCheck = false;
307 307 src = fetchurl {
308 308 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
309 309 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
310 310 };
311 311 meta = {
312 312 license = [ pkgs.lib.licenses.bsdOriginal ];
313 313 };
314 314 };
315 315 "mercurial" = super.buildPythonPackage {
316 316 name = "mercurial-4.6.2";
317 317 doCheck = false;
318 318 src = fetchurl {
319 319 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
320 320 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
321 321 };
322 322 meta = {
323 323 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
324 324 };
325 325 };
326 326 "mock" = super.buildPythonPackage {
327 327 name = "mock-1.0.1";
328 328 doCheck = false;
329 329 src = fetchurl {
330 330 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
331 331 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
332 332 };
333 333 meta = {
334 334 license = [ pkgs.lib.licenses.bsdOriginal ];
335 335 };
336 336 };
337 337 "more-itertools" = super.buildPythonPackage {
338 338 name = "more-itertools-4.3.0";
339 339 doCheck = false;
340 340 propagatedBuildInputs = [
341 341 self."six"
342 342 ];
343 343 src = fetchurl {
344 344 url = "https://files.pythonhosted.org/packages/88/ff/6d485d7362f39880810278bdc906c13300db05485d9c65971dec1142da6a/more-itertools-4.3.0.tar.gz";
345 345 sha256 = "17h3na0rdh8xq30w4b9pizgkdxmm51896bxw600x84jflg9vaxn4";
346 346 };
347 347 meta = {
348 348 license = [ pkgs.lib.licenses.mit ];
349 349 };
350 350 };
351 351 "msgpack-python" = super.buildPythonPackage {
352 352 name = "msgpack-python-0.5.6";
353 353 doCheck = false;
354 354 src = fetchurl {
355 355 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
356 356 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
357 357 };
358 358 meta = {
359 359 license = [ pkgs.lib.licenses.asl20 ];
360 360 };
361 361 };
362 362 "pastedeploy" = super.buildPythonPackage {
363 363 name = "pastedeploy-1.5.2";
364 364 doCheck = false;
365 365 src = fetchurl {
366 366 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
367 367 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
368 368 };
369 369 meta = {
370 370 license = [ pkgs.lib.licenses.mit ];
371 371 };
372 372 };
373 373 "pathlib2" = super.buildPythonPackage {
374 374 name = "pathlib2-2.3.2";
375 375 doCheck = false;
376 376 propagatedBuildInputs = [
377 377 self."six"
378 378 self."scandir"
379 379 ];
380 380 src = fetchurl {
381 381 url = "https://files.pythonhosted.org/packages/db/a8/7d6439c1aec525ed70810abee5b7d7f3aa35347f59bc28343e8f62019aa2/pathlib2-2.3.2.tar.gz";
382 382 sha256 = "10yb0iv5x2hs631rcppkhbddx799d3h8pcwmkbh2a66ns3w71ccf";
383 383 };
384 384 meta = {
385 385 license = [ pkgs.lib.licenses.mit ];
386 386 };
387 387 };
388 388 "pexpect" = super.buildPythonPackage {
389 389 name = "pexpect-4.6.0";
390 390 doCheck = false;
391 391 propagatedBuildInputs = [
392 392 self."ptyprocess"
393 393 ];
394 394 src = fetchurl {
395 395 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
396 396 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
397 397 };
398 398 meta = {
399 399 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
400 400 };
401 401 };
402 402 "pickleshare" = super.buildPythonPackage {
403 403 name = "pickleshare-0.7.5";
404 404 doCheck = false;
405 405 propagatedBuildInputs = [
406 406 self."pathlib2"
407 407 ];
408 408 src = fetchurl {
409 409 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
410 410 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
411 411 };
412 412 meta = {
413 413 license = [ pkgs.lib.licenses.mit ];
414 414 };
415 415 };
416 416 "plaster" = super.buildPythonPackage {
417 417 name = "plaster-1.0";
418 418 doCheck = false;
419 419 propagatedBuildInputs = [
420 420 self."setuptools"
421 421 ];
422 422 src = fetchurl {
423 423 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
424 424 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
425 425 };
426 426 meta = {
427 427 license = [ pkgs.lib.licenses.mit ];
428 428 };
429 429 };
430 430 "plaster-pastedeploy" = super.buildPythonPackage {
431 431 name = "plaster-pastedeploy-0.6";
432 432 doCheck = false;
433 433 propagatedBuildInputs = [
434 434 self."pastedeploy"
435 435 self."plaster"
436 436 ];
437 437 src = fetchurl {
438 438 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
439 439 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
440 440 };
441 441 meta = {
442 442 license = [ pkgs.lib.licenses.mit ];
443 443 };
444 444 };
445 445 "pluggy" = super.buildPythonPackage {
446 446 name = "pluggy-0.8.0";
447 447 doCheck = false;
448 448 src = fetchurl {
449 449 url = "https://files.pythonhosted.org/packages/65/25/81d0de17cd00f8ca994a4e74e3c4baf7cd25072c0b831dad5c7d9d6138f8/pluggy-0.8.0.tar.gz";
450 450 sha256 = "1580p47l2zqzsza8jcnw1h2wh3vvmygk6ly8bvi4w0g8j14sjys4";
451 451 };
452 452 meta = {
453 453 license = [ pkgs.lib.licenses.mit ];
454 454 };
455 455 };
456 456 "prompt-toolkit" = super.buildPythonPackage {
457 457 name = "prompt-toolkit-1.0.15";
458 458 doCheck = false;
459 459 propagatedBuildInputs = [
460 460 self."six"
461 461 self."wcwidth"
462 462 ];
463 463 src = fetchurl {
464 464 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
465 465 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
466 466 };
467 467 meta = {
468 468 license = [ pkgs.lib.licenses.bsdOriginal ];
469 469 };
470 470 };
471 471 "psutil" = super.buildPythonPackage {
472 472 name = "psutil-5.4.7";
473 473 doCheck = false;
474 474 src = fetchurl {
475 475 url = "https://files.pythonhosted.org/packages/7d/9a/1e93d41708f8ed2b564395edfa3389f0fd6d567597401c2e5e2775118d8b/psutil-5.4.7.tar.gz";
476 476 sha256 = "0fsgmvzwbdbszkwfnqhib8jcxm4w6zyhvlxlcda0rfm5cyqj4qsv";
477 477 };
478 478 meta = {
479 479 license = [ pkgs.lib.licenses.bsdOriginal ];
480 480 };
481 481 };
482 482 "ptyprocess" = super.buildPythonPackage {
483 483 name = "ptyprocess-0.6.0";
484 484 doCheck = false;
485 485 src = fetchurl {
486 486 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
487 487 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
488 488 };
489 489 meta = {
490 490 license = [ ];
491 491 };
492 492 };
493 493 "py" = super.buildPythonPackage {
494 494 name = "py-1.6.0";
495 495 doCheck = false;
496 496 src = fetchurl {
497 497 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
498 498 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
499 499 };
500 500 meta = {
501 501 license = [ pkgs.lib.licenses.mit ];
502 502 };
503 503 };
504 504 "pygments" = super.buildPythonPackage {
505 name = "pygments-2.2.0";
505 name = "pygments-2.3.0";
506 506 doCheck = false;
507 507 src = fetchurl {
508 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
509 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
508 url = "https://files.pythonhosted.org/packages/63/a2/91c31c4831853dedca2a08a0f94d788fc26a48f7281c99a303769ad2721b/Pygments-2.3.0.tar.gz";
509 sha256 = "1z34ms51dh4jq4h3cizp7vd1dmsxcbvffkjsd2xxfav22nn6lrl2";
510 510 };
511 511 meta = {
512 512 license = [ pkgs.lib.licenses.bsdOriginal ];
513 513 };
514 514 };
515 515 "pyramid" = super.buildPythonPackage {
516 516 name = "pyramid-1.9.2";
517 517 doCheck = false;
518 518 propagatedBuildInputs = [
519 519 self."setuptools"
520 520 self."webob"
521 521 self."repoze.lru"
522 522 self."zope.interface"
523 523 self."zope.deprecation"
524 524 self."venusian"
525 525 self."translationstring"
526 526 self."pastedeploy"
527 527 self."plaster"
528 528 self."plaster-pastedeploy"
529 529 self."hupper"
530 530 ];
531 531 src = fetchurl {
532 532 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
533 533 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
534 534 };
535 535 meta = {
536 536 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
537 537 };
538 538 };
539 539 "pyramid-mako" = super.buildPythonPackage {
540 540 name = "pyramid-mako-1.0.2";
541 541 doCheck = false;
542 542 propagatedBuildInputs = [
543 543 self."pyramid"
544 544 self."mako"
545 545 ];
546 546 src = fetchurl {
547 547 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
548 548 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
549 549 };
550 550 meta = {
551 551 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
552 552 };
553 553 };
554 554 "pytest" = super.buildPythonPackage {
555 555 name = "pytest-3.8.2";
556 556 doCheck = false;
557 557 propagatedBuildInputs = [
558 558 self."py"
559 559 self."six"
560 560 self."setuptools"
561 561 self."attrs"
562 562 self."more-itertools"
563 563 self."atomicwrites"
564 564 self."pluggy"
565 565 self."funcsigs"
566 566 self."pathlib2"
567 567 ];
568 568 src = fetchurl {
569 569 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
570 570 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
571 571 };
572 572 meta = {
573 573 license = [ pkgs.lib.licenses.mit ];
574 574 };
575 575 };
576 576 "pytest-cov" = super.buildPythonPackage {
577 577 name = "pytest-cov-2.6.0";
578 578 doCheck = false;
579 579 propagatedBuildInputs = [
580 580 self."pytest"
581 581 self."coverage"
582 582 ];
583 583 src = fetchurl {
584 584 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
585 585 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
586 586 };
587 587 meta = {
588 588 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
589 589 };
590 590 };
591 591 "pytest-profiling" = super.buildPythonPackage {
592 592 name = "pytest-profiling-1.3.0";
593 593 doCheck = false;
594 594 propagatedBuildInputs = [
595 595 self."six"
596 596 self."pytest"
597 597 self."gprof2dot"
598 598 ];
599 599 src = fetchurl {
600 600 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
601 601 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
602 602 };
603 603 meta = {
604 604 license = [ pkgs.lib.licenses.mit ];
605 605 };
606 606 };
607 607 "pytest-runner" = super.buildPythonPackage {
608 608 name = "pytest-runner-4.2";
609 609 doCheck = false;
610 610 src = fetchurl {
611 611 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
612 612 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
613 613 };
614 614 meta = {
615 615 license = [ pkgs.lib.licenses.mit ];
616 616 };
617 617 };
618 618 "pytest-sugar" = super.buildPythonPackage {
619 619 name = "pytest-sugar-0.9.1";
620 620 doCheck = false;
621 621 propagatedBuildInputs = [
622 622 self."pytest"
623 623 self."termcolor"
624 624 ];
625 625 src = fetchurl {
626 626 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
627 627 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
628 628 };
629 629 meta = {
630 630 license = [ pkgs.lib.licenses.bsdOriginal ];
631 631 };
632 632 };
633 633 "pytest-timeout" = super.buildPythonPackage {
634 634 name = "pytest-timeout-1.3.2";
635 635 doCheck = false;
636 636 propagatedBuildInputs = [
637 637 self."pytest"
638 638 ];
639 639 src = fetchurl {
640 640 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
641 641 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
642 642 };
643 643 meta = {
644 644 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
645 645 };
646 646 };
647 647 "repoze.lru" = super.buildPythonPackage {
648 648 name = "repoze.lru-0.7";
649 649 doCheck = false;
650 650 src = fetchurl {
651 651 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
652 652 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
653 653 };
654 654 meta = {
655 655 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
656 656 };
657 657 };
658 658 "rhodecode-vcsserver" = super.buildPythonPackage {
659 name = "rhodecode-vcsserver-4.14.1";
659 name = "rhodecode-vcsserver-4.15.0";
660 660 buildInputs = [
661 661 self."pytest"
662 662 self."py"
663 663 self."pytest-cov"
664 664 self."pytest-sugar"
665 665 self."pytest-runner"
666 666 self."pytest-profiling"
667 667 self."pytest-timeout"
668 668 self."gprof2dot"
669 669 self."mock"
670 670 self."webtest"
671 671 self."cov-core"
672 672 self."coverage"
673 673 self."configobj"
674 674 ];
675 675 doCheck = true;
676 676 propagatedBuildInputs = [
677 677 self."configobj"
678 678 self."atomicwrites"
679 679 self."attrs"
680 680 self."dogpile.cache"
681 681 self."dogpile.core"
682 682 self."decorator"
683 683 self."dulwich"
684 684 self."hgsubversion"
685 685 self."hg-evolve"
686 686 self."mako"
687 687 self."markupsafe"
688 688 self."mercurial"
689 689 self."msgpack-python"
690 690 self."pastedeploy"
691 691 self."psutil"
692 692 self."pyramid"
693 693 self."pyramid-mako"
694 694 self."pygments"
695 695 self."pathlib2"
696 696 self."repoze.lru"
697 697 self."simplejson"
698 698 self."subprocess32"
699 699 self."subvertpy"
700 700 self."six"
701 701 self."translationstring"
702 702 self."webob"
703 703 self."zope.deprecation"
704 704 self."zope.interface"
705 705 self."gevent"
706 706 self."greenlet"
707 707 self."gunicorn"
708 708 self."waitress"
709 709 self."setproctitle"
710 710 self."ipdb"
711 711 self."ipython"
712 712 self."pytest"
713 713 self."py"
714 714 self."pytest-cov"
715 715 self."pytest-sugar"
716 716 self."pytest-runner"
717 717 self."pytest-profiling"
718 718 self."pytest-timeout"
719 719 self."gprof2dot"
720 720 self."mock"
721 721 self."webtest"
722 722 self."cov-core"
723 723 self."coverage"
724 724 ];
725 725 src = ./.;
726 726 meta = {
727 727 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
728 728 };
729 729 };
730 730 "scandir" = super.buildPythonPackage {
731 731 name = "scandir-1.9.0";
732 732 doCheck = false;
733 733 src = fetchurl {
734 734 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
735 735 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
736 736 };
737 737 meta = {
738 738 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
739 739 };
740 740 };
741 741 "setproctitle" = super.buildPythonPackage {
742 742 name = "setproctitle-1.1.10";
743 743 doCheck = false;
744 744 src = fetchurl {
745 745 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
746 746 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
747 747 };
748 748 meta = {
749 749 license = [ pkgs.lib.licenses.bsdOriginal ];
750 750 };
751 751 };
752 752 "setuptools" = super.buildPythonPackage {
753 name = "setuptools-40.4.3";
753 name = "setuptools-40.6.2";
754 754 doCheck = false;
755 755 src = fetchurl {
756 url = "https://files.pythonhosted.org/packages/6e/9c/6a003320b00ef237f94aa74e4ad66c57a7618f6c79d67527136e2544b728/setuptools-40.4.3.zip";
757 sha256 = "058v6zns4634n4al2nmmvp15j8nrgwn8wjrbdks47wk3vm05gg5c";
756 url = "https://files.pythonhosted.org/packages/b0/d1/8acb42f391cba52e35b131e442e80deffbb8d0676b93261d761b1f0ef8fb/setuptools-40.6.2.zip";
757 sha256 = "0r2c5hapirlzm34h7pl1lgkm6gk7bcrlrdj28qgsvaqg3f74vfw6";
758 758 };
759 759 meta = {
760 760 license = [ pkgs.lib.licenses.mit ];
761 761 };
762 762 };
763 763 "simplegeneric" = super.buildPythonPackage {
764 764 name = "simplegeneric-0.8.1";
765 765 doCheck = false;
766 766 src = fetchurl {
767 767 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
768 768 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
769 769 };
770 770 meta = {
771 771 license = [ pkgs.lib.licenses.zpl21 ];
772 772 };
773 773 };
774 774 "simplejson" = super.buildPythonPackage {
775 775 name = "simplejson-3.11.1";
776 776 doCheck = false;
777 777 src = fetchurl {
778 778 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
779 779 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
780 780 };
781 781 meta = {
782 782 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
783 783 };
784 784 };
785 785 "six" = super.buildPythonPackage {
786 786 name = "six-1.11.0";
787 787 doCheck = false;
788 788 src = fetchurl {
789 789 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
790 790 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
791 791 };
792 792 meta = {
793 793 license = [ pkgs.lib.licenses.mit ];
794 794 };
795 795 };
796 796 "subprocess32" = super.buildPythonPackage {
797 797 name = "subprocess32-3.5.2";
798 798 doCheck = false;
799 799 src = fetchurl {
800 800 url = "https://files.pythonhosted.org/packages/c3/5f/7117737fc7114061837a4f51670d863dd7f7f9c762a6546fa8a0dcfe61c8/subprocess32-3.5.2.tar.gz";
801 801 sha256 = "11v62shwmdys48g7ncs3a8jwwnkcl8d4zcwy6dk73z1zy2f9hazb";
802 802 };
803 803 meta = {
804 804 license = [ pkgs.lib.licenses.psfl ];
805 805 };
806 806 };
807 807 "subvertpy" = super.buildPythonPackage {
808 808 name = "subvertpy-0.10.1";
809 809 doCheck = false;
810 810 src = fetchurl {
811 811 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
812 812 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
813 813 };
814 814 meta = {
815 815 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
816 816 };
817 817 };
818 818 "termcolor" = super.buildPythonPackage {
819 819 name = "termcolor-1.1.0";
820 820 doCheck = false;
821 821 src = fetchurl {
822 822 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
823 823 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
824 824 };
825 825 meta = {
826 826 license = [ pkgs.lib.licenses.mit ];
827 827 };
828 828 };
829 829 "traitlets" = super.buildPythonPackage {
830 830 name = "traitlets-4.3.2";
831 831 doCheck = false;
832 832 propagatedBuildInputs = [
833 833 self."ipython-genutils"
834 834 self."six"
835 835 self."decorator"
836 836 self."enum34"
837 837 ];
838 838 src = fetchurl {
839 839 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
840 840 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
841 841 };
842 842 meta = {
843 843 license = [ pkgs.lib.licenses.bsdOriginal ];
844 844 };
845 845 };
846 846 "translationstring" = super.buildPythonPackage {
847 847 name = "translationstring-1.3";
848 848 doCheck = false;
849 849 src = fetchurl {
850 850 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
851 851 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
852 852 };
853 853 meta = {
854 854 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
855 855 };
856 856 };
857 857 "venusian" = super.buildPythonPackage {
858 858 name = "venusian-1.1.0";
859 859 doCheck = false;
860 860 src = fetchurl {
861 861 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
862 862 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
863 863 };
864 864 meta = {
865 865 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
866 866 };
867 867 };
868 868 "waitress" = super.buildPythonPackage {
869 869 name = "waitress-1.1.0";
870 870 doCheck = false;
871 871 src = fetchurl {
872 872 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
873 873 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
874 874 };
875 875 meta = {
876 876 license = [ pkgs.lib.licenses.zpl21 ];
877 877 };
878 878 };
879 879 "wcwidth" = super.buildPythonPackage {
880 880 name = "wcwidth-0.1.7";
881 881 doCheck = false;
882 882 src = fetchurl {
883 883 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
884 884 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
885 885 };
886 886 meta = {
887 887 license = [ pkgs.lib.licenses.mit ];
888 888 };
889 889 };
890 890 "webob" = super.buildPythonPackage {
891 891 name = "webob-1.7.4";
892 892 doCheck = false;
893 893 src = fetchurl {
894 894 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
895 895 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
896 896 };
897 897 meta = {
898 898 license = [ pkgs.lib.licenses.mit ];
899 899 };
900 900 };
901 901 "webtest" = super.buildPythonPackage {
902 902 name = "webtest-2.0.29";
903 903 doCheck = false;
904 904 propagatedBuildInputs = [
905 905 self."six"
906 906 self."webob"
907 907 self."waitress"
908 908 self."beautifulsoup4"
909 909 ];
910 910 src = fetchurl {
911 911 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
912 912 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
913 913 };
914 914 meta = {
915 915 license = [ pkgs.lib.licenses.mit ];
916 916 };
917 917 };
918 918 "zope.deprecation" = super.buildPythonPackage {
919 919 name = "zope.deprecation-4.3.0";
920 920 doCheck = false;
921 921 propagatedBuildInputs = [
922 922 self."setuptools"
923 923 ];
924 924 src = fetchurl {
925 925 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
926 926 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
927 927 };
928 928 meta = {
929 929 license = [ pkgs.lib.licenses.zpl21 ];
930 930 };
931 931 };
932 932 "zope.interface" = super.buildPythonPackage {
933 933 name = "zope.interface-4.5.0";
934 934 doCheck = false;
935 935 propagatedBuildInputs = [
936 936 self."setuptools"
937 937 ];
938 938 src = fetchurl {
939 939 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
940 940 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
941 941 };
942 942 meta = {
943 943 license = [ pkgs.lib.licenses.zpl21 ];
944 944 };
945 945 };
946 946
947 947 ### Test requirements
948 948
949 949
950 950 }
@@ -1,48 +1,48 b''
1 1 ## dependencies
2 2
3 3 # our custom configobj
4 4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
5 5 atomicwrites==1.2.1
6 6 attrs==18.2.0
7 7 dogpile.cache==0.6.7
8 8 dogpile.core==0.4.1
9 9 decorator==4.1.2
10 10 dulwich==0.13.0
11 hgsubversion==1.9.2
11 hgsubversion==1.9.3
12 12 hg-evolve==8.0.1
13 13 mako==1.0.7
14 14 markupsafe==1.0.0
15 15 mercurial==4.6.2
16 16 msgpack-python==0.5.6
17 17
18 18 pastedeploy==1.5.2
19 19 psutil==5.4.7
20 20 pyramid==1.9.2
21 21 pyramid-mako==1.0.2
22 22
23 pygments==2.2.0
23 pygments==2.3.0
24 24 pathlib2==2.3.2
25 25 repoze.lru==0.7
26 26 simplejson==3.11.1
27 27 subprocess32==3.5.2
28 28 subvertpy==0.10.1
29 29
30 30 six==1.11.0
31 31 translationstring==1.3
32 32 webob==1.7.4
33 33 zope.deprecation==4.3.0
34 34 zope.interface==4.5.0
35 35
36 36 ## http servers
37 gevent==1.3.6
37 gevent==1.3.7
38 38 greenlet==0.4.15
39 39 gunicorn==19.9.0
40 40 waitress==1.1.0
41 41 setproctitle==1.1.10
42 42
43 43 ## debug
44 44 ipdb==0.11.0
45 45 ipython==5.1.0
46 46
47 47 ## test related requirements
48 48 -r requirements_test.txt
@@ -1,1 +1,1 b''
1 4.14.1 No newline at end of file
1 4.15.0 No newline at end of file
@@ -1,728 +1,719 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import collections
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 def reraise_safe_exceptions(func):
52 52 """Converts Dulwich exceptions to something neutral."""
53 53 @wraps(func)
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 58 ObjectMissing) as e:
59 59 exc = exceptions.LookupException(e)
60 60 raise exc(e)
61 61 except (HangupException, UnexpectedCommandError) as e:
62 62 exc = exceptions.VcsException(e)
63 63 raise exc(e)
64 64 except Exception as e:
65 65 # NOTE(marcink): because of how dulwich handles some exceptions
66 66 # (KeyError on empty repos), we cannot track this and catch all
67 67 # exceptions, it's an exceptions from other handlers
68 68 #if not hasattr(e, '_vcs_kind'):
69 69 #log.exception("Unhandled exception in git remote call")
70 70 #raise_from_original(exceptions.UnhandledException)
71 71 raise
72 72 return wrapper
73 73
74 74
75 75 class Repo(DulwichRepo):
76 76 """
77 77 A wrapper for dulwich Repo class.
78 78
79 79 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
80 80 "Too many open files" error. We need to close all opened file descriptors
81 81 once the repo object is destroyed.
82 82
83 83 TODO: mikhail: please check if we need this wrapper after updating dulwich
84 84 to 0.12.0 +
85 85 """
86 86 def __del__(self):
87 87 if hasattr(self, 'object_store'):
88 88 self.close()
89 89
90 90
91 91 class GitFactory(RepoFactory):
92 92 repo_type = 'git'
93 93
94 94 def _create_repo(self, wire, create):
95 95 repo_path = str_to_dulwich(wire['path'])
96 96 return Repo(repo_path)
97 97
98 98
99 99 class GitRemote(object):
100 100
101 101 def __init__(self, factory):
102 102 self._factory = factory
103 103 self.peeled_ref_marker = '^{}'
104 104 self._bulk_methods = {
105 105 "author": self.commit_attribute,
106 106 "date": self.get_object_attrs,
107 107 "message": self.commit_attribute,
108 108 "parents": self.commit_attribute,
109 109 "_commit": self.revision,
110 110 }
111 111
112 112 def _wire_to_config(self, wire):
113 113 if 'config' in wire:
114 114 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
115 115 return {}
116 116
117 117 def _assign_ref(self, wire, ref, commit_id):
118 118 repo = self._factory.repo(wire)
119 119 repo[ref] = commit_id
120 120
121 121 @reraise_safe_exceptions
122 122 def add_object(self, wire, content):
123 123 repo = self._factory.repo(wire)
124 124 blob = objects.Blob()
125 125 blob.set_raw_string(content)
126 126 repo.object_store.add_object(blob)
127 127 return blob.id
128 128
129 129 @reraise_safe_exceptions
130 130 def assert_correct_path(self, wire):
131 131 path = wire.get('path')
132 132 try:
133 133 self._factory.repo(wire)
134 134 except NotGitRepository as e:
135 135 tb = traceback.format_exc()
136 136 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
137 137 return False
138 138
139 139 return True
140 140
141 141 @reraise_safe_exceptions
142 142 def bare(self, wire):
143 143 repo = self._factory.repo(wire)
144 144 return repo.bare
145 145
146 146 @reraise_safe_exceptions
147 147 def blob_as_pretty_string(self, wire, sha):
148 148 repo = self._factory.repo(wire)
149 149 return repo[sha].as_pretty_string()
150 150
151 151 @reraise_safe_exceptions
152 152 def blob_raw_length(self, wire, sha):
153 153 repo = self._factory.repo(wire)
154 154 blob = repo[sha]
155 155 return blob.raw_length()
156 156
157 157 def _parse_lfs_pointer(self, raw_content):
158 158
159 159 spec_string = 'version https://git-lfs.github.com/spec'
160 160 if raw_content and raw_content.startswith(spec_string):
161 161 pattern = re.compile(r"""
162 162 (?:\n)?
163 163 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
164 164 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
165 165 ^size[ ](?P<oid_size>[0-9]+)\n
166 166 (?:\n)?
167 167 """, re.VERBOSE | re.MULTILINE)
168 168 match = pattern.match(raw_content)
169 169 if match:
170 170 return match.groupdict()
171 171
172 172 return {}
173 173
174 174 @reraise_safe_exceptions
175 175 def is_large_file(self, wire, sha):
176 176 repo = self._factory.repo(wire)
177 177 blob = repo[sha]
178 178 return self._parse_lfs_pointer(blob.as_raw_string())
179 179
180 180 @reraise_safe_exceptions
181 181 def in_largefiles_store(self, wire, oid):
182 182 repo = self._factory.repo(wire)
183 183 conf = self._wire_to_config(wire)
184 184
185 185 store_location = conf.get('vcs_git_lfs_store_location')
186 186 if store_location:
187 187 repo_name = repo.path
188 188 store = LFSOidStore(
189 189 oid=oid, repo=repo_name, store_location=store_location)
190 190 return store.has_oid()
191 191
192 192 return False
193 193
194 194 @reraise_safe_exceptions
195 195 def store_path(self, wire, oid):
196 196 repo = self._factory.repo(wire)
197 197 conf = self._wire_to_config(wire)
198 198
199 199 store_location = conf.get('vcs_git_lfs_store_location')
200 200 if store_location:
201 201 repo_name = repo.path
202 202 store = LFSOidStore(
203 203 oid=oid, repo=repo_name, store_location=store_location)
204 204 return store.oid_path
205 205 raise ValueError('Unable to fetch oid with path {}'.format(oid))
206 206
207 207 @reraise_safe_exceptions
208 208 def bulk_request(self, wire, rev, pre_load):
209 209 result = {}
210 210 for attr in pre_load:
211 211 try:
212 212 method = self._bulk_methods[attr]
213 213 args = [wire, rev]
214 214 if attr == "date":
215 215 args.extend(["commit_time", "commit_timezone"])
216 216 elif attr in ["author", "message", "parents"]:
217 217 args.append(attr)
218 218 result[attr] = method(*args)
219 219 except KeyError as e:
220 220 raise exceptions.VcsException(e)(
221 221 "Unknown bulk attribute: %s" % attr)
222 222 return result
223 223
224 224 def _build_opener(self, url):
225 225 handlers = []
226 226 url_obj = url_parser(url)
227 227 _, authinfo = url_obj.authinfo()
228 228
229 229 if authinfo:
230 230 # create a password manager
231 231 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
232 232 passmgr.add_password(*authinfo)
233 233
234 234 handlers.extend((httpbasicauthhandler(passmgr),
235 235 httpdigestauthhandler(passmgr)))
236 236
237 237 return urllib2.build_opener(*handlers)
238 238
239 239 @reraise_safe_exceptions
240 240 def check_url(self, url, config):
241 241 url_obj = url_parser(url)
242 242 test_uri, _ = url_obj.authinfo()
243 243 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
244 244 url_obj.query = obfuscate_qs(url_obj.query)
245 245 cleaned_uri = str(url_obj)
246 246 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
247 247
248 248 if not test_uri.endswith('info/refs'):
249 249 test_uri = test_uri.rstrip('/') + '/info/refs'
250 250
251 251 o = self._build_opener(url)
252 252 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
253 253
254 254 q = {"service": 'git-upload-pack'}
255 255 qs = '?%s' % urllib.urlencode(q)
256 256 cu = "%s%s" % (test_uri, qs)
257 257 req = urllib2.Request(cu, None, {})
258 258
259 259 try:
260 260 log.debug("Trying to open URL %s", cleaned_uri)
261 261 resp = o.open(req)
262 262 if resp.code != 200:
263 263 raise exceptions.URLError()('Return Code is not 200')
264 264 except Exception as e:
265 265 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
266 266 # means it cannot be cloned
267 267 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
268 268
269 269 # now detect if it's proper git repo
270 270 gitdata = resp.read()
271 271 if 'service=git-upload-pack' in gitdata:
272 272 pass
273 273 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
274 274 # old style git can return some other format !
275 275 pass
276 276 else:
277 277 raise exceptions.URLError()(
278 278 "url [%s] does not look like a git repository" % (cleaned_uri,))
279 279
280 280 return True
281 281
282 282 @reraise_safe_exceptions
283 283 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
284 284 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
285 285 remote_refs = self.pull(wire, url, apply_refs=False)
286 286 repo = self._factory.repo(wire)
287 287 if isinstance(valid_refs, list):
288 288 valid_refs = tuple(valid_refs)
289 289
290 290 for k in remote_refs:
291 291 # only parse heads/tags and skip so called deferred tags
292 292 if k.startswith(valid_refs) and not k.endswith(deferred):
293 293 repo[k] = remote_refs[k]
294 294
295 295 if update_after_clone:
296 296 # we want to checkout HEAD
297 297 repo["HEAD"] = remote_refs["HEAD"]
298 298 index.build_index_from_tree(repo.path, repo.index_path(),
299 299 repo.object_store, repo["HEAD"].tree)
300 300
301 301 # TODO: this is quite complex, check if that can be simplified
302 302 @reraise_safe_exceptions
303 303 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
304 304 repo = self._factory.repo(wire)
305 305 object_store = repo.object_store
306 306
307 307 # Create tree and populates it with blobs
308 308 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
309 309
310 310 for node in updated:
311 311 # Compute subdirs if needed
312 312 dirpath, nodename = vcspath.split(node['path'])
313 313 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
314 314 parent = commit_tree
315 315 ancestors = [('', parent)]
316 316
317 317 # Tries to dig for the deepest existing tree
318 318 while dirnames:
319 319 curdir = dirnames.pop(0)
320 320 try:
321 321 dir_id = parent[curdir][1]
322 322 except KeyError:
323 323 # put curdir back into dirnames and stops
324 324 dirnames.insert(0, curdir)
325 325 break
326 326 else:
327 327 # If found, updates parent
328 328 parent = repo[dir_id]
329 329 ancestors.append((curdir, parent))
330 330 # Now parent is deepest existing tree and we need to create
331 331 # subtrees for dirnames (in reverse order)
332 332 # [this only applies for nodes from added]
333 333 new_trees = []
334 334
335 335 blob = objects.Blob.from_string(node['content'])
336 336
337 337 if dirnames:
338 338 # If there are trees which should be created we need to build
339 339 # them now (in reverse order)
340 340 reversed_dirnames = list(reversed(dirnames))
341 341 curtree = objects.Tree()
342 342 curtree[node['node_path']] = node['mode'], blob.id
343 343 new_trees.append(curtree)
344 344 for dirname in reversed_dirnames[:-1]:
345 345 newtree = objects.Tree()
346 346 newtree[dirname] = (DIR_STAT, curtree.id)
347 347 new_trees.append(newtree)
348 348 curtree = newtree
349 349 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
350 350 else:
351 351 parent.add(
352 352 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
353 353
354 354 new_trees.append(parent)
355 355 # Update ancestors
356 356 reversed_ancestors = reversed(
357 357 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
358 358 for parent, tree, path in reversed_ancestors:
359 359 parent[path] = (DIR_STAT, tree.id)
360 360 object_store.add_object(tree)
361 361
362 362 object_store.add_object(blob)
363 363 for tree in new_trees:
364 364 object_store.add_object(tree)
365 365
366 366 for node_path in removed:
367 367 paths = node_path.split('/')
368 368 tree = commit_tree
369 369 trees = [tree]
370 370 # Traverse deep into the forest...
371 371 for path in paths:
372 372 try:
373 373 obj = repo[tree[path][1]]
374 374 if isinstance(obj, objects.Tree):
375 375 trees.append(obj)
376 376 tree = obj
377 377 except KeyError:
378 378 break
379 379 # Cut down the blob and all rotten trees on the way back...
380 380 for path, tree in reversed(zip(paths, trees)):
381 381 del tree[path]
382 382 if tree:
383 383 # This tree still has elements - don't remove it or any
384 384 # of its parents
385 385 break
386 386
387 387 object_store.add_object(commit_tree)
388 388
389 389 # Create commit
390 390 commit = objects.Commit()
391 391 commit.tree = commit_tree.id
392 392 for k, v in commit_data.iteritems():
393 393 setattr(commit, k, v)
394 394 object_store.add_object(commit)
395 395
396 396 ref = 'refs/heads/%s' % branch
397 397 repo.refs[ref] = commit.id
398 398
399 399 return commit.id
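# A minimal, hedged dulwich sketch of the blob/tree/commit wiring used by
# commit() above. The repository path, file name and author values below are
# placeholders, and the repository is assumed to already exist on disk.
from dulwich.repo import Repo
from dulwich.objects import Blob, Tree, Commit

example_repo = Repo('/tmp/example-repo')
example_blob = Blob.from_string('hello world\n')
example_tree = Tree()
example_tree.add('hello.txt', 0o100644, example_blob.id)  # (name, mode, hexsha)

example_commit = Commit()
example_commit.tree = example_tree.id
example_commit.author = example_commit.committer = 'Example <example@example.com>'
example_commit.author_time = example_commit.commit_time = 0
example_commit.author_timezone = example_commit.commit_timezone = 0
example_commit.encoding = 'UTF-8'
example_commit.message = 'illustrative commit'

for obj in (example_blob, example_tree, example_commit):
    example_repo.object_store.add_object(obj)
example_repo.refs['refs/heads/example'] = example_commit.id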
400 400
401 401 @reraise_safe_exceptions
402 402 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
403 403 if url != 'default' and '://' not in url:
404 404 client = LocalGitClient(url)
405 405 else:
406 406 url_obj = url_parser(url)
407 407 o = self._build_opener(url)
408 408 url, _ = url_obj.authinfo()
409 409 client = HttpGitClient(base_url=url, opener=o)
410 410 repo = self._factory.repo(wire)
411 411
412 412 determine_wants = repo.object_store.determine_wants_all
413 413 if refs:
414 414 def determine_wants_requested(references):
415 415 return [references[r] for r in references if r in refs]
416 416 determine_wants = determine_wants_requested
417 417
418 418 try:
419 419 remote_refs = client.fetch(
420 420 path=url, target=repo, determine_wants=determine_wants)
421 421 except NotGitRepository as e:
422 422 log.warning(
423 423 'Trying to fetch from "%s" failed, not a Git repository.', url)
424 424 # Exception can contain unicode which we convert
425 425 raise exceptions.AbortException(e)(repr(e))
426 426
427 427 # mikhail: client.fetch() returns all the remote refs, but fetches only
428 428 # refs filtered by the `determine_wants` function. We need to filter the result
429 429 # as well
430 430 if refs:
431 431 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
432 432
433 433 if apply_refs:
434 434 # TODO: johbo: Needs proper test coverage with a git repository
435 435 # that contains a tag object, so that we would end up with
436 436 # a peeled ref at this point.
437 437 for k in remote_refs:
438 438 if k.endswith(self.peeled_ref_marker):
439 439 log.debug("Skipping peeled reference %s", k)
440 440 continue
441 441 repo[k] = remote_refs[k]
442 442
443 443 if refs and not update_after:
444 444 # mikhail: explicitly set the head to the last ref.
445 445 repo['HEAD'] = remote_refs[refs[-1]]
446 446
447 447 if update_after:
448 448 # we want to checkout HEAD
449 449 repo["HEAD"] = remote_refs["HEAD"]
450 450 index.build_index_from_tree(repo.path, repo.index_path(),
451 451 repo.object_store, repo["HEAD"].tree)
452 452 return remote_refs
453 453
454 454 @reraise_safe_exceptions
455 455 def sync_fetch(self, wire, url, refs=None):
456 456 repo = self._factory.repo(wire)
457 457 if refs and not isinstance(refs, (list, tuple)):
458 458 refs = [refs]
459 459
460 460 # get all remote refs we'll use to fetch later
461 461 output, __ = self.run_git_command(
462 462 wire, ['ls-remote', url], fail_on_stderr=False,
463 463 _copts=['-c', 'core.askpass=""'],
464 464 extra_env={'GIT_TERMINAL_PROMPT': '0'})
465 465
466 466 remote_refs = collections.OrderedDict()
467 467 fetch_refs = []
468 468
469 469 for ref_line in output.splitlines():
470 470 sha, ref = ref_line.split('\t')
471 471 sha = sha.strip()
472 472 if ref in remote_refs:
473 473 # duplicate, skip
474 474 continue
475 475 if ref.endswith(self.peeled_ref_marker):
476 476 log.debug("Skipping peeled reference %s", ref)
477 477 continue
478 478 # don't sync HEAD
479 479 if ref in ['HEAD']:
480 480 continue
481 481
482 482 remote_refs[ref] = sha
483 483
484 484 if refs and sha in refs:
485 485 # we filter fetch using our specified refs
486 486 fetch_refs.append('{}:{}'.format(ref, ref))
487 487 elif not refs:
488 488 fetch_refs.append('{}:{}'.format(ref, ref))
489 489
490 490 if fetch_refs:
491 491 _out, _err = self.run_git_command(
492 492 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs,
493 493 fail_on_stderr=False,
494 494 _copts=['-c', 'core.askpass=""'],
495 495 extra_env={'GIT_TERMINAL_PROMPT': '0'})
496 496
497 497 return remote_refs
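# A hedged illustration of the `ls-remote` parsing done in sync_fetch() above;
# the sha and ref values below are made up. The resulting '<ref>:<ref>'
# refspec mirrors what the method passes to `git fetch`.
example_line = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\trefs/heads/master'
example_sha, example_ref = example_line.split('\t')
example_refspec = '{}:{}'.format(example_ref, example_ref)  # 'refs/heads/master:refs/heads/master'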
498 498
499 499 @reraise_safe_exceptions
500 500 def sync_push(self, wire, url, refs=None):
501 501 if not self.check_url(url, wire):
502 502 return
503 503
504 504 repo = self._factory.repo(wire)
505 505 self.run_git_command(
506 506 wire, ['push', url, '--mirror'], fail_on_stderr=False,
507 507 _copts=['-c', 'core.askpass=""'],
508 508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
509 509
510 510 @reraise_safe_exceptions
511 511 def get_remote_refs(self, wire, url):
512 512 repo = Repo(url)
513 513 return repo.get_refs()
514 514
515 515 @reraise_safe_exceptions
516 516 def get_description(self, wire):
517 517 repo = self._factory.repo(wire)
518 518 return repo.get_description()
519 519
520 520 @reraise_safe_exceptions
521 def get_file_history(self, wire, file_path, commit_id, limit):
522 repo = self._factory.repo(wire)
523 include = [commit_id]
524 paths = [file_path]
525
526 walker = repo.get_walker(include, paths=paths, max_entries=limit)
527 return [x.commit.id for x in walker]
528
529 @reraise_safe_exceptions
530 521 def get_missing_revs(self, wire, rev1, rev2, path2):
531 522 repo = self._factory.repo(wire)
532 523 LocalGitClient(thin_packs=False).fetch(path2, repo)
533 524
534 525 wire_remote = wire.copy()
535 526 wire_remote['path'] = path2
536 527 repo_remote = self._factory.repo(wire_remote)
537 528 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
538 529
539 530 revs = [
540 531 x.commit.id
541 532 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
542 533 return revs
543 534
544 535 @reraise_safe_exceptions
545 536 def get_object(self, wire, sha):
546 537 repo = self._factory.repo(wire)
547 538 obj = repo.get_object(sha)
548 539 commit_id = obj.id
549 540
550 541 if isinstance(obj, Tag):
551 542 commit_id = obj.object[1]
552 543
553 544 return {
554 545 'id': obj.id,
555 546 'type': obj.type_name,
556 547 'commit_id': commit_id
557 548 }
558 549
559 550 @reraise_safe_exceptions
560 551 def get_object_attrs(self, wire, sha, *attrs):
561 552 repo = self._factory.repo(wire)
562 553 obj = repo.get_object(sha)
563 554 return list(getattr(obj, a) for a in attrs)
564 555
565 556 @reraise_safe_exceptions
566 557 def get_refs(self, wire):
567 558 repo = self._factory.repo(wire)
568 559 result = {}
569 560 for ref, sha in repo.refs.as_dict().items():
570 561 peeled_sha = repo.get_peeled(ref)
571 562 result[ref] = peeled_sha
572 563 return result
573 564
574 565 @reraise_safe_exceptions
575 566 def get_refs_path(self, wire):
576 567 repo = self._factory.repo(wire)
577 568 return repo.refs.path
578 569
579 570 @reraise_safe_exceptions
580 571 def head(self, wire, show_exc=True):
581 572 repo = self._factory.repo(wire)
582 573 try:
583 574 return repo.head()
584 575 except Exception:
585 576 if show_exc:
586 577 raise
587 578
588 579 @reraise_safe_exceptions
589 580 def init(self, wire):
590 581 repo_path = str_to_dulwich(wire['path'])
591 582 self.repo = Repo.init(repo_path)
592 583
593 584 @reraise_safe_exceptions
594 585 def init_bare(self, wire):
595 586 repo_path = str_to_dulwich(wire['path'])
596 587 self.repo = Repo.init_bare(repo_path)
597 588
598 589 @reraise_safe_exceptions
599 590 def revision(self, wire, rev):
600 591 repo = self._factory.repo(wire)
601 592 obj = repo[rev]
602 593 obj_data = {
603 594 'id': obj.id,
604 595 }
605 596 try:
606 597 obj_data['tree'] = obj.tree
607 598 except AttributeError:
608 599 pass
609 600 return obj_data
610 601
611 602 @reraise_safe_exceptions
612 603 def commit_attribute(self, wire, rev, attr):
613 604 repo = self._factory.repo(wire)
614 605 obj = repo[rev]
615 606 return getattr(obj, attr)
616 607
617 608 @reraise_safe_exceptions
618 609 def set_refs(self, wire, key, value):
619 610 repo = self._factory.repo(wire)
620 611 repo.refs[key] = value
621 612
622 613 @reraise_safe_exceptions
623 614 def remove_ref(self, wire, key):
624 615 repo = self._factory.repo(wire)
625 616 del repo.refs[key]
626 617
627 618 @reraise_safe_exceptions
628 619 def tree_changes(self, wire, source_id, target_id):
629 620 repo = self._factory.repo(wire)
630 621 source = repo[source_id].tree if source_id else None
631 622 target = repo[target_id].tree
632 623 result = repo.object_store.tree_changes(source, target)
633 624 return list(result)
634 625
635 626 @reraise_safe_exceptions
636 627 def tree_items(self, wire, tree_id):
637 628 repo = self._factory.repo(wire)
638 629 tree = repo[tree_id]
639 630
640 631 result = []
641 632 for item in tree.iteritems():
642 633 item_sha = item.sha
643 634 item_mode = item.mode
644 635
645 636 if FILE_MODE(item_mode) == GIT_LINK:
646 637 item_type = "link"
647 638 else:
648 639 item_type = repo[item_sha].type_name
649 640
650 641 result.append((item.path, item_mode, item_sha, item_type))
651 642 return result
652 643
653 644 @reraise_safe_exceptions
654 645 def update_server_info(self, wire):
655 646 repo = self._factory.repo(wire)
656 647 update_server_info(repo)
657 648
658 649 @reraise_safe_exceptions
659 650 def discover_git_version(self):
660 651 stdout, _ = self.run_git_command(
661 652 {}, ['--version'], _bare=True, _safe=True)
662 653 prefix = 'git version'
663 654 if stdout.startswith(prefix):
664 655 stdout = stdout[len(prefix):]
665 656 return stdout.strip()
666 657
667 658 @reraise_safe_exceptions
668 659 def run_git_command(self, wire, cmd, **opts):
669 660 path = wire.get('path', None)
670 661
671 662 if path and os.path.isdir(path):
672 663 opts['cwd'] = path
673 664
674 665 if '_bare' in opts:
675 666 _copts = []
676 667 del opts['_bare']
677 668 else:
678 669 _copts = ['-c', 'core.quotepath=false', ]
679 670 safe_call = False
680 671 if '_safe' in opts:
681 672 # no exc on failure
682 673 del opts['_safe']
683 674 safe_call = True
684 675
685 676 if '_copts' in opts:
686 677 _copts.extend(opts['_copts'] or [])
687 678 del opts['_copts']
688 679
689 680 gitenv = os.environ.copy()
690 681 gitenv.update(opts.pop('extra_env', {}))
691 682 # need to clean up GIT_DIR from the environment
692 683 if 'GIT_DIR' in gitenv:
693 684 del gitenv['GIT_DIR']
694 685 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
695 686 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
696 687
697 688 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
698 689 _opts = {'env': gitenv, 'shell': False}
699 690
700 691 try:
701 692 _opts.update(opts)
702 693 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
703 694
704 695 return ''.join(p), ''.join(p.error)
705 696 except (EnvironmentError, OSError) as err:
706 697 cmd = ' '.join(cmd) # human friendly CMD
707 698 tb_err = ("Couldn't run git command (%s).\n"
708 699 "Original error was:%s\n"
709 700 "Call options:%s\n"
710 701 % (cmd, err, _opts))
711 702 log.exception(tb_err)
712 703 if safe_call:
713 704 return '', err
714 705 else:
715 706 raise exceptions.VcsException()(tb_err)
716 707
717 708 @reraise_safe_exceptions
718 709 def install_hooks(self, wire, force=False):
719 710 from vcsserver.hook_utils import install_git_hooks
720 711 repo = self._factory.repo(wire)
721 712 return install_git_hooks(repo.path, repo.bare, force_create=force)
722 713
723 714
724 715 def str_to_dulwich(value):
725 716 """
726 717 Dulwich 0.10.1a requires `unicode` objects to be passed in.
727 718 """
728 719 return value.decode(settings.WIRE_ENCODING)
@@ -1,795 +1,795 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 baseui.setconfig('ui', 'paginate', 'never')
57 57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 58 # signal in a non-main thread, thus generating a ValueError.
59 59 baseui.setconfig('worker', 'numcpus', 1)
60 60
61 61 # If there is no config for the largefiles extension, we explicitly disable
62 62 # it here. This overrides settings from repositories hgrc file. Recent
63 63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 64 # repo.
65 65 if not baseui.hasconfig('extensions', 'largefiles'):
66 66 log.debug('Explicitly disable largefiles extension for repo.')
67 67 baseui.setconfig('extensions', 'largefiles', '!')
68 68
69 69 return baseui
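# A small usage sketch of make_ui_from_config() above; the (section, option,
# value) triples are placeholders showing the shape the function iterates over.
example_config = [
    ('ui', 'username', 'Example <example@example.com>'),
    ('extensions', 'largefiles', '!'),
]
example_baseui = make_ui_from_config(example_config)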
70 70
71 71
72 72 def reraise_safe_exceptions(func):
73 73 """Decorator for converting mercurial exceptions to something neutral."""
74 74 def wrapper(*args, **kwargs):
75 75 try:
76 76 return func(*args, **kwargs)
77 77 except (Abort, InterventionRequired) as e:
78 78 raise_from_original(exceptions.AbortException(e))
79 79 except RepoLookupError as e:
80 80 raise_from_original(exceptions.LookupException(e))
81 81 except RequirementError as e:
82 82 raise_from_original(exceptions.RequirementException(e))
83 83 except RepoError as e:
84 84 raise_from_original(exceptions.VcsException(e))
85 85 except LookupError as e:
86 86 raise_from_original(exceptions.LookupException(e))
87 87 except Exception as e:
88 88 if not hasattr(e, '_vcs_kind'):
89 89 log.exception("Unhandled exception in hg remote call")
90 90 raise_from_original(exceptions.UnhandledException(e))
91 91
92 92 raise
93 93 return wrapper
94 94
95 95
96 96 class MercurialFactory(RepoFactory):
97 97 repo_type = 'hg'
98 98
99 99 def _create_config(self, config, hooks=True):
100 100 if not hooks:
101 101 hooks_to_clean = frozenset((
102 102 'changegroup.repo_size', 'preoutgoing.pre_pull',
103 103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
104 104 new_config = []
105 105 for section, option, value in config:
106 106 if section == 'hooks' and option in hooks_to_clean:
107 107 continue
108 108 new_config.append((section, option, value))
109 109 config = new_config
110 110
111 111 baseui = make_ui_from_config(config)
112 112 return baseui
113 113
114 114 def _create_repo(self, wire, create):
115 115 baseui = self._create_config(wire["config"])
116 116 return localrepository(baseui, wire["path"], create)
117 117
118 118
119 119 class HgRemote(object):
120 120
121 121 def __init__(self, factory):
122 122 self._factory = factory
123 123
124 124 self._bulk_methods = {
125 125 "affected_files": self.ctx_files,
126 126 "author": self.ctx_user,
127 127 "branch": self.ctx_branch,
128 128 "children": self.ctx_children,
129 129 "date": self.ctx_date,
130 130 "message": self.ctx_description,
131 131 "parents": self.ctx_parents,
132 132 "status": self.ctx_status,
133 133 "obsolete": self.ctx_obsolete,
134 134 "phase": self.ctx_phase,
135 135 "hidden": self.ctx_hidden,
136 136 "_file_paths": self.ctx_list,
137 137 }
138 138
139 139 @reraise_safe_exceptions
140 140 def discover_hg_version(self):
141 141 from mercurial import util
142 142 return util.version()
143 143
144 144 @reraise_safe_exceptions
145 145 def archive_repo(self, archive_path, mtime, file_info, kind):
146 146 if kind == "tgz":
147 147 archiver = archival.tarit(archive_path, mtime, "gz")
148 148 elif kind == "tbz2":
149 149 archiver = archival.tarit(archive_path, mtime, "bz2")
150 150 elif kind == 'zip':
151 151 archiver = archival.zipit(archive_path, mtime)
152 152 else:
153 153 raise exceptions.ArchiveException()(
154 154 'Remote does not support: "%s".' % kind)
155 155
156 156 for f_path, f_mode, f_is_link, f_content in file_info:
157 157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 158 archiver.done()
159 159
160 160 @reraise_safe_exceptions
161 161 def bookmarks(self, wire):
162 162 repo = self._factory.repo(wire)
163 163 return dict(repo._bookmarks)
164 164
165 165 @reraise_safe_exceptions
166 166 def branches(self, wire, normal, closed):
167 167 repo = self._factory.repo(wire)
168 168 iter_branches = repo.branchmap().iterbranches()
169 169 bt = {}
170 170 for branch_name, _heads, tip, is_closed in iter_branches:
171 171 if normal and not is_closed:
172 172 bt[branch_name] = tip
173 173 if closed and is_closed:
174 174 bt[branch_name] = tip
175 175
176 176 return bt
177 177
178 178 @reraise_safe_exceptions
179 179 def bulk_request(self, wire, rev, pre_load):
180 180 result = {}
181 181 for attr in pre_load:
182 182 try:
183 183 method = self._bulk_methods[attr]
184 184 result[attr] = method(wire, rev)
185 185 except KeyError as e:
186 186 raise exceptions.VcsException(e)(
187 187 'Unknown bulk attribute: "%s"' % attr)
188 188 return result
189 189
190 190 @reraise_safe_exceptions
191 191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 193 clone(baseui, source, dest, noupdate=not update_after_clone)
194 194
195 195 @reraise_safe_exceptions
196 196 def commitctx(
197 197 self, wire, message, parents, commit_time, commit_timezone,
198 198 user, files, extra, removed, updated):
199 199
200 200 def _filectxfn(_repo, memctx, path):
201 201 """
202 202 Marks the given path as added/changed/removed in the given _repo. This is
203 203 for the internal mercurial commit function.
204 204 """
205 205
206 206 # check if this path is removed
207 207 if path in removed:
208 208 # returning None is a way to mark node for removal
209 209 return None
210 210
211 211 # check if this path is added
212 212 for node in updated:
213 213 if node['path'] == path:
214 214 return memfilectx(
215 215 _repo,
216 216 changectx=memctx,
217 217 path=node['path'],
218 218 data=node['content'],
219 219 islink=False,
220 220 isexec=bool(node['mode'] & stat.S_IXUSR),
221 221 copied=False)
222 222
223 223 raise exceptions.AbortException()(
224 224 "Given path haven't been marked as added, "
225 225 "changed or removed (%s)" % path)
226 226
227 227 repo = self._factory.repo(wire)
228 228
229 229 commit_ctx = memctx(
230 230 repo=repo,
231 231 parents=parents,
232 232 text=message,
233 233 files=files,
234 234 filectxfn=_filectxfn,
235 235 user=user,
236 236 date=(commit_time, commit_timezone),
237 237 extra=extra)
238 238
239 239 n = repo.commitctx(commit_ctx)
240 240 new_id = hex(n)
241 241
242 242 return new_id
243 243
244 244 @reraise_safe_exceptions
245 245 def ctx_branch(self, wire, revision):
246 246 repo = self._factory.repo(wire)
247 247 ctx = repo[revision]
248 248 return ctx.branch()
249 249
250 250 @reraise_safe_exceptions
251 251 def ctx_children(self, wire, revision):
252 252 repo = self._factory.repo(wire)
253 253 ctx = repo[revision]
254 254 return [child.rev() for child in ctx.children()]
255 255
256 256 @reraise_safe_exceptions
257 257 def ctx_date(self, wire, revision):
258 258 repo = self._factory.repo(wire)
259 259 ctx = repo[revision]
260 260 return ctx.date()
261 261
262 262 @reraise_safe_exceptions
263 263 def ctx_description(self, wire, revision):
264 264 repo = self._factory.repo(wire)
265 265 ctx = repo[revision]
266 266 return ctx.description()
267 267
268 268 @reraise_safe_exceptions
269 269 def ctx_diff(
270 270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 271 repo = self._factory.repo(wire)
272 272 ctx = repo[revision]
273 273 result = ctx.diff(
274 274 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 275 return list(result)
276 276
277 277 @reraise_safe_exceptions
278 278 def ctx_files(self, wire, revision):
279 279 repo = self._factory.repo(wire)
280 280 ctx = repo[revision]
281 281 return ctx.files()
282 282
283 283 @reraise_safe_exceptions
284 284 def ctx_list(self, path, revision):
285 285 repo = self._factory.repo(path)
286 286 ctx = repo[revision]
287 287 return list(ctx)
288 288
289 289 @reraise_safe_exceptions
290 290 def ctx_parents(self, wire, revision):
291 291 repo = self._factory.repo(wire)
292 292 ctx = repo[revision]
293 293 return [parent.rev() for parent in ctx.parents()]
294 294
295 295 @reraise_safe_exceptions
296 296 def ctx_phase(self, wire, revision):
297 297 repo = self._factory.repo(wire)
298 298 ctx = repo[revision]
299 299 # public=0, draft=1, secret=2
300 300 return ctx.phase()
301 301
302 302 @reraise_safe_exceptions
303 303 def ctx_obsolete(self, wire, revision):
304 304 repo = self._factory.repo(wire)
305 305 ctx = repo[revision]
306 306 return ctx.obsolete()
307 307
308 308 @reraise_safe_exceptions
309 309 def ctx_hidden(self, wire, revision):
310 310 repo = self._factory.repo(wire)
311 311 ctx = repo[revision]
312 312 return ctx.hidden()
313 313
314 314 @reraise_safe_exceptions
315 315 def ctx_substate(self, wire, revision):
316 316 repo = self._factory.repo(wire)
317 317 ctx = repo[revision]
318 318 return ctx.substate
319 319
320 320 @reraise_safe_exceptions
321 321 def ctx_status(self, wire, revision):
322 322 repo = self._factory.repo(wire)
323 323 ctx = repo[revision]
324 324 status = repo[ctx.p1().node()].status(other=ctx.node())
325 325 # object of status (odd, custom named tuple in mercurial) is not
326 326 # correctly serializable; we make it a list, as the underlying
327 327 # API expects this to be a list
328 328 return list(status)
329 329
330 330 @reraise_safe_exceptions
331 331 def ctx_user(self, wire, revision):
332 332 repo = self._factory.repo(wire)
333 333 ctx = repo[revision]
334 334 return ctx.user()
335 335
336 336 @reraise_safe_exceptions
337 337 def check_url(self, url, config):
338 338 _proto = None
339 339 if '+' in url[:url.find('://')]:
340 340 _proto = url[0:url.find('+')]
341 341 url = url[url.find('+') + 1:]
342 342 handlers = []
343 343 url_obj = url_parser(url)
344 344 test_uri, authinfo = url_obj.authinfo()
345 345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
346 346 url_obj.query = obfuscate_qs(url_obj.query)
347 347
348 348 cleaned_uri = str(url_obj)
349 349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
350 350
351 351 if authinfo:
352 352 # create a password manager
353 353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
354 354 passmgr.add_password(*authinfo)
355 355
356 356 handlers.extend((httpbasicauthhandler(passmgr),
357 357 httpdigestauthhandler(passmgr)))
358 358
359 359 o = urllib2.build_opener(*handlers)
360 360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
361 361 ('Accept', 'application/mercurial-0.1')]
362 362
363 363 q = {"cmd": 'between'}
364 364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
365 365 qs = '?%s' % urllib.urlencode(q)
366 366 cu = "%s%s" % (test_uri, qs)
367 367 req = urllib2.Request(cu, None, {})
368 368
369 369 try:
370 370 log.debug("Trying to open URL %s", cleaned_uri)
371 371 resp = o.open(req)
372 372 if resp.code != 200:
373 373 raise exceptions.URLError()('Return Code is not 200')
374 374 except Exception as e:
375 375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
376 376 # means it cannot be cloned
377 377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
378 378
379 379 # now check if it's a proper hg repo, but don't do it for svn
380 380 try:
381 381 if _proto == 'svn':
382 382 pass
383 383 else:
384 384 # check for pure hg repos
385 385 log.debug(
386 386 "Verifying if URL is a Mercurial repository: %s",
387 387 cleaned_uri)
388 388 ui = make_ui_from_config(config)
389 389 peer_checker = makepeer(ui, url)
390 390 peer_checker.lookup('tip')
391 391 except Exception as e:
392 392 log.warning("URL is not a valid Mercurial repository: %s",
393 393 cleaned_uri)
394 394 raise exceptions.URLError(e)(
395 395 "url [%s] does not look like an hg repo org_exc: %s"
396 396 % (cleaned_uri, e))
397 397
398 398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
399 399 return True
400 400
401 401 @reraise_safe_exceptions
402 402 def diff(
403 403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
404 404 context):
405 405 repo = self._factory.repo(wire)
406 406
407 407 if file_filter:
408 408 match_filter = match(file_filter[0], '', [file_filter[1]])
409 409 else:
410 410 match_filter = file_filter
411 411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
412 412
413 413 try:
414 414 return "".join(patch.diff(
415 415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
416 416 except RepoLookupError as e:
417 417 raise exceptions.LookupException(e)()
418 418
419 419 @reraise_safe_exceptions
420 def file_history(self, wire, revision, path, limit):
420 def node_history(self, wire, revision, path, limit):
421 421 repo = self._factory.repo(wire)
422 422
423 423 ctx = repo[revision]
424 424 fctx = ctx.filectx(path)
425 425
426 426 def history_iter():
427 427 limit_rev = fctx.rev()
428 428 for obj in reversed(list(fctx.filelog())):
429 429 obj = fctx.filectx(obj)
430 430 if limit_rev >= obj.rev():
431 431 yield obj
432 432
433 433 history = []
434 434 for cnt, obj in enumerate(history_iter()):
435 435 if limit and cnt >= limit:
436 436 break
437 437 history.append(hex(obj.node()))
438 438
439 439 return [x for x in history]
440 440
441 441 @reraise_safe_exceptions
442 def file_history_untill(self, wire, revision, path, limit):
442 def node_history_untill(self, wire, revision, path, limit):
443 443 repo = self._factory.repo(wire)
444 444 ctx = repo[revision]
445 445 fctx = ctx.filectx(path)
446 446
447 447 file_log = list(fctx.filelog())
448 448 if limit:
449 449 # Limit to the last n items
450 450 file_log = file_log[-limit:]
451 451
452 452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
453 453
454 454 @reraise_safe_exceptions
455 455 def fctx_annotate(self, wire, revision, path):
456 456 repo = self._factory.repo(wire)
457 457 ctx = repo[revision]
458 458 fctx = ctx.filectx(path)
459 459
460 460 result = []
461 461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
462 462 ln_no = i
463 463 sha = hex(annotate_obj.fctx.node())
464 464 content = annotate_obj.text
465 465 result.append((ln_no, sha, content))
466 466 return result
467 467
468 468 @reraise_safe_exceptions
469 469 def fctx_data(self, wire, revision, path):
470 470 repo = self._factory.repo(wire)
471 471 ctx = repo[revision]
472 472 fctx = ctx.filectx(path)
473 473 return fctx.data()
474 474
475 475 @reraise_safe_exceptions
476 476 def fctx_flags(self, wire, revision, path):
477 477 repo = self._factory.repo(wire)
478 478 ctx = repo[revision]
479 479 fctx = ctx.filectx(path)
480 480 return fctx.flags()
481 481
482 482 @reraise_safe_exceptions
483 483 def fctx_size(self, wire, revision, path):
484 484 repo = self._factory.repo(wire)
485 485 ctx = repo[revision]
486 486 fctx = ctx.filectx(path)
487 487 return fctx.size()
488 488
489 489 @reraise_safe_exceptions
490 490 def get_all_commit_ids(self, wire, name):
491 491 repo = self._factory.repo(wire)
492 492 revs = repo.filtered(name).changelog.index
493 493 return map(lambda x: hex(x[7]), revs)[:-1]
494 494
495 495 @reraise_safe_exceptions
496 496 def get_config_value(self, wire, section, name, untrusted=False):
497 497 repo = self._factory.repo(wire)
498 498 return repo.ui.config(section, name, untrusted=untrusted)
499 499
500 500 @reraise_safe_exceptions
501 501 def get_config_bool(self, wire, section, name, untrusted=False):
502 502 repo = self._factory.repo(wire)
503 503 return repo.ui.configbool(section, name, untrusted=untrusted)
504 504
505 505 @reraise_safe_exceptions
506 506 def get_config_list(self, wire, section, name, untrusted=False):
507 507 repo = self._factory.repo(wire)
508 508 return repo.ui.configlist(section, name, untrusted=untrusted)
509 509
510 510 @reraise_safe_exceptions
511 511 def is_large_file(self, wire, path):
512 512 return largefiles.lfutil.isstandin(path)
513 513
514 514 @reraise_safe_exceptions
515 515 def in_largefiles_store(self, wire, sha):
516 516 repo = self._factory.repo(wire)
517 517 return largefiles.lfutil.instore(repo, sha)
518 518
519 519 @reraise_safe_exceptions
520 520 def in_user_cache(self, wire, sha):
521 521 repo = self._factory.repo(wire)
522 522 return largefiles.lfutil.inusercache(repo.ui, sha)
523 523
524 524 @reraise_safe_exceptions
525 525 def store_path(self, wire, sha):
526 526 repo = self._factory.repo(wire)
527 527 return largefiles.lfutil.storepath(repo, sha)
528 528
529 529 @reraise_safe_exceptions
530 530 def link(self, wire, sha, path):
531 531 repo = self._factory.repo(wire)
532 532 largefiles.lfutil.link(
533 533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
534 534
535 535 @reraise_safe_exceptions
536 536 def localrepository(self, wire, create=False):
537 537 self._factory.repo(wire, create=create)
538 538
539 539 @reraise_safe_exceptions
540 540 def lookup(self, wire, revision, both):
541 541
542 542 repo = self._factory.repo(wire)
543 543
544 544 if isinstance(revision, int):
545 545 # NOTE(marcink):
546 546 # since Mercurial doesn't support indexes properly
547 547 # we need to shift accordingly by one to get the proper index, e.g.
548 548 # repo[-1] => repo[-2]
549 549 # repo[0] => repo[-1]
550 550 # repo[1] => repo[2]; we also never call repo[0] because
551 551 # it's actually the second commit
552 552 if revision <= 0:
553 553 revision = revision + -1
554 554 else:
555 555 revision = revision + 1
556 556
557 557 try:
558 558 ctx = repo[revision]
559 559 except RepoLookupError as e:
560 560 raise exceptions.LookupException(e)(revision)
561 561 except LookupError as e:
562 562 raise exceptions.LookupException(e)(e.name)
563 563
564 564 if not both:
565 565 return ctx.hex()
566 566
567 567 ctx = repo[ctx.hex()]
568 568 return ctx.hex(), ctx.rev()
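# A hedged worked example of the index shift applied in lookup() above
# (revisions <= 0 are shifted down by one, positive revisions up by one):
shift = lambda revision: revision + (-1 if revision <= 0 else 1)
assert shift(1) == 2      # repo[1]  => repo[2]
assert shift(0) == -1     # repo[0]  => repo[-1]
assert shift(-1) == -2    # repo[-1] => repo[-2]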
569 569
570 570 @reraise_safe_exceptions
571 571 def pull(self, wire, url, commit_ids=None):
572 572 repo = self._factory.repo(wire)
573 573 # Disable any prompts for this repo
574 574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
575 575
576 576 remote = peer(repo, {}, url)
577 577 # Disable any prompts for this remote
578 578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
579 579
580 580 if commit_ids:
581 581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
582 582
583 583 return exchange.pull(
584 584 repo, remote, heads=commit_ids, force=None).cgresult
585 585
586 586 @reraise_safe_exceptions
587 587 def sync_push(self, wire, url):
588 588 if not self.check_url(url, wire['config']):
589 589 return
590 590
591 591 repo = self._factory.repo(wire)
592 592
593 593 # Disable any prompts for this repo
594 594 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
595 595
596 596 bookmarks = dict(repo._bookmarks).keys()
597 597 remote = peer(repo, {}, url)
598 598 # Disable any prompts for this remote
599 599 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
600 600
601 601 return exchange.push(
602 602 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
603 603
604 604 @reraise_safe_exceptions
605 605 def revision(self, wire, rev):
606 606 repo = self._factory.repo(wire)
607 607 ctx = repo[rev]
608 608 return ctx.rev()
609 609
610 610 @reraise_safe_exceptions
611 611 def rev_range(self, wire, filter):
612 612 repo = self._factory.repo(wire)
613 613 revisions = [rev for rev in revrange(repo, filter)]
614 614 return revisions
615 615
616 616 @reraise_safe_exceptions
617 617 def rev_range_hash(self, wire, node):
618 618 repo = self._factory.repo(wire)
619 619
620 620 def get_revs(repo, rev_opt):
621 621 if rev_opt:
622 622 revs = revrange(repo, rev_opt)
623 623 if len(revs) == 0:
624 624 return (nullrev, nullrev)
625 625 return max(revs), min(revs)
626 626 else:
627 627 return len(repo) - 1, 0
628 628
629 629 stop, start = get_revs(repo, [node + ':'])
630 630 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
631 631 return revs
632 632
633 633 @reraise_safe_exceptions
634 634 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
635 635 other_path = kwargs.pop('other_path', None)
636 636
637 637 # case when we want to compare two independent repositories
638 638 if other_path and other_path != wire["path"]:
639 639 baseui = self._factory._create_config(wire["config"])
640 640 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
641 641 else:
642 642 repo = self._factory.repo(wire)
643 643 return list(repo.revs(rev_spec, *args))
644 644
645 645 @reraise_safe_exceptions
646 646 def strip(self, wire, revision, update, backup):
647 647 repo = self._factory.repo(wire)
648 648 ctx = repo[revision]
649 649 hgext_strip(
650 650 repo.baseui, repo, ctx.node(), update=update, backup=backup)
651 651
652 652 @reraise_safe_exceptions
653 653 def verify(self, wire,):
654 654 repo = self._factory.repo(wire)
655 655 baseui = self._factory._create_config(wire['config'])
656 656 baseui.setconfig('ui', 'quiet', 'false')
657 657 output = io.BytesIO()
658 658
659 659 def write(data, **unused_kwargs):
660 660 output.write(data)
661 661 baseui.write = write
662 662
663 663 repo.ui = baseui
664 664 verify.verify(repo)
665 665 return output.getvalue()
666 666
667 667 @reraise_safe_exceptions
668 668 def tag(self, wire, name, revision, message, local, user,
669 669 tag_time, tag_timezone):
670 670 repo = self._factory.repo(wire)
671 671 ctx = repo[revision]
672 672 node = ctx.node()
673 673
674 674 date = (tag_time, tag_timezone)
675 675 try:
676 676 hg_tag.tag(repo, name, node, message, local, user, date)
677 677 except Abort as e:
678 678 log.exception("Tag operation aborted")
679 679 # Exception can contain unicode which we convert
680 680 raise exceptions.AbortException(e)(repr(e))
681 681
682 682 @reraise_safe_exceptions
683 683 def tags(self, wire):
684 684 repo = self._factory.repo(wire)
685 685 return repo.tags()
686 686
687 687 @reraise_safe_exceptions
688 688 def update(self, wire, node=None, clean=False):
689 689 repo = self._factory.repo(wire)
690 690 baseui = self._factory._create_config(wire['config'])
691 691 commands.update(baseui, repo, node=node, clean=clean)
692 692
693 693 @reraise_safe_exceptions
694 694 def identify(self, wire):
695 695 repo = self._factory.repo(wire)
696 696 baseui = self._factory._create_config(wire['config'])
697 697 output = io.BytesIO()
698 698 baseui.write = output.write
699 699 # This is required to get a full node id
700 700 baseui.debugflag = True
701 701 commands.identify(baseui, repo, id=True)
702 702
703 703 return output.getvalue()
704 704
705 705 @reraise_safe_exceptions
706 706 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
707 707 hooks=True):
708 708 repo = self._factory.repo(wire)
709 709 baseui = self._factory._create_config(wire['config'], hooks=hooks)
710 710
711 711 # Mercurial internally has a lot of logic that checks ONLY whether an
712 712 # option is defined; we only pass options that are actually defined
713 713 opts = {}
714 714 if bookmark:
715 715 opts['bookmark'] = bookmark
716 716 if branch:
717 717 opts['branch'] = branch
718 718 if revision:
719 719 opts['rev'] = revision
720 720
721 721 commands.pull(baseui, repo, source, **opts)
722 722
723 723 @reraise_safe_exceptions
724 724 def heads(self, wire, branch=None):
725 725 repo = self._factory.repo(wire)
726 726 baseui = self._factory._create_config(wire['config'])
727 727 output = io.BytesIO()
728 728
729 729 def write(data, **unused_kwargs):
730 730 output.write(data)
731 731
732 732 baseui.write = write
733 733 if branch:
734 734 args = [branch]
735 735 else:
736 736 args = []
737 737 commands.heads(baseui, repo, template='{node} ', *args)
738 738
739 739 return output.getvalue()
740 740
741 741 @reraise_safe_exceptions
742 742 def ancestor(self, wire, revision1, revision2):
743 743 repo = self._factory.repo(wire)
744 744 changelog = repo.changelog
745 745 lookup = repo.lookup
746 746 a = changelog.ancestor(lookup(revision1), lookup(revision2))
747 747 return hex(a)
748 748
749 749 @reraise_safe_exceptions
750 750 def push(self, wire, revisions, dest_path, hooks=True,
751 751 push_branches=False):
752 752 repo = self._factory.repo(wire)
753 753 baseui = self._factory._create_config(wire['config'], hooks=hooks)
754 754 commands.push(baseui, repo, dest=dest_path, rev=revisions,
755 755 new_branch=push_branches)
756 756
757 757 @reraise_safe_exceptions
758 758 def merge(self, wire, revision):
759 759 repo = self._factory.repo(wire)
760 760 baseui = self._factory._create_config(wire['config'])
761 761 repo.ui.setconfig('ui', 'merge', 'internal:dump')
762 762
763 763 # In case sub repositories are used, mercurial prompts the user in
764 764 # case of merge conflicts or different sub repository sources. By
765 765 # setting the interactive flag to `False` mercurial doesn't prompt the
766 766 # user but instead uses a default value.
767 767 repo.ui.setconfig('ui', 'interactive', False)
768 768
769 769 commands.merge(baseui, repo, rev=revision)
770 770
771 771 @reraise_safe_exceptions
772 772 def commit(self, wire, message, username, close_branch=False):
773 773 repo = self._factory.repo(wire)
774 774 baseui = self._factory._create_config(wire['config'])
775 775 repo.ui.setconfig('ui', 'username', username)
776 776 commands.commit(baseui, repo, message=message, close_branch=close_branch)
777 777
778 778 @reraise_safe_exceptions
779 779 def rebase(self, wire, source=None, dest=None, abort=False):
780 780 repo = self._factory.repo(wire)
781 781 baseui = self._factory._create_config(wire['config'])
782 782 repo.ui.setconfig('ui', 'merge', 'internal:dump')
783 783 rebase.rebase(
784 784 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
785 785
786 786 @reraise_safe_exceptions
787 787 def bookmark(self, wire, bookmark, revision=None):
788 788 repo = self._factory.repo(wire)
789 789 baseui = self._factory._create_config(wire['config'])
790 790 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
791 791
792 792 @reraise_safe_exceptions
793 793 def install_hooks(self, wire, force=False):
794 794 # we don't need any special hooks for Mercurial
795 795 pass
@@ -1,154 +1,154 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import re
21 21 import os
22 22 import sys
23 23 import datetime
24 24 import logging
25 25 import pkg_resources
26 26
27 27 import vcsserver
28 28
29 29 log = logging.getLogger(__name__)
30 30
31 31
32 32 def install_git_hooks(repo_path, bare, executable=None, force_create=False):
33 33 """
34 34 Creates a RhodeCode hook inside a git repository
35 35
36 36 :param repo_path: path to repository
37 37 :param executable: binary executable to put in the hooks
38 38 :param force_create: Create even if same name hook exists
39 39 """
40 40 executable = executable or sys.executable
41 41 hooks_path = os.path.join(repo_path, 'hooks')
42 42 if not bare:
43 43 hooks_path = os.path.join(repo_path, '.git', 'hooks')
44 44 if not os.path.isdir(hooks_path):
45 os.makedirs(hooks_path, mode=0777)
45 os.makedirs(hooks_path, mode=0o777)
46 46
47 47 tmpl_post = pkg_resources.resource_string(
48 48 'vcsserver', '/'.join(
49 49 ('hook_utils', 'hook_templates', 'git_post_receive.py.tmpl')))
50 50 tmpl_pre = pkg_resources.resource_string(
51 51 'vcsserver', '/'.join(
52 52 ('hook_utils', 'hook_templates', 'git_pre_receive.py.tmpl')))
53 53
54 54 path = '' # not used for now
55 55 timestamp = datetime.datetime.utcnow().isoformat()
56 56
57 57 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
58 58 log.debug('Installing git hook in repo %s', repo_path)
59 59 _hook_file = os.path.join(hooks_path, '%s-receive' % h_type)
60 60 _rhodecode_hook = check_rhodecode_hook(_hook_file)
61 61
62 62 if _rhodecode_hook or force_create:
63 63 log.debug('writing git %s hook file at %s !', h_type, _hook_file)
64 64 try:
65 65 with open(_hook_file, 'wb') as f:
66 66 template = template.replace(
67 67 '_TMPL_', vcsserver.__version__)
68 68 template = template.replace('_DATE_', timestamp)
69 69 template = template.replace('_ENV_', executable)
70 70 template = template.replace('_PATH_', path)
71 71 f.write(template)
72 os.chmod(_hook_file, 0755)
72 os.chmod(_hook_file, 0o755)
73 73 except IOError:
74 74 log.exception('error writing hook file %s', _hook_file)
75 75 else:
76 76 log.debug('skipping writing hook file')
77 77
78 78 return True
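# A minimal usage sketch of install_git_hooks() above; the repository path is
# a placeholder and the call assumes a non-bare checkout with a `.git` dir.
install_git_hooks('/tmp/example-repo', bare=False, force_create=True)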
79 79
80 80
81 81 def install_svn_hooks(repo_path, executable=None, force_create=False):
82 82 """
83 83 Creates RhodeCode hooks inside a svn repository
84 84
85 85 :param repo_path: path to repository
86 86 :param executable: binary executable to put in the hooks
87 87 :param force_create: Create even if same name hook exists
88 88 """
89 89 executable = executable or sys.executable
90 90 hooks_path = os.path.join(repo_path, 'hooks')
91 91 if not os.path.isdir(hooks_path):
92 os.makedirs(hooks_path, mode=0777)
92 os.makedirs(hooks_path, mode=0o777)
93 93
94 94 tmpl_post = pkg_resources.resource_string(
95 95 'vcsserver', '/'.join(
96 96 ('hook_utils', 'hook_templates', 'svn_post_commit_hook.py.tmpl')))
97 97 tmpl_pre = pkg_resources.resource_string(
98 98 'vcsserver', '/'.join(
99 99 ('hook_utils', 'hook_templates', 'svn_pre_commit_hook.py.tmpl')))
100 100
101 101 path = '' # not used for now
102 102 timestamp = datetime.datetime.utcnow().isoformat()
103 103
104 104 for h_type, template in [('pre', tmpl_pre), ('post', tmpl_post)]:
105 105 log.debug('Installing svn hook in repo %s', repo_path)
106 106 _hook_file = os.path.join(hooks_path, '%s-commit' % h_type)
107 107 _rhodecode_hook = check_rhodecode_hook(_hook_file)
108 108
109 109 if _rhodecode_hook or force_create:
110 110 log.debug('writing svn %s hook file at %s !', h_type, _hook_file)
111 111
112 112 try:
113 113 with open(_hook_file, 'wb') as f:
114 114 template = template.replace(
115 115 '_TMPL_', vcsserver.__version__)
116 116 template = template.replace('_DATE_', timestamp)
117 117 template = template.replace('_ENV_', executable)
118 118 template = template.replace('_PATH_', path)
119 119
120 120 f.write(template)
121 os.chmod(_hook_file, 0755)
121 os.chmod(_hook_file, 0o755)
122 122 except IOError:
123 123 log.exception('error writing hook file %s', _hook_file)
124 124 else:
125 125 log.debug('skipping writing hook file')
126 126
127 127 return True
128 128
129 129
130 130 def check_rhodecode_hook(hook_path):
131 131 """
132 132 Check if the hook was created by RhodeCode
133 133 """
134 134 if not os.path.exists(hook_path):
135 135 return True
136 136
137 137 log.debug('hook exists, checking if it is from rhodecode')
138 138 hook_content = read_hook_content(hook_path)
139 139 matches = re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', hook_content)
140 140 if matches:
141 141 try:
142 142 version = matches.groups()[0]
143 143 log.debug('got version %s from hooks.', version)
144 144 return True
145 145 except Exception:
146 146 log.exception("Exception while reading the hook version.")
147 147
148 148 return False
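# A hedged illustration of what check_rhodecode_hook() above matches: the
# generated hook files are assumed to embed a version marker of this shape
# (the version string below is a placeholder).
import re
example_hook_content = '#!/usr/bin/env python\nRC_HOOK_VER = 4.15.0\n'
assert re.search(r'(?:RC_HOOK_VER)\s*=\s*(.*)', example_hook_content)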
149 149
150 150
151 151 def read_hook_content(hook_path):
152 152 with open(hook_path, 'rb') as f:
153 153 content = f.read()
154 154 return content
@@ -1,702 +1,711 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 49 try:
50 50 connection.request('POST', '/', body)
51 51 except Exception:
52 52 log.error('Connection failed on %s', connection)
53 53 raise
54 54 response = connection.getresponse()
55 return json.loads(response.read())
55
56 response_data = response.read()
57
58 try:
59 return json.loads(response_data)
60 except Exception:
61 log.exception('Failed to decode hook response json data. '
62 'response_code:%s, raw_data:%s',
63 response.status, response_data)
64 raise
56 65
57 66 def _serialize(self, hook_name, extras):
58 67 data = {
59 68 'method': hook_name,
60 69 'extras': extras
61 70 }
62 71 return json.dumps(data)
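# A minimal, hedged usage sketch of HooksHttpClient above; the address and
# extras payload are placeholders and assume a hooks HTTP server is listening.
hooks_client = HooksHttpClient('127.0.0.1:10010')
hook_result = hooks_client('repo_size', {'repository': 'example-repo'})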
63 72
64 73
65 74 class HooksDummyClient(object):
66 75 def __init__(self, hooks_module):
67 76 self._hooks_module = importlib.import_module(hooks_module)
68 77
69 78 def __call__(self, hook_name, extras):
70 79 with self._hooks_module.Hooks() as hooks:
71 80 return getattr(hooks, hook_name)(extras)
72 81
73 82
74 83 class RemoteMessageWriter(object):
75 84 """Writer base class."""
76 85 def write(self, message):
77 86 raise NotImplementedError()
78 87
79 88
80 89 class HgMessageWriter(RemoteMessageWriter):
81 90 """Writer that knows how to send messages to mercurial clients."""
82 91
83 92 def __init__(self, ui):
84 93 self.ui = ui
85 94
86 95 def write(self, message):
87 96 # TODO: Check why the quiet flag is set by default.
88 97 old = self.ui.quiet
89 98 self.ui.quiet = False
90 99 self.ui.status(message.encode('utf-8'))
91 100 self.ui.quiet = old
92 101
93 102
94 103 class GitMessageWriter(RemoteMessageWriter):
95 104 """Writer that knows how to send messages to git clients."""
96 105
97 106 def __init__(self, stdout=None):
98 107 self.stdout = stdout or sys.stdout
99 108
100 109 def write(self, message):
101 110 self.stdout.write(message.encode('utf-8'))
102 111
103 112
104 113 class SvnMessageWriter(RemoteMessageWriter):
105 114 """Writer that knows how to send messages to svn clients."""
106 115
107 116 def __init__(self, stderr=None):
108 117 # SVN needs data sent to stderr for back-to-client messaging
109 118 self.stderr = stderr or sys.stderr
110 119
111 120 def write(self, message):
112 121 self.stderr.write(message.encode('utf-8'))
113 122
114 123
115 124 def _handle_exception(result):
116 125 exception_class = result.get('exception')
117 126 exception_traceback = result.get('exception_traceback')
118 127
119 128 if exception_traceback:
120 129 log.error('Got traceback from remote call:%s', exception_traceback)
121 130
122 131 if exception_class == 'HTTPLockedRC':
123 132 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 133 elif exception_class == 'HTTPBranchProtected':
125 134 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
126 135 elif exception_class == 'RepositoryError':
127 136 raise exceptions.VcsException()(*result['exception_args'])
128 137 elif exception_class:
129 138 raise Exception('Got remote exception "%s" with args "%s"' %
130 139 (exception_class, result['exception_args']))
131 140
132 141
133 142 def _get_hooks_client(extras):
134 143 if 'hooks_uri' in extras:
135 144 protocol = extras.get('hooks_protocol')
136 145 return HooksHttpClient(extras['hooks_uri'])
137 146 else:
138 147 return HooksDummyClient(extras['hooks_module'])
139 148
140 149
141 150 def _call_hook(hook_name, extras, writer):
142 151 hooks_client = _get_hooks_client(extras)
143 152 log.debug('Hooks, using client:%s', hooks_client)
144 153 result = hooks_client(hook_name, extras)
145 154 log.debug('Hooks got result: %s', result)
146 155
147 156 _handle_exception(result)
148 157 writer.write(result['output'])
149 158
150 159 return result['status']
151 160
152 161
153 162 def _extras_from_ui(ui):
154 163 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
155 164 if not hook_data:
156 165 # maybe it's inside environ?
157 166 env_hook_data = os.environ.get('RC_SCM_DATA')
158 167 if env_hook_data:
159 168 hook_data = env_hook_data
160 169
161 170 extras = {}
162 171 if hook_data:
163 172 extras = json.loads(hook_data)
164 173 return extras
165 174
166 175
167 176 def _rev_range_hash(repo, node, check_heads=False):
168 177
169 178 commits = []
170 179 revs = []
171 180 start = repo[node].rev()
172 181 end = len(repo)
173 182 for rev in range(start, end):
174 183 revs.append(rev)
175 184 ctx = repo[rev]
176 185 commit_id = mercurial.node.hex(ctx.node())
177 186 branch = ctx.branch()
178 187 commits.append((commit_id, branch))
179 188
180 189 parent_heads = []
181 190 if check_heads:
182 191 parent_heads = _check_heads(repo, start, end, revs)
183 192 return commits, parent_heads
184 193
185 194
186 195 def _check_heads(repo, start, end, commits):
187 196 changelog = repo.changelog
188 197 parents = set()
189 198
190 199 for new_rev in commits:
191 200 for p in changelog.parentrevs(new_rev):
192 201 if p == mercurial.node.nullrev:
193 202 continue
194 203 if p < start:
195 204 parents.add(p)
196 205
197 206 for p in parents:
198 207 branch = repo[p].branch()
199 208 # The heads descending from that parent, on the same branch
200 209 parent_heads = set([p])
201 210 reachable = set([p])
202 211 for x in xrange(p + 1, end):
203 212 if repo[x].branch() != branch:
204 213 continue
205 214 for pp in changelog.parentrevs(x):
206 215 if pp in reachable:
207 216 reachable.add(x)
208 217 parent_heads.discard(pp)
209 218 parent_heads.add(x)
210 219 # More than one head? Suggest merging
211 220 if len(parent_heads) > 1:
212 221 return list(parent_heads)
213 222
214 223 return []
215 224
216 225
217 226 def _get_git_env():
218 227 env = {}
219 228 for k, v in os.environ.items():
220 229 if k.startswith('GIT'):
221 230 env[k] = v
222 231
223 232 # serialized version
224 233 return [(k, v) for k, v in env.items()]
225 234
226 235
227 236 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
228 237 env = {}
229 238 for k, v in os.environ.items():
230 239 if k.startswith('HG'):
231 240 env[k] = v
232 241
233 242 env['HG_NODE'] = old_rev
234 243 env['HG_NODE_LAST'] = new_rev
235 244 env['HG_TXNID'] = txnid
236 245 env['HG_PENDING'] = repo_path
237 246
238 247 return [(k, v) for k, v in env.items()]
239 248
240 249
241 250 def repo_size(ui, repo, **kwargs):
242 251 extras = _extras_from_ui(ui)
243 252 return _call_hook('repo_size', extras, HgMessageWriter(ui))
244 253
245 254
246 255 def pre_pull(ui, repo, **kwargs):
247 256 extras = _extras_from_ui(ui)
248 257 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
249 258
250 259
251 260 def pre_pull_ssh(ui, repo, **kwargs):
252 261 extras = _extras_from_ui(ui)
253 262 if extras and extras.get('SSH'):
254 263 return pre_pull(ui, repo, **kwargs)
255 264 return 0
256 265
257 266
258 267 def post_pull(ui, repo, **kwargs):
259 268 extras = _extras_from_ui(ui)
260 269 return _call_hook('post_pull', extras, HgMessageWriter(ui))
261 270
262 271
263 272 def post_pull_ssh(ui, repo, **kwargs):
264 273 extras = _extras_from_ui(ui)
265 274 if extras and extras.get('SSH'):
266 275 return post_pull(ui, repo, **kwargs)
267 276 return 0
268 277
269 278
270 279 def pre_push(ui, repo, node=None, **kwargs):
271 280 """
272 281 Mercurial pre_push hook
273 282 """
274 283 extras = _extras_from_ui(ui)
275 284 detect_force_push = extras.get('detect_force_push')
276 285
277 286 rev_data = []
278 287 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
279 288 branches = collections.defaultdict(list)
280 289 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
281 290 for commit_id, branch in commits:
282 291 branches[branch].append(commit_id)
283 292
284 293 for branch, commits in branches.items():
285 294 old_rev = kwargs.get('node_last') or commits[0]
286 295 rev_data.append({
287 296 'total_commits': len(commits),
288 297 'old_rev': old_rev,
289 298 'new_rev': commits[-1],
290 299 'ref': '',
291 300 'type': 'branch',
292 301 'name': branch,
293 302 })
294 303
295 304 for push_ref in rev_data:
296 305 push_ref['multiple_heads'] = _heads
297 306
298 307 repo_path = os.path.join(
299 308 extras.get('repo_store', ''), extras.get('repository', ''))
300 309 push_ref['hg_env'] = _get_hg_env(
301 310 old_rev=push_ref['old_rev'],
302 311 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
303 312 repo_path=repo_path)
304 313
305 314 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
306 315 extras['commit_ids'] = rev_data
307 316
308 317 return _call_hook('pre_push', extras, HgMessageWriter(ui))
309 318
310 319
311 320 def pre_push_ssh(ui, repo, node=None, **kwargs):
312 321 extras = _extras_from_ui(ui)
313 322 if extras.get('SSH'):
314 323 return pre_push(ui, repo, node, **kwargs)
315 324
316 325 return 0
317 326
318 327
319 328 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
320 329 """
321 330 Mercurial pre_push hook for SSH
322 331 """
323 332 extras = _extras_from_ui(ui)
324 333 if extras.get('SSH'):
325 334 permission = extras['SSH_PERMISSIONS']
326 335
327 336 if 'repository.write' == permission or 'repository.admin' == permission:
328 337 return 0
329 338
330 339 # non-zero ret code
331 340 return 1
332 341
333 342 return 0
334 343
335 344
336 345 def post_push(ui, repo, node, **kwargs):
337 346 """
338 347 Mercurial post_push hook
339 348 """
340 349 extras = _extras_from_ui(ui)
341 350
342 351 commit_ids = []
343 352 branches = []
344 353 bookmarks = []
345 354 tags = []
346 355
347 356 commits, _heads = _rev_range_hash(repo, node)
348 357 for commit_id, branch in commits:
349 358 commit_ids.append(commit_id)
350 359 if branch not in branches:
351 360 branches.append(branch)
352 361
353 362 if hasattr(ui, '_rc_pushkey_branches'):
354 363 bookmarks = ui._rc_pushkey_branches
355 364
356 365 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
357 366 extras['commit_ids'] = commit_ids
358 367 extras['new_refs'] = {
359 368 'branches': branches,
360 369 'bookmarks': bookmarks,
361 370 'tags': tags
362 371 }
363 372
364 373 return _call_hook('post_push', extras, HgMessageWriter(ui))
365 374
366 375
367 376 def post_push_ssh(ui, repo, node, **kwargs):
368 377 """
369 378 Mercurial post_push hook for SSH
370 379 """
371 380 if _extras_from_ui(ui).get('SSH'):
372 381 return post_push(ui, repo, node, **kwargs)
373 382 return 0
374 383
375 384
376 385 def key_push(ui, repo, **kwargs):
377 386 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
378 387 # store new bookmarks in our UI object propagated later to post_push
379 388 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
380 389 return
381 390
382 391
383 392 # backward compat
384 393 log_pull_action = post_pull
385 394
386 395 # backward compat
387 396 log_push_action = post_push
388 397
389 398
390 399 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
391 400 """
392 401 Old hook name: keep here for backward compatibility.
393 402
394 403 This is only required when the installed git hooks are not upgraded.
395 404 """
396 405 pass
397 406
398 407
399 408 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
400 409 """
401 410 Old hook name: keep here for backward compatibility.
402 411
403 412 This is only required when the installed git hooks are not upgraded.
404 413 """
405 414 pass
406 415
407 416
408 417 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
409 418
410 419
411 420 def git_pre_pull(extras):
412 421 """
413 422 Pre pull hook.
414 423
415 424 :param extras: dictionary containing the keys defined in simplevcs
416 425 :type extras: dict
417 426
418 427 :return: status code of the hook. 0 for success.
419 428 :rtype: int
420 429 """
421 430 if 'pull' not in extras['hooks']:
422 431 return HookResponse(0, '')
423 432
424 433 stdout = io.BytesIO()
425 434 try:
426 435 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
427 436 except Exception as error:
428 437 status = 128
429 438 stdout.write('ERROR: %s\n' % str(error))
430 439
431 440 return HookResponse(status, stdout.getvalue())
432 441
433 442
434 443 def git_post_pull(extras):
435 444 """
436 445 Post pull hook.
437 446
438 447 :param extras: dictionary containing the keys defined in simplevcs
439 448 :type extras: dict
440 449
441 450 :return: status code of the hook. 0 for success.
442 451 :rtype: int
443 452 """
444 453 if 'pull' not in extras['hooks']:
445 454 return HookResponse(0, '')
446 455
447 456 stdout = io.BytesIO()
448 457 try:
449 458 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
450 459 except Exception as error:
451 460 status = 128
452 461 stdout.write('ERROR: %s\n' % error)
453 462
454 463 return HookResponse(status, stdout.getvalue())
455 464
456 465
457 466 def _parse_git_ref_lines(revision_lines):
458 467 rev_data = []
459 468 for revision_line in revision_lines or []:
460 469 old_rev, new_rev, ref = revision_line.strip().split(' ')
461 470 ref_data = ref.split('/', 2)
462 471 if ref_data[1] in ('tags', 'heads'):
463 472 rev_data.append({
464 473 # NOTE(marcink):
465 474 # we're unable to tell total_commits for git at this point
466 475 # but we set the variable for consistency with the other VCS hooks
467 476 'total_commits': -1,
468 477 'old_rev': old_rev,
469 478 'new_rev': new_rev,
470 479 'ref': ref,
471 480 'type': ref_data[1],
472 481 'name': ref_data[2],
473 482 })
474 483 return rev_data
475 484
476 485
477 486 def git_pre_receive(unused_repo_path, revision_lines, env):
478 487 """
479 488 Pre push hook.
480 489
481 490 :param env: hook environment; RC_SCM_DATA carries the extras defined in simplevcs
482 491 :type env: dict
483 492
484 493 :return: status code of the hook. 0 for success.
485 494 :rtype: int
486 495 """
487 496 extras = json.loads(env['RC_SCM_DATA'])
488 497 rev_data = _parse_git_ref_lines(revision_lines)
489 498 if 'push' not in extras['hooks']:
490 499 return 0
491 500 empty_commit_id = '0' * 40
492 501
493 502 detect_force_push = extras.get('detect_force_push')
494 503
495 504 for push_ref in rev_data:
496 505 # store our git-env which holds the temp store
497 506 push_ref['git_env'] = _get_git_env()
498 507 push_ref['pruned_sha'] = ''
499 508 if not detect_force_push:
500 509 # don't check for forced-push when we don't need to
501 510 continue
502 511
503 512 type_ = push_ref['type']
504 513 new_branch = push_ref['old_rev'] == empty_commit_id
505 514 if type_ == 'heads' and not new_branch:
506 515 old_rev = push_ref['old_rev']
507 516 new_rev = push_ref['new_rev']
508 517 cmd = [settings.GIT_EXECUTABLE, 'rev-list',
509 518 old_rev, '^{}'.format(new_rev)]
510 519 stdout, stderr = subprocessio.run_command(
511 520 cmd, env=os.environ.copy())
512 521 # non-empty output means some commits of the old revision are no longer
513 522 # reachable from the new one, i.e. a forced push was used
514 523 if stdout:
515 524 push_ref['pruned_sha'] = stdout.splitlines()
516 525
517 526 extras['hook_type'] = 'pre_receive'
518 527 extras['commit_ids'] = rev_data
519 528 return _call_hook('pre_push', extras, GitMessageWriter())
520 529
521 530
522 531 def git_post_receive(unused_repo_path, revision_lines, env):
523 532 """
524 533 Post push hook.
525 534
526 535 :param env: hook environment; RC_SCM_DATA carries the extras defined in simplevcs
527 536 :type env: dict
528 537
529 538 :return: status code of the hook. 0 for success.
530 539 :rtype: int
531 540 """
532 541 extras = json.loads(env['RC_SCM_DATA'])
533 542 if 'push' not in extras['hooks']:
534 543 return 0
535 544
536 545 rev_data = _parse_git_ref_lines(revision_lines)
537 546
538 547 git_revs = []
539 548
540 549 # N.B.(skreft): it is ok to just call git, as git before calling a
541 550 # subcommand sets the PATH environment variable so that it points to the
542 551 # correct version of the git executable.
543 552 empty_commit_id = '0' * 40
544 553 branches = []
545 554 tags = []
546 555 for push_ref in rev_data:
547 556 type_ = push_ref['type']
548 557
549 558 if type_ == 'heads':
550 559 if push_ref['old_rev'] == empty_commit_id:
551 560 # starting new branch case
552 561 if push_ref['name'] not in branches:
553 562 branches.append(push_ref['name'])
554 563
555 564 # Fix up head revision if needed
556 565 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
557 566 try:
558 567 subprocessio.run_command(cmd, env=os.environ.copy())
559 568 except Exception:
560 569 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
561 570 'refs/heads/%s' % push_ref['name']]
562 571 print("Setting default branch to %s" % push_ref['name'])
563 572 subprocessio.run_command(cmd, env=os.environ.copy())
564 573
565 574 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
566 575 '--format=%(refname)', 'refs/heads/*']
567 576 stdout, stderr = subprocessio.run_command(
568 577 cmd, env=os.environ.copy())
569 578 heads = stdout
570 579 heads = heads.replace(push_ref['ref'], '')
571 580 heads = ' '.join(head for head
572 581 in heads.splitlines() if head) or '.'
573 582 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
574 583 '--pretty=format:%H', '--', push_ref['new_rev'],
575 584 '--not', heads]
576 585 stdout, stderr = subprocessio.run_command(
577 586 cmd, env=os.environ.copy())
578 587 git_revs.extend(stdout.splitlines())
579 588 elif push_ref['new_rev'] == empty_commit_id:
580 589 # delete branch case
581 590 git_revs.append('delete_branch=>%s' % push_ref['name'])
582 591 else:
583 592 if push_ref['name'] not in branches:
584 593 branches.append(push_ref['name'])
585 594
586 595 cmd = [settings.GIT_EXECUTABLE, 'log',
587 596 '{old_rev}..{new_rev}'.format(**push_ref),
588 597 '--reverse', '--pretty=format:%H']
589 598 stdout, stderr = subprocessio.run_command(
590 599 cmd, env=os.environ.copy())
591 600 git_revs.extend(stdout.splitlines())
592 601 elif type_ == 'tags':
593 602 if push_ref['name'] not in tags:
594 603 tags.append(push_ref['name'])
595 604 git_revs.append('tag=>%s' % push_ref['name'])
596 605
597 606 extras['hook_type'] = 'post_receive'
598 607 extras['commit_ids'] = git_revs
599 608 extras['new_refs'] = {
600 609 'branches': branches,
601 610 'bookmarks': [],
602 611 'tags': tags,
603 612 }
604 613
605 614 if 'repo_size' in extras['hooks']:
606 615 try:
607 616 _call_hook('repo_size', extras, GitMessageWriter())
608 617 except Exception:
609 618 pass
610 619
611 620 return _call_hook('post_push', extras, GitMessageWriter())
612 621
613 622
614 623 def _get_extras_from_txn_id(path, txn_id):
615 624 extras = {}
616 625 try:
617 626 cmd = ['svnlook', 'pget',
618 627 '-t', txn_id,
619 628 '--revprop', path, 'rc-scm-extras']
620 629 stdout, stderr = subprocessio.run_command(
621 630 cmd, env=os.environ.copy())
622 631 extras = json.loads(base64.urlsafe_b64decode(stdout))
623 632 except Exception:
624 633 log.exception('Failed to extract extras info from txn_id')
625 634
626 635 return extras
627 636
628 637
629 638 def _get_extras_from_commit_id(commit_id, path):
630 639 extras = {}
631 640 try:
632 641 cmd = ['svnlook', 'pget',
633 642 '-r', commit_id,
634 643 '--revprop', path, 'rc-scm-extras']
635 644 stdout, stderr = subprocessio.run_command(
636 645 cmd, env=os.environ.copy())
637 646 extras = json.loads(base64.urlsafe_b64decode(stdout))
638 647 except Exception:
639 648 log.exception('Failed to extract extras info from commit_id')
640 649
641 650 return extras
642 651
643 652
644 653 def svn_pre_commit(repo_path, commit_data, env):
645 654 path, txn_id = commit_data
646 655 branches = []
647 656 tags = []
648 657
649 658 if env.get('RC_SCM_DATA'):
650 659 extras = json.loads(env['RC_SCM_DATA'])
651 660 else:
652 661 # fallback method to read from TXN-ID stored data
653 662 extras = _get_extras_from_txn_id(path, txn_id)
654 663 if not extras:
655 664 return 0
656 665
657 666 extras['hook_type'] = 'pre_commit'
658 667 extras['commit_ids'] = []
659 668 extras['txn_id'] = txn_id
660 669 extras['new_refs'] = {
661 670 'total_commits': 1,
662 671 'branches': branches,
663 672 'bookmarks': [],
664 673 'tags': tags,
665 674 }
666 675
667 676 return _call_hook('pre_push', extras, SvnMessageWriter())
668 677
669 678
670 679 def svn_post_commit(repo_path, commit_data, env):
671 680 """
672 681 commit_data is path, rev, txn_id
673 682 """
674 683 path, commit_id, txn_id = commit_data
675 684 branches = []
676 685 tags = []
677 686
678 687 if env.get('RC_SCM_DATA'):
679 688 extras = json.loads(env['RC_SCM_DATA'])
680 689 else:
681 690 # fallback method to read from TXN-ID stored data
682 691 extras = _get_extras_from_commit_id(commit_id, path)
683 692 if not extras:
684 693 return 0
685 694
686 695 extras['hook_type'] = 'post_commit'
687 696 extras['commit_ids'] = [commit_id]
688 697 extras['txn_id'] = txn_id
689 698 extras['new_refs'] = {
690 699 'branches': branches,
691 700 'bookmarks': [],
692 701 'tags': tags,
693 702 'total_commits': 1,
694 703 }
695 704
696 705 if 'repo_size' in extras['hooks']:
697 706 try:
698 707 _call_hook('repo_size', extras, SvnMessageWriter())
699 708 except Exception:
700 709 pass
701 710
702 711 return _call_hook('post_push', extras, SvnMessageWriter())
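For orientation, a minimal sketch (not part of the commit; the hashes and ref names below are invented) of the "<old> <new> <ref>" lines git feeds the receive hooks above, and the push_ref dictionaries that _parse_git_ref_lines builds from them:

    empty = '0' * 40
    sample_lines = [
        '{} {} refs/heads/feature-x'.format(empty, 'b' * 40),  # branch created
        '{} {} refs/tags/v1.0'.format('a' * 40, 'c' * 40),     # tag pushed
    ]
    # _parse_git_ref_lines(sample_lines) returns entries shaped like:
    # {'total_commits': -1, 'old_rev': '00...0', 'new_rev': 'bb...b',
    #  'ref': 'refs/heads/feature-x', 'type': 'heads', 'name': 'feature-x'}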
@@ -1,598 +1,607 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 from itertools import chain
28 28
29 29 import simplejson as json
30 30 import msgpack
31 31 from pyramid.config import Configurator
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.compat import configparser
35 35
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39 # due to Mercurial/glibc2.27 problems we need to detect if locale settings are
40 40 # causing problems and "fix" it in case they do and fallback to LC_ALL = C
41 41
42 42 try:
43 43 locale.setlocale(locale.LC_ALL, '')
44 44 except locale.Error as e:
45 45 log.error(
46 46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
47 47 os.environ['LC_ALL'] = 'C'
48 48
49 49 import vcsserver
50 50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
51 51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
52 52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
53 53 from vcsserver.echo_stub.echo_app import EchoApp
54 54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
55 55 from vcsserver.lib.exc_tracking import store_exception
56 56 from vcsserver.server import VcsServer
57 57
58 58 try:
59 59 from vcsserver.git import GitFactory, GitRemote
60 60 except ImportError:
61 61 GitFactory = None
62 62 GitRemote = None
63 63
64 64 try:
65 65 from vcsserver.hg import MercurialFactory, HgRemote
66 66 except ImportError:
67 67 MercurialFactory = None
68 68 HgRemote = None
69 69
70 70 try:
71 71 from vcsserver.svn import SubversionFactory, SvnRemote
72 72 except ImportError:
73 73 SubversionFactory = None
74 74 SvnRemote = None
75 75
76 76
77 77 def _is_request_chunked(environ):
78 78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 79 return stream
80 80
81 81
82 82 def _int_setting(settings, name, default):
83 83 settings[name] = int(settings.get(name, default))
84 84 return settings[name]
85 85
86 86
87 87 def _bool_setting(settings, name, default):
88 88 input_val = settings.get(name, default)
89 89 if isinstance(input_val, unicode):
90 90 input_val = input_val.encode('utf8')
91 91 settings[name] = asbool(input_val)
92 92 return settings[name]
93 93
94 94
95 95 def _list_setting(settings, name, default):
96 96 raw_value = settings.get(name, default)
97 97
98 98 # We assume the value uses Pyramid's space/newline separation.
99 99 settings[name] = aslist(raw_value)
100 100 return settings[name]
101 101
102 102
103 103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
104 104 value = settings.get(name, default)
105 105
106 106 if default_when_empty and not value:
107 107 # use default value when value is empty
108 108 value = default
109 109
110 110 if lower:
111 111 value = value.lower()
112 112 settings[name] = value
113 113 return settings[name]
114 114
115 115
116 116 class VCS(object):
117 117 def __init__(self, locale=None, cache_config=None):
118 118 self.locale = locale
119 119 self.cache_config = cache_config
120 120 self._configure_locale()
121 121
122 122 if GitFactory and GitRemote:
123 123 git_factory = GitFactory()
124 124 self._git_remote = GitRemote(git_factory)
125 125 else:
126 126 log.info("Git client import failed")
127 127
128 128 if MercurialFactory and HgRemote:
129 129 hg_factory = MercurialFactory()
130 130 self._hg_remote = HgRemote(hg_factory)
131 131 else:
132 132 log.info("Mercurial client import failed")
133 133
134 134 if SubversionFactory and SvnRemote:
135 135 svn_factory = SubversionFactory()
136 136
137 137 # hg factory is used for svn url validation
138 138 hg_factory = MercurialFactory()
139 139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
140 140 else:
141 141 log.info("Subversion client import failed")
142 142
143 143 self._vcsserver = VcsServer()
144 144
145 145 def _configure_locale(self):
146 146 if self.locale:
147 147 log.info('Setting locale: `LC_ALL` to %s', self.locale)
148 148 else:
149 149 log.info(
150 150 'Configuring locale subsystem based on environment variables')
151 151 try:
152 152 # If self.locale is the empty string, then the locale
153 153 # module will use the environment variables. See the
154 154 # documentation of the package `locale`.
155 155 locale.setlocale(locale.LC_ALL, self.locale)
156 156
157 157 language_code, encoding = locale.getlocale()
158 158 log.info(
159 159 'Locale set to language code "%s" with encoding "%s".',
160 160 language_code, encoding)
161 161 except locale.Error:
162 162 log.exception(
163 163 'Cannot set locale, not configuring the locale system')
164 164
165 165
166 166 class WsgiProxy(object):
167 167 def __init__(self, wsgi):
168 168 self.wsgi = wsgi
169 169
170 170 def __call__(self, environ, start_response):
171 171 input_data = environ['wsgi.input'].read()
172 172 input_data = msgpack.unpackb(input_data)
173 173
174 174 error = None
175 175 try:
176 176 data, status, headers = self.wsgi.handle(
177 177 input_data['environment'], input_data['input_data'],
178 178 *input_data['args'], **input_data['kwargs'])
179 179 except Exception as e:
180 180 data, status, headers = [], None, None
181 181 error = {
182 182 'message': str(e),
183 183 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 184 }
185 185
186 186 start_response(200, {})
187 187 return self._iterator(error, status, headers, data)
188 188
189 189 def _iterator(self, error, status, headers, data):
190 190 initial_data = [
191 191 error,
192 192 status,
193 193 headers,
194 194 ]
195 195
196 196 for d in chain(initial_data, data):
197 197 yield msgpack.packb(d)
198 198
199 199
200 def not_found(request):
201 return {'status': '404 NOT FOUND'}
202
203
204 class VCSViewPredicate(object):
205 def __init__(self, val, config):
206 self.remotes = val
207
208 def text(self):
209 return 'vcs view method = %s' % (self.remotes.keys(),)
210
211 phash = text
212
213 def __call__(self, context, request):
214 """
215 View predicate that returns true if given backend is supported by
216 defined remotes.
217 """
218 backend = request.matchdict.get('backend')
219 return backend in self.remotes
220
221
200 222 class HTTPApplication(object):
201 223 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
202 224
203 225 remote_wsgi = remote_wsgi
204 226 _use_echo_app = False
205 227
206 228 def __init__(self, settings=None, global_config=None):
207 229 self._sanitize_settings_and_apply_defaults(settings)
208 230
209 231 self.config = Configurator(settings=settings)
210 232 self.global_config = global_config
211 233 self.config.include('vcsserver.lib.rc_cache')
212 234
213 235 locale = settings.get('locale', '') or 'en_US.UTF-8'
214 236 vcs = VCS(locale=locale, cache_config=settings)
215 237 self._remotes = {
216 238 'hg': vcs._hg_remote,
217 239 'git': vcs._git_remote,
218 240 'svn': vcs._svn_remote,
219 241 'server': vcs._vcsserver,
220 242 }
221 243 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
222 244 self._use_echo_app = True
223 245 log.warning("Using EchoApp for VCS operations.")
224 246 self.remote_wsgi = remote_wsgi_stub
225 247
226 248 self._configure_settings(global_config, settings)
227 249 self._configure()
228 250
229 251 def _configure_settings(self, global_config, app_settings):
230 252 """
231 253 Configure the settings module.
232 254 """
233 255 settings_merged = global_config.copy()
234 256 settings_merged.update(app_settings)
235 257
236 258 git_path = app_settings.get('git_path', None)
237 259 if git_path:
238 260 settings.GIT_EXECUTABLE = git_path
239 261 binary_dir = app_settings.get('core.binary_dir', None)
240 262 if binary_dir:
241 263 settings.BINARY_DIR = binary_dir
242 264
243 265 # Store the settings to make them available to other modules.
244 266 vcsserver.PYRAMID_SETTINGS = settings_merged
245 267 vcsserver.CONFIG = settings_merged
246 268
247 269 def _sanitize_settings_and_apply_defaults(self, settings):
248 270 temp_store = tempfile.gettempdir()
249 271 default_cache_dir = os.path.join(temp_store, 'rc_cache')
250 272
251 273 # save the default cache dir and use it for all backends later.
252 274 default_cache_dir = _string_setting(
253 275 settings,
254 276 'cache_dir',
255 277 default_cache_dir, lower=False, default_when_empty=True)
256 278
257 279 # ensure we have our dir created
258 280 if not os.path.isdir(default_cache_dir):
259 os.makedirs(default_cache_dir, mode=0755)
281 os.makedirs(default_cache_dir, mode=0o755)
260 282
261 283 # exception store cache
262 284 _string_setting(
263 285 settings,
264 286 'exception_tracker.store_path',
265 287 temp_store, lower=False, default_when_empty=True)
266 288
267 289 # repo_object cache
268 290 _string_setting(
269 291 settings,
270 292 'rc_cache.repo_object.backend',
271 293 'dogpile.cache.rc.memory_lru')
272 294 _int_setting(
273 295 settings,
274 296 'rc_cache.repo_object.expiration_time',
275 297 300)
276 298 _int_setting(
277 299 settings,
278 300 'rc_cache.repo_object.max_size',
279 301 1024)
280 302
281 303 def _configure(self):
282 self.config.add_renderer(
283 name='msgpack',
284 factory=self._msgpack_renderer_factory)
304 self.config.add_renderer(name='msgpack', factory=self._msgpack_renderer_factory)
285 305
286 306 self.config.add_route('service', '/_service')
287 307 self.config.add_route('status', '/status')
288 308 self.config.add_route('hg_proxy', '/proxy/hg')
289 309 self.config.add_route('git_proxy', '/proxy/git')
290 310 self.config.add_route('vcs', '/{backend}')
291 311 self.config.add_route('stream_git', '/stream/git/*repo_name')
292 312 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
293 313
294 self.config.add_view(
295 self.status_view, route_name='status', renderer='json')
296 self.config.add_view(
297 self.service_view, route_name='service', renderer='msgpack')
314 self.config.add_view(self.status_view, route_name='status', renderer='json')
315 self.config.add_view(self.service_view, route_name='service', renderer='msgpack')
298 316
299 317 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
300 318 self.config.add_view(self.git_proxy(), route_name='git_proxy')
301 self.config.add_view(
302 self.vcs_view, route_name='vcs', renderer='msgpack',
303 custom_predicates=[self.is_vcs_view])
319 self.config.add_view(self.vcs_view, route_name='vcs', renderer='msgpack',
320 vcs_view=self._remotes)
304 321
305 322 self.config.add_view(self.hg_stream(), route_name='stream_hg')
306 323 self.config.add_view(self.git_stream(), route_name='stream_git')
307 324
308 def notfound(request):
309 return {'status': '404 NOT FOUND'}
310 self.config.add_notfound_view(notfound, renderer='json')
325 self.config.add_view_predicate('vcs_view', VCSViewPredicate)
326
327 self.config.add_notfound_view(not_found, renderer='json')
311 328
312 329 self.config.add_view(self.handle_vcs_exception, context=Exception)
313 330
314 331 self.config.add_tween(
315 332 'vcsserver.tweens.RequestWrapperTween',
316 333 )
317 334
318 335 def wsgi_app(self):
319 336 return self.config.make_wsgi_app()
320 337
321 338 def vcs_view(self, request):
322 339 remote = self._remotes[request.matchdict['backend']]
323 340 payload = msgpack.unpackb(request.body, use_list=True)
324 341 method = payload.get('method')
325 342 params = payload.get('params')
326 343 wire = params.get('wire')
327 344 args = params.get('args')
328 345 kwargs = params.get('kwargs')
329 346 context_uid = None
330 347
331 348 if wire:
332 349 try:
333 350 wire['context'] = context_uid = uuid.UUID(wire['context'])
334 351 except KeyError:
335 352 pass
336 353 args.insert(0, wire)
337 354
338 355 log.debug('method called:%s with kwargs:%s context_uid: %s',
339 356 method, kwargs, context_uid)
340 357 try:
341 358 resp = getattr(remote, method)(*args, **kwargs)
342 359 except Exception as e:
343 360 exc_info = list(sys.exc_info())
344 361 exc_type, exc_value, exc_traceback = exc_info
345 362
346 363 org_exc = getattr(e, '_org_exc', None)
347 364 org_exc_name = None
348 365 if org_exc:
349 366 org_exc_name = org_exc.__class__.__name__
350 367 # replace our "faked" exception with the original one
351 368 exc_info[0] = org_exc.__class__
352 369 exc_info[1] = org_exc
353 370
354 371 store_exception(id(exc_info), exc_info)
355 372
356 373 tb_info = ''.join(
357 374 traceback.format_exception(exc_type, exc_value, exc_traceback))
358 375
359 376 type_ = e.__class__.__name__
360 377 if type_ not in self.ALLOWED_EXCEPTIONS:
361 378 type_ = None
362 379
363 380 resp = {
364 381 'id': payload.get('id'),
365 382 'error': {
366 383 'message': e.message,
367 384 'traceback': tb_info,
368 385 'org_exc': org_exc_name,
369 386 'type': type_
370 387 }
371 388 }
372 389 try:
373 390 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
374 391 except AttributeError:
375 392 pass
376 393 else:
377 394 resp = {
378 395 'id': payload.get('id'),
379 396 'result': resp
380 397 }
381 398
382 399 return resp
383 400
384 401 def status_view(self, request):
385 402 import vcsserver
386 403 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
387 404 'pid': os.getpid()}
388 405
389 406 def service_view(self, request):
390 407 import vcsserver
391 408
392 409 payload = msgpack.unpackb(request.body, use_list=True)
393 410
394 411 try:
395 412 path = self.global_config['__file__']
396 413 config = configparser.ConfigParser()
397 414 config.read(path)
398 415 parsed_ini = config
399 416 if parsed_ini.has_section('server:main'):
400 417 parsed_ini = dict(parsed_ini.items('server:main'))
401 418 except Exception:
402 419 log.exception('Failed to read .ini file for display')
403 420 parsed_ini = {}
404 421
405 422 resp = {
406 423 'id': payload.get('id'),
407 424 'result': dict(
408 425 version=vcsserver.__version__,
409 426 config=parsed_ini,
410 427 payload=payload,
411 428 )
412 429 }
413 430 return resp
414 431
415 432 def _msgpack_renderer_factory(self, info):
416 433 def _render(value, system):
417 434 value = msgpack.packb(value)
418 435 request = system.get('request')
419 436 if request is not None:
420 437 response = request.response
421 438 ct = response.content_type
422 439 if ct == response.default_content_type:
423 440 response.content_type = 'application/x-msgpack'
424 441 return value
425 442 return _render
426 443
427 444 def set_env_from_config(self, environ, config):
428 445 dict_conf = {}
429 446 try:
430 447 for elem in config:
431 448 if elem[0] == 'rhodecode':
432 449 dict_conf = json.loads(elem[2])
433 450 break
434 451 except Exception:
435 452 log.exception('Failed to fetch SCM CONFIG')
436 453 return
437 454
438 455 username = dict_conf.get('username')
439 456 if username:
440 457 environ['REMOTE_USER'] = username
441 458 # mercurial specific, some extension api rely on this
442 459 environ['HGUSER'] = username
443 460
444 461 ip = dict_conf.get('ip')
445 462 if ip:
446 463 environ['REMOTE_HOST'] = ip
447 464
448 465 if _is_request_chunked(environ):
449 466 # set the compatibility flag for webob
450 467 environ['wsgi.input_terminated'] = True
451 468
452 469 def hg_proxy(self):
453 470 @wsgiapp
454 471 def _hg_proxy(environ, start_response):
455 472 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
456 473 return app(environ, start_response)
457 474 return _hg_proxy
458 475
459 476 def git_proxy(self):
460 477 @wsgiapp
461 478 def _git_proxy(environ, start_response):
462 479 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
463 480 return app(environ, start_response)
464 481 return _git_proxy
465 482
466 483 def hg_stream(self):
467 484 if self._use_echo_app:
468 485 @wsgiapp
469 486 def _hg_stream(environ, start_response):
470 487 app = EchoApp('fake_path', 'fake_name', None)
471 488 return app(environ, start_response)
472 489 return _hg_stream
473 490 else:
474 491 @wsgiapp
475 492 def _hg_stream(environ, start_response):
476 493 log.debug('http-app: handling hg stream')
477 494 repo_path = environ['HTTP_X_RC_REPO_PATH']
478 495 repo_name = environ['HTTP_X_RC_REPO_NAME']
479 496 packed_config = base64.b64decode(
480 497 environ['HTTP_X_RC_REPO_CONFIG'])
481 498 config = msgpack.unpackb(packed_config)
482 499 app = scm_app.create_hg_wsgi_app(
483 500 repo_path, repo_name, config)
484 501
485 502 # Consistent path information for hgweb
486 503 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
487 504 environ['REPO_NAME'] = repo_name
488 505 self.set_env_from_config(environ, config)
489 506
490 507 log.debug('http-app: starting app handler '
491 508 'with %s and process request', app)
492 509 return app(environ, ResponseFilter(start_response))
493 510 return _hg_stream
494 511
495 512 def git_stream(self):
496 513 if self._use_echo_app:
497 514 @wsgiapp
498 515 def _git_stream(environ, start_response):
499 516 app = EchoApp('fake_path', 'fake_name', None)
500 517 return app(environ, start_response)
501 518 return _git_stream
502 519 else:
503 520 @wsgiapp
504 521 def _git_stream(environ, start_response):
505 522 log.debug('http-app: handling git stream')
506 523 repo_path = environ['HTTP_X_RC_REPO_PATH']
507 524 repo_name = environ['HTTP_X_RC_REPO_NAME']
508 525 packed_config = base64.b64decode(
509 526 environ['HTTP_X_RC_REPO_CONFIG'])
510 527 config = msgpack.unpackb(packed_config)
511 528
512 529 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
513 530 self.set_env_from_config(environ, config)
514 531
515 532 content_type = environ.get('CONTENT_TYPE', '')
516 533
517 534 path = environ['PATH_INFO']
518 535 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
519 536 log.debug(
520 537 'LFS: Detecting if request `%s` is LFS server path based '
521 538 'on content type:`%s`, is_lfs:%s',
522 539 path, content_type, is_lfs_request)
523 540
524 541 if not is_lfs_request:
525 542 # fallback detection by path
526 543 if GIT_LFS_PROTO_PAT.match(path):
527 544 is_lfs_request = True
528 545 log.debug(
529 546 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
530 547 path, is_lfs_request)
531 548
532 549 if is_lfs_request:
533 550 app = scm_app.create_git_lfs_wsgi_app(
534 551 repo_path, repo_name, config)
535 552 else:
536 553 app = scm_app.create_git_wsgi_app(
537 554 repo_path, repo_name, config)
538 555
539 556 log.debug('http-app: starting app handler '
540 557 'with %s and process request', app)
541 558
542 559 return app(environ, start_response)
543 560
544 561 return _git_stream
545 562
546 def is_vcs_view(self, context, request):
547 """
548 View predicate that returns true if given backend is supported by
549 defined remotes.
550 """
551 backend = request.matchdict.get('backend')
552 return backend in self._remotes
553
554 563 def handle_vcs_exception(self, exception, request):
555 564 _vcs_kind = getattr(exception, '_vcs_kind', '')
556 565 if _vcs_kind == 'repo_locked':
557 566 # Get custom repo-locked status code if present.
558 567 status_code = request.headers.get('X-RC-Locked-Status-Code')
559 568 return HTTPRepoLocked(
560 569 title=exception.message, status_code=status_code)
561 570
562 571 elif _vcs_kind == 'repo_branch_protected':
563 572 # Get custom repo-branch-protected status code if present.
564 573 return HTTPRepoBranchProtected(title=exception.message)
565 574
566 575 exc_info = request.exc_info
567 576 store_exception(id(exc_info), exc_info)
568 577
569 578 traceback_info = 'unavailable'
570 579 if request.exc_info:
571 580 exc_type, exc_value, exc_tb = request.exc_info
572 581 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
573 582
574 583 log.error(
575 584 'error occurred handling this request for path: %s, \n tb: %s',
576 585 request.path, traceback_info)
577 586 raise exception
578 587
579 588
580 589 class ResponseFilter(object):
581 590
582 591 def __init__(self, start_response):
583 592 self._start_response = start_response
584 593
585 594 def __call__(self, status, response_headers, exc_info=None):
586 595 headers = tuple(
587 596 (h, v) for h, v in response_headers
588 597 if not wsgiref.util.is_hop_by_hop(h))
589 598 return self._start_response(status, headers, exc_info)
590 599
591 600
592 601 def main(global_config, **settings):
593 602 if MercurialFactory:
594 603 hgpatches.patch_largefiles_capabilities()
595 604 hgpatches.patch_subrepo_type_mapping()
596 605
597 606 app = HTTPApplication(settings=settings, global_config=global_config)
598 607 return app.wsgi_app()
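As a rough illustration of the wire format HTTPApplication.vcs_view above expects (a sketch only, not part of the commit; the repository path and method name are placeholders), a client POSTs a msgpack-packed mapping to /{backend}:

    import uuid
    import msgpack

    payload = {
        'id': 'req-1',
        'method': 'some_remote_method',  # placeholder for a method exposed by the backend remote
        'params': {
            'wire': {'path': '/srv/repos/example', 'context': str(uuid.uuid4())},
            'args': [],
            'kwargs': {},
        },
    }
    body = msgpack.packb(payload)
    # vcs_view unpacks the body, prepends the wire dict to args, dispatches to the
    # backend remote, and the msgpack renderer packs {'id': ..., 'result': ...} back.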
@@ -1,122 +1,132 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import os
20 20 import sys
21 21
22 22 import pytest
23 23
24 24 from vcsserver import subprocessio
25 25
26 26
27 27 @pytest.fixture(scope='module')
28 28 def environ():
29 29 """Delete coverage variables, as they make the tests fail."""
30 30 env = dict(os.environ)
31 31 for key in env.keys():
32 32 if key.startswith('COV_CORE_'):
33 33 del env[key]
34 34
35 35 return env
36 36
37 37
38 38 def _get_python_args(script):
39 return [sys.executable, '-c',
40 'import sys; import time; import shutil; ' + script]
39 return [sys.executable, '-c', 'import sys; import time; import shutil; ' + script]
41 40
42 41
43 42 def test_raise_exception_on_non_zero_return_code(environ):
44 43 args = _get_python_args('sys.exit(1)')
45 44 with pytest.raises(EnvironmentError):
46 45 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
47 46
48 47
49 48 def test_does_not_fail_on_non_zero_return_code(environ):
50 49 args = _get_python_args('sys.exit(1)')
51 output = ''.join(subprocessio.SubprocessIOChunker(
52 args, shell=False, fail_on_return_code=False, env=environ))
50 output = ''.join(
51 subprocessio.SubprocessIOChunker(
52 args, shell=False, fail_on_return_code=False, env=environ
53 )
54 )
53 55
54 56 assert output == ''
55 57
56 58
57 59 def test_raise_exception_on_stderr(environ):
58 60 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
59 61 with pytest.raises(EnvironmentError) as excinfo:
60 62 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
61 63
62 64 assert 'exited due to an error:\nX' in str(excinfo.value)
63 65
64 66
65 67 def test_does_not_fail_on_stderr(environ):
66 68 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
67 output = ''.join(subprocessio.SubprocessIOChunker(
68 args, shell=False, fail_on_stderr=False, env=environ))
69 output = ''.join(
70 subprocessio.SubprocessIOChunker(
71 args, shell=False, fail_on_stderr=False, env=environ
72 )
73 )
69 74
70 75 assert output == ''
71 76
72 77
73 78 @pytest.mark.parametrize('size', [1, 10**5])
74 79 def test_output_with_no_input(size, environ):
75 print type(environ)
80 print(type(environ))
76 81 data = 'X'
77 82 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
78 output = ''.join(subprocessio.SubprocessIOChunker(
79 args, shell=False, env=environ))
83 output = ''.join(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
80 84
81 85 assert output == data * size
82 86
83 87
84 88 @pytest.mark.parametrize('size', [1, 10**5])
85 89 def test_output_with_no_input_does_not_fail(size, environ):
86 90 data = 'X'
87 args = _get_python_args(
88 'sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
89 output = ''.join(subprocessio.SubprocessIOChunker(
90 args, shell=False, fail_on_return_code=False, env=environ))
91 args = _get_python_args('sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
92 output = ''.join(
93 subprocessio.SubprocessIOChunker(
94 args, shell=False, fail_on_return_code=False, env=environ
95 )
96 )
91 97
92 print len(data * size), len(output)
98 print("{} {}".format(len(data * size), len(output)))
93 99 assert output == data * size
94 100
95 101
96 102 @pytest.mark.parametrize('size', [1, 10**5])
97 103 def test_output_with_input(size, environ):
98 104 data = 'X' * size
99 105 inputstream = io.BytesIO(data)
100 106 # This acts like the cat command.
101 107 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
102 output = ''.join(subprocessio.SubprocessIOChunker(
103 args, shell=False, inputstream=inputstream, env=environ))
108 output = ''.join(
109 subprocessio.SubprocessIOChunker(
110 args, shell=False, inputstream=inputstream, env=environ
111 )
112 )
104 113
105 print len(data), len(output)
114 print("{} {}".format(len(data), len(output)))
106 115 assert output == data
107 116
108 117
109 118 @pytest.mark.parametrize('size', [1, 10**5])
110 119 def test_output_with_input_skipping_iterator(size, environ):
111 120 data = 'X' * size
112 121 inputstream = io.BytesIO(data)
113 122 # This acts like the cat command.
114 123 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
115 124
116 125 # Note: assigning the chunker makes sure that it is not deleted too early
117 126 chunker = subprocessio.SubprocessIOChunker(
118 args, shell=False, inputstream=inputstream, env=environ)
127 args, shell=False, inputstream=inputstream, env=environ
128 )
119 129 output = ''.join(chunker.output)
120 130
121 print len(data), len(output)
131 print("{} {}".format(len(data), len(output)))
122 132 assert output == data
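The tests above drive SubprocessIOChunker directly; a minimal standalone usage sketch (the command is only a harmless placeholder and assumes git is on PATH):

    import os
    from vcsserver import subprocessio

    # Iterating the chunker yields the command's stdout in chunks, as in the tests.
    chunker = subprocessio.SubprocessIOChunker(
        ['git', '--version'], shell=False, env=dict(os.environ))
    print(''.join(chunker))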