release: Merge default into stable for release preparation
marcink -
r354:c835ad40 merge stable
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.10.6
2 current_version = 4.11.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,38 +1,37 b''
1 1 syntax: glob
2 2 *.orig
3 3 *.pyc
4 4 *.swp
5 5 *.sqlite
6 6 *.tox
7 7 *.egg-info
8 8 *.egg
9 9 *.eggs
10 10 *.idea
11 11 .DS_Store*
12 12
13 13
14 14 syntax: regexp
15 15
16 16 #.filename
17 17 ^\.settings$
18 18 ^\.project$
19 19 ^\.pydevproject$
20 20 ^\.coverage$
21 21 ^\.cache.*$
22 22 ^\.rhodecode$
23 23
24 ^_dev
25 ^._dev
24 ^.dev
26 25 ^build/
27 26 ^coverage\.xml$
28 27 ^data$
29 28 ^dev.ini$
30 29 ^acceptance_tests/dev.*\.ini$
31 30 ^dist/
32 31 ^fabfile.py
33 32 ^htmlcov
34 33 ^junit\.xml$
35 34 ^node_modules/
36 35 ^pylint.log$
37 36 ^build$
38 37 ^result$
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.10.6
12 state = in_progress
13 version = 4.11.0
16 14
@@ -1,165 +1,166 b''
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 7 { pkgs ? (import <nixpkgs> {})
8 8 , pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 10 , doCheck ? true
11 11 }:
12 12
13 13 let pkgs_ = pkgs; in
14 14
15 15 let
16 16 pkgs = pkgs_.overridePackages (self: super: {
17 17 # bump GIT version
18 18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 19 name = "git-2.13.5";
20 20 src = pkgs.fetchurl {
21 21 url = "https://www.kernel.org/pub/software/scm/git/git-2.13.5.tar.xz";
22 22 sha256 = "18fi18103n7grshm4ffb0fwsnvbl48sbqy5gqx528vf8maff5j91";
23 23 };
24 24
25 25 patches = [
26 26 ./pkgs/git_patches/docbook2texi.patch
27 27 ./pkgs/git_patches/symlinks-in-bin.patch
28 28 ./pkgs/git_patches/git-sh-i18n.patch
29 29 ./pkgs/git_patches/ssh-path.patch
30 30 ];
31 31
32 32 });
33 33
34 34 # Override subversion derivation to
35 35 # - activate python bindings
36 36 subversion = let
37 37 subversionWithPython = super.subversion.override {
38 38 httpSupport = true;
39 39 pythonBindings = true;
40 40 python = self.python27Packages.python;
41 41 };
42 42
43 43 in
44 44
45 45 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
46 46 name = "subversion-1.9.7";
47 47 src = pkgs.fetchurl {
48 48 url = "https://www.apache.org/dist/subversion/subversion-1.9.7.tar.gz";
49 49 sha256 = "0g3cs2h008z8ymgkhbk54jp87bjh7y049rn42igj881yi2f20an7";
50 50 };
51 51
52 52 });
53 53
54 54 });
55 55
56 56 inherit (pkgs.lib) fix extends;
57 57 basePythonPackages = with builtins; if isAttrs pythonPackages
58 58 then pythonPackages
59 59 else getAttr pythonPackages pkgs;
60 60
61 61 elem = builtins.elem;
62 62 basename = path: with pkgs.lib; last (splitString "/" path);
63 63 startsWith = prefix: full: let
64 64 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
65 65 in actualPrefix == prefix;
66 66
67 67 src-filter = path: type: with pkgs.lib;
68 68 let
69 69 ext = last (splitString "." path);
70 70 in
71 71 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
72 72 "node_modules" "build" "data" "tmp"] &&
73 73 !elem ext ["egg-info" "pyc"] &&
74 74 !startsWith "result" path;
75 75
76 76 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
77 77
78 78 pythonGeneratedPackages = self: basePythonPackages.override (a: {
79 79 inherit self;
80 80 }) // (scopedImport {
81 81 self = self;
82 82 super = basePythonPackages;
83 83 inherit pkgs;
84 84 inherit (pkgs) fetchurl fetchgit;
85 85 } ./pkgs/python-packages.nix);
86 86
87 87 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
88 88 inherit basePythonPackages pkgs;
89 89 };
90 90
91 91 version = builtins.readFile ./vcsserver/VERSION;
92 92
93 93 pythonLocalOverrides = self: super: {
94 94 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
95 95 inherit doCheck version;
96 96
97 97 name = "rhodecode-vcsserver-${version}";
98 98 releaseName = "RhodeCodeVCSServer-${version}";
99 99 src = rhodecode-vcsserver-src;
100 100 dontStrip = true; # prevent strip, we don't need it.
101 101
102 102 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
103 103 pkgs.git
104 104 pkgs.subversion
105 105 ]);
106 106
107 107 # TODO: johbo: Make a nicer way to expose the parts. Maybe
108 108 # pkgs/default.nix?
109 109 passthru = {
110 110 pythonPackages = self;
111 111 };
112 112
113 113 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
114 114 preCheck = ''
115 115 export PATH="$out/bin:$PATH"
116 116 '';
117 117
118 118 # put custom attrs here
119 119 checkPhase = ''
120 120 runHook preCheck
121 121 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
122 122 runHook postCheck
123 123 '';
124 124
125 125 postInstall = ''
126 126 echo "Writing meta information for rccontrol to nix-support/rccontrol"
127 127 mkdir -p $out/nix-support/rccontrol
128 128 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
129 129 echo "DONE: Meta information for rccontrol written"
130 130
131 131 # python based programs need to be wrapped
132 132 ln -s ${self.pyramid}/bin/* $out/bin/
133 133 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
134 134
135 135 # Symlink version control utilities
136 136 #
136 137 # We ensure that the correct version is always available as a symlink,
137 138 # so that users calling these tools via the profile path always use
138 139 # the correct version.
140 ln -s ${self.python}/bin/python $out/bin
140 141 ln -s ${pkgs.git}/bin/git $out/bin
141 142 ln -s ${self.mercurial}/bin/hg $out/bin
142 143 ln -s ${pkgs.subversion}/bin/svn* $out/bin
143 144
144 145 for file in $out/bin/*;
145 146 do
146 147 wrapProgram $file \
147 148 --set PATH $PATH \
148 149 --set PYTHONPATH $PYTHONPATH \
149 150 --set PYTHONHASHSEED random
150 151 done
151 152
152 153 '';
153 154
154 155 });
155 156 };
156 157
157 158 # Apply all overrides and fix the final package set
158 159 myPythonPackages =
159 160 (fix
160 161 (extends pythonExternalOverrides
161 162 (extends pythonLocalOverrides
162 163 (extends pythonOverrides
163 164 pythonGeneratedPackages))));
164 165
165 166 in myPythonPackages.rhodecode-vcsserver
@@ -1,47 +1,54 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 7 { pkgs, basePythonPackages }:
8 8
9 9 let
10 10 sed = "sed -i";
11 11 in
12 12
13 13 self: super: {
14 14
15 15 subvertpy = super.subvertpy.override (attrs: {
16 16 # TODO: johbo: Remove the "or" once we drop 16.03 support
17 17 SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
18 18 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
19 19 pkgs.aprutil
20 20 pkgs.subversion
21 21 ];
22 22 preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
23 23 ${sed} -e "s/'gcc'/'clang'/" setup.py
24 24 '';
25 25 });
26 26
27 hgsubversion = super.hgsubversion.override (attrs: {
28 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
29 pkgs.sqlite
30 basePythonPackages.sqlite3
31 ];
32 });
33
27 34 mercurial = super.mercurial.override (attrs: {
28 35 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
29 36 self.python.modules.curses
30 37 ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
31 38 pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
32 39 });
33 40
34 41 pyramid = super.pyramid.override (attrs: {
35 42 postFixup = ''
36 43 wrapPythonPrograms
37 44 # TODO: johbo: "wrapPython" adds this magic line which
38 45 # confuses pserve.
39 46 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
40 47 '';
41 48 });
42 49
43 50 # Avoid that setuptools is replaced, this leads to trouble
44 51 # with buildPythonPackage.
45 52 setuptools = basePythonPackages.setuptools;
46 53
47 54 }
@@ -1,877 +1,877 b''
1 1 # Generated by pip2nix 0.4.0
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 {
5 5 Beaker = super.buildPythonPackage {
6 6 name = "Beaker-1.9.0";
7 7 buildInputs = with self; [];
8 8 doCheck = false;
9 9 propagatedBuildInputs = with self; [funcsigs];
10 10 src = fetchurl {
11 11 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
12 12 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 16 };
17 17 };
18 18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.8";
19 name = "Jinja2-2.9.6";
20 20 buildInputs = with self; [];
21 21 doCheck = false;
22 22 propagatedBuildInputs = with self; [MarkupSafe];
23 23 src = fetchurl {
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
24 url = "https://pypi.python.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
25 md5 = "6411537324b4dba0956aaa8109f3c77b";
26 26 };
27 27 meta = {
28 28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 29 };
30 30 };
31 31 Mako = super.buildPythonPackage {
32 32 name = "Mako-1.0.7";
33 33 buildInputs = with self; [];
34 34 doCheck = false;
35 35 propagatedBuildInputs = with self; [MarkupSafe];
36 36 src = fetchurl {
37 37 url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
38 38 md5 = "5836cc997b1b773ef389bf6629c30e65";
39 39 };
40 40 meta = {
41 41 license = [ pkgs.lib.licenses.mit ];
42 42 };
43 43 };
44 44 MarkupSafe = super.buildPythonPackage {
45 name = "MarkupSafe-0.23";
45 name = "MarkupSafe-1.0";
46 46 buildInputs = with self; [];
47 47 doCheck = false;
48 48 propagatedBuildInputs = with self; [];
49 49 src = fetchurl {
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
50 url = "https://pypi.python.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
51 md5 = "2fcedc9284d50e577b5192e8e3578355";
52 52 };
53 53 meta = {
54 54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 55 };
56 56 };
57 57 PasteDeploy = super.buildPythonPackage {
58 58 name = "PasteDeploy-1.5.2";
59 59 buildInputs = with self; [];
60 60 doCheck = false;
61 61 propagatedBuildInputs = with self; [];
62 62 src = fetchurl {
63 63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 64 md5 = "352b7205c78c8de4987578d19431af3b";
65 65 };
66 66 meta = {
67 67 license = [ pkgs.lib.licenses.mit ];
68 68 };
69 69 };
70 70 WebOb = super.buildPythonPackage {
71 71 name = "WebOb-1.7.4";
72 72 buildInputs = with self; [];
73 73 doCheck = false;
74 74 propagatedBuildInputs = with self; [];
75 75 src = fetchurl {
76 76 url = "https://pypi.python.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
77 77 md5 = "397e46892d7f199b1a07eb20a2d3d9bd";
78 78 };
79 79 meta = {
80 80 license = [ pkgs.lib.licenses.mit ];
81 81 };
82 82 };
83 83 WebTest = super.buildPythonPackage {
84 name = "WebTest-2.0.27";
84 name = "WebTest-2.0.29";
85 85 buildInputs = with self; [];
86 86 doCheck = false;
87 87 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
88 88 src = fetchurl {
89 url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
90 md5 = "54e6515ac71c51b6fc90179483c749ad";
89 url = "https://pypi.python.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
90 md5 = "30b4cf0d340b9a5335fac4389e6f84fc";
91 91 };
92 92 meta = {
93 93 license = [ pkgs.lib.licenses.mit ];
94 94 };
95 95 };
96 96 backports.shutil-get-terminal-size = super.buildPythonPackage {
97 97 name = "backports.shutil-get-terminal-size-1.0.0";
98 98 buildInputs = with self; [];
99 99 doCheck = false;
100 100 propagatedBuildInputs = with self; [];
101 101 src = fetchurl {
102 102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
103 103 md5 = "03267762480bd86b50580dc19dff3c66";
104 104 };
105 105 meta = {
106 106 license = [ pkgs.lib.licenses.mit ];
107 107 };
108 108 };
109 109 beautifulsoup4 = super.buildPythonPackage {
110 110 name = "beautifulsoup4-4.6.0";
111 111 buildInputs = with self; [];
112 112 doCheck = false;
113 113 propagatedBuildInputs = with self; [];
114 114 src = fetchurl {
115 115 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
116 116 md5 = "c17714d0f91a23b708a592cb3c697728";
117 117 };
118 118 meta = {
119 119 license = [ pkgs.lib.licenses.mit ];
120 120 };
121 121 };
122 122 configobj = super.buildPythonPackage {
123 123 name = "configobj-5.0.6";
124 124 buildInputs = with self; [];
125 125 doCheck = false;
126 126 propagatedBuildInputs = with self; [six];
127 127 src = fetchurl {
128 128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 130 };
131 131 meta = {
132 132 license = [ pkgs.lib.licenses.bsdOriginal ];
133 133 };
134 134 };
135 135 cov-core = super.buildPythonPackage {
136 136 name = "cov-core-1.15.0";
137 137 buildInputs = with self; [];
138 138 doCheck = false;
139 139 propagatedBuildInputs = with self; [coverage];
140 140 src = fetchurl {
141 141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
143 143 };
144 144 meta = {
145 145 license = [ pkgs.lib.licenses.mit ];
146 146 };
147 147 };
148 148 coverage = super.buildPythonPackage {
149 149 name = "coverage-3.7.1";
150 150 buildInputs = with self; [];
151 151 doCheck = false;
152 152 propagatedBuildInputs = with self; [];
153 153 src = fetchurl {
154 154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
156 156 };
157 157 meta = {
158 158 license = [ pkgs.lib.licenses.bsdOriginal ];
159 159 };
160 160 };
161 161 decorator = super.buildPythonPackage {
162 name = "decorator-4.0.11";
162 name = "decorator-4.1.2";
163 163 buildInputs = with self; [];
164 164 doCheck = false;
165 165 propagatedBuildInputs = with self; [];
166 166 src = fetchurl {
167 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
168 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
167 url = "https://pypi.python.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
168 md5 = "a0f7f4fe00ae2dde93494d90c192cf8c";
169 169 };
170 170 meta = {
171 171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 172 };
173 173 };
174 174 dulwich = super.buildPythonPackage {
175 175 name = "dulwich-0.13.0";
176 176 buildInputs = with self; [];
177 177 doCheck = false;
178 178 propagatedBuildInputs = with self; [];
179 179 src = fetchurl {
180 180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
181 181 md5 = "6dede0626657c2bd08f48ca1221eea91";
182 182 };
183 183 meta = {
184 184 license = [ pkgs.lib.licenses.gpl2Plus ];
185 185 };
186 186 };
187 187 enum34 = super.buildPythonPackage {
188 188 name = "enum34-1.1.6";
189 189 buildInputs = with self; [];
190 190 doCheck = false;
191 191 propagatedBuildInputs = with self; [];
192 192 src = fetchurl {
193 193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 194 md5 = "5f13a0841a61f7fc295c514490d120d0";
195 195 };
196 196 meta = {
197 197 license = [ pkgs.lib.licenses.bsdOriginal ];
198 198 };
199 199 };
200 200 funcsigs = super.buildPythonPackage {
201 201 name = "funcsigs-1.0.2";
202 202 buildInputs = with self; [];
203 203 doCheck = false;
204 204 propagatedBuildInputs = with self; [];
205 205 src = fetchurl {
206 206 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
207 207 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
208 208 };
209 209 meta = {
210 210 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
211 211 };
212 212 };
213 213 gevent = super.buildPythonPackage {
214 214 name = "gevent-1.2.2";
215 215 buildInputs = with self; [];
216 216 doCheck = false;
217 217 propagatedBuildInputs = with self; [greenlet];
218 218 src = fetchurl {
219 219 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
220 220 md5 = "7f0baf355384fe5ff2ecf66853422554";
221 221 };
222 222 meta = {
223 223 license = [ pkgs.lib.licenses.mit ];
224 224 };
225 225 };
226 226 gprof2dot = super.buildPythonPackage {
227 name = "gprof2dot-2016.10.13";
227 name = "gprof2dot-2017.9.19";
228 228 buildInputs = with self; [];
229 229 doCheck = false;
230 230 propagatedBuildInputs = with self; [];
231 231 src = fetchurl {
232 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
233 md5 = "0125401f15fd2afe1df686a76c64a4fd";
232 url = "https://pypi.python.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
233 md5 = "cda2d552bb0d0b9f16e6824a9aabd225";
234 234 };
235 235 meta = {
236 license = [ { fullName = "LGPL"; } ];
236 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
237 237 };
238 238 };
239 239 greenlet = super.buildPythonPackage {
240 240 name = "greenlet-0.4.12";
241 241 buildInputs = with self; [];
242 242 doCheck = false;
243 243 propagatedBuildInputs = with self; [];
244 244 src = fetchurl {
245 245 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
246 246 md5 = "e8637647d58a26c4a1f51ca393e53c00";
247 247 };
248 248 meta = {
249 249 license = [ pkgs.lib.licenses.mit ];
250 250 };
251 251 };
252 252 gunicorn = super.buildPythonPackage {
253 253 name = "gunicorn-19.7.1";
254 254 buildInputs = with self; [];
255 255 doCheck = false;
256 256 propagatedBuildInputs = with self; [];
257 257 src = fetchurl {
258 258 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
259 259 md5 = "174d3c3cd670a5be0404d84c484e590c";
260 260 };
261 261 meta = {
262 262 license = [ pkgs.lib.licenses.mit ];
263 263 };
264 264 };
265 265 hg-evolve = super.buildPythonPackage {
266 name = "hg-evolve-6.6.0";
266 name = "hg-evolve-7.0.1";
267 267 buildInputs = with self; [];
268 268 doCheck = false;
269 269 propagatedBuildInputs = with self; [];
270 270 src = fetchurl {
271 url = "https://pypi.python.org/packages/c5/04/3557c97eaa320b5a6769edade64a299cd2710f5f3b818f64991ab6c8c08f/hg-evolve-6.6.0.tar.gz";
272 md5 = "06b9a9c8e8137bbf0c4fbf940c009725";
271 url = "https://pypi.python.org/packages/92/5c/4c216be1a08f326a12076b645f4892a2b0865810db1f4a0c9648f1f4c113/hg-evolve-7.0.1.tar.gz";
272 md5 = "2dfa926846ea873a8406bababb06b277";
273 273 };
274 274 meta = {
275 275 license = [ { fullName = "GPLv2+"; } ];
276 276 };
277 277 };
278 278 hgsubversion = super.buildPythonPackage {
279 name = "hgsubversion-1.8.7";
279 name = "hgsubversion-1.9";
280 280 buildInputs = with self; [];
281 281 doCheck = false;
282 282 propagatedBuildInputs = with self; [mercurial subvertpy];
283 283 src = fetchurl {
284 url = "https://pypi.python.org/packages/1c/b8/ff4d2e0ec486f9765b410f09728c02a010e7485d68d6154968074498a403/hgsubversion-1.8.7.tar.gz";
285 md5 = "289f1c36c13bd6a3435a9be390a77bdc";
284 url = "https://pypi.python.org/packages/db/26/7293a6c6b85e2a74ab452e9ba7f00b04ff0e440e6cd4f84131ac5d5e6b22/hgsubversion-1.9.tar.gz";
285 md5 = "0c6f93ef12cc2e7fe67286f16bcc7211";
286 286 };
287 287 meta = {
288 288 license = [ pkgs.lib.licenses.gpl1 ];
289 289 };
290 290 };
291 291 hupper = super.buildPythonPackage {
292 292 name = "hupper-1.0";
293 293 buildInputs = with self; [];
294 294 doCheck = false;
295 295 propagatedBuildInputs = with self; [];
296 296 src = fetchurl {
297 297 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
298 298 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 304 infrae.cache = super.buildPythonPackage {
305 305 name = "infrae.cache-1.0.1";
306 306 buildInputs = with self; [];
307 307 doCheck = false;
308 308 propagatedBuildInputs = with self; [Beaker repoze.lru];
309 309 src = fetchurl {
310 310 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
311 311 md5 = "b09076a766747e6ed2a755cc62088e32";
312 312 };
313 313 meta = {
314 314 license = [ pkgs.lib.licenses.zpt21 ];
315 315 };
316 316 };
317 317 ipdb = super.buildPythonPackage {
318 318 name = "ipdb-0.10.3";
319 319 buildInputs = with self; [];
320 320 doCheck = false;
321 321 propagatedBuildInputs = with self; [setuptools ipython];
322 322 src = fetchurl {
323 323 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
324 324 md5 = "def1f6ac075d54bdee07e6501263d4fa";
325 325 };
326 326 meta = {
327 327 license = [ pkgs.lib.licenses.bsdOriginal ];
328 328 };
329 329 };
330 330 ipython = super.buildPythonPackage {
331 331 name = "ipython-5.1.0";
332 332 buildInputs = with self; [];
333 333 doCheck = false;
334 334 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
335 335 src = fetchurl {
336 336 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
337 337 md5 = "47c8122420f65b58784cb4b9b4af35e3";
338 338 };
339 339 meta = {
340 340 license = [ pkgs.lib.licenses.bsdOriginal ];
341 341 };
342 342 };
343 343 ipython-genutils = super.buildPythonPackage {
344 344 name = "ipython-genutils-0.2.0";
345 345 buildInputs = with self; [];
346 346 doCheck = false;
347 347 propagatedBuildInputs = with self; [];
348 348 src = fetchurl {
349 349 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
350 350 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
351 351 };
352 352 meta = {
353 353 license = [ pkgs.lib.licenses.bsdOriginal ];
354 354 };
355 355 };
356 356 mercurial = super.buildPythonPackage {
357 name = "mercurial-4.2.3";
357 name = "mercurial-4.4.2";
358 358 buildInputs = with self; [];
359 359 doCheck = false;
360 360 propagatedBuildInputs = with self; [];
361 361 src = fetchurl {
362 url = "https://www.mercurial-scm.org/release/mercurial-4.2.3.tar.gz";
363 md5 = "a24a8fab7c2ad2c65e945b1b35d94e3b";
362 url = "https://pypi.python.org/packages/d0/83/92a5fa662ba277128db305e39e7ea5a638f2f1cbbc6dc5fbf4c14aefae22/mercurial-4.4.2.tar.gz";
363 md5 = "95769125cf7e9dbc341a983253acefcd";
364 364 };
365 365 meta = {
366 366 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
367 367 };
368 368 };
369 369 mock = super.buildPythonPackage {
370 370 name = "mock-1.0.1";
371 371 buildInputs = with self; [];
372 372 doCheck = false;
373 373 propagatedBuildInputs = with self; [];
374 374 src = fetchurl {
375 375 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
376 376 md5 = "869f08d003c289a97c1a6610faf5e913";
377 377 };
378 378 meta = {
379 379 license = [ pkgs.lib.licenses.bsdOriginal ];
380 380 };
381 381 };
382 382 msgpack-python = super.buildPythonPackage {
383 383 name = "msgpack-python-0.4.8";
384 384 buildInputs = with self; [];
385 385 doCheck = false;
386 386 propagatedBuildInputs = with self; [];
387 387 src = fetchurl {
388 388 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
389 389 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
390 390 };
391 391 meta = {
392 392 license = [ pkgs.lib.licenses.asl20 ];
393 393 };
394 394 };
395 395 pathlib2 = super.buildPythonPackage {
396 396 name = "pathlib2-2.3.0";
397 397 buildInputs = with self; [];
398 398 doCheck = false;
399 399 propagatedBuildInputs = with self; [six scandir];
400 400 src = fetchurl {
401 401 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
402 402 md5 = "89c90409d11fd5947966b6a30a47d18c";
403 403 };
404 404 meta = {
405 405 license = [ pkgs.lib.licenses.mit ];
406 406 };
407 407 };
408 408 pexpect = super.buildPythonPackage {
409 name = "pexpect-4.2.1";
409 name = "pexpect-4.3.0";
410 410 buildInputs = with self; [];
411 411 doCheck = false;
412 412 propagatedBuildInputs = with self; [ptyprocess];
413 413 src = fetchurl {
414 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
415 md5 = "3694410001a99dff83f0b500a1ca1c95";
414 url = "https://pypi.python.org/packages/f8/44/5466c30e49762bb92e442bbdf4472d6904608d211258eb3198a11f0309a4/pexpect-4.3.0.tar.gz";
415 md5 = "047a486dcd26134b74f2e67046bb61a0";
416 416 };
417 417 meta = {
418 418 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
419 419 };
420 420 };
421 421 pickleshare = super.buildPythonPackage {
422 422 name = "pickleshare-0.7.4";
423 423 buildInputs = with self; [];
424 424 doCheck = false;
425 425 propagatedBuildInputs = with self; [pathlib2];
426 426 src = fetchurl {
427 427 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
428 428 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
429 429 };
430 430 meta = {
431 431 license = [ pkgs.lib.licenses.mit ];
432 432 };
433 433 };
434 434 plaster = super.buildPythonPackage {
435 name = "plaster-0.5";
435 name = "plaster-1.0";
436 436 buildInputs = with self; [];
437 437 doCheck = false;
438 438 propagatedBuildInputs = with self; [setuptools];
439 439 src = fetchurl {
440 url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
441 md5 = "c59345a67a860cfcaa1bd6a81451399d";
440 url = "https://pypi.python.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
441 md5 = "80e6beb4760c16fea31754babcc0576e";
442 442 };
443 443 meta = {
444 444 license = [ pkgs.lib.licenses.mit ];
445 445 };
446 446 };
447 447 plaster-pastedeploy = super.buildPythonPackage {
448 name = "plaster-pastedeploy-0.4.1";
448 name = "plaster-pastedeploy-0.4.2";
449 449 buildInputs = with self; [];
450 450 doCheck = false;
451 451 propagatedBuildInputs = with self; [PasteDeploy plaster];
452 452 src = fetchurl {
453 url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
454 md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
453 url = "https://pypi.python.org/packages/2c/62/0daf9c0be958e785023e583e51baac15863699e956bfb3d448898d80edd8/plaster_pastedeploy-0.4.2.tar.gz";
454 md5 = "58fd7852002909378e818c9d5b71e90a";
455 455 };
456 456 meta = {
457 457 license = [ pkgs.lib.licenses.mit ];
458 458 };
459 459 };
460 460 prompt-toolkit = super.buildPythonPackage {
461 461 name = "prompt-toolkit-1.0.15";
462 462 buildInputs = with self; [];
463 463 doCheck = false;
464 464 propagatedBuildInputs = with self; [six wcwidth];
465 465 src = fetchurl {
466 466 url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
467 467 md5 = "8fe70295006dbc8afedd43e5eba99032";
468 468 };
469 469 meta = {
470 470 license = [ pkgs.lib.licenses.bsdOriginal ];
471 471 };
472 472 };
473 473 ptyprocess = super.buildPythonPackage {
474 474 name = "ptyprocess-0.5.2";
475 475 buildInputs = with self; [];
476 476 doCheck = false;
477 477 propagatedBuildInputs = with self; [];
478 478 src = fetchurl {
479 479 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
480 480 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
481 481 };
482 482 meta = {
483 483 license = [ ];
484 484 };
485 485 };
486 486 py = super.buildPythonPackage {
487 name = "py-1.4.34";
487 name = "py-1.5.2";
488 488 buildInputs = with self; [];
489 489 doCheck = false;
490 490 propagatedBuildInputs = with self; [];
491 491 src = fetchurl {
492 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
493 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
492 url = "https://pypi.python.org/packages/90/e3/e075127d39d35f09a500ebb4a90afd10f9ef0a1d28a6d09abeec0e444fdd/py-1.5.2.tar.gz";
493 md5 = "279ca69c632069e1b71e11b14641ca28";
494 494 };
495 495 meta = {
496 496 license = [ pkgs.lib.licenses.mit ];
497 497 };
498 498 };
499 499 pygments = super.buildPythonPackage {
500 500 name = "pygments-2.2.0";
501 501 buildInputs = with self; [];
502 502 doCheck = false;
503 503 propagatedBuildInputs = with self; [];
504 504 src = fetchurl {
505 505 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
506 506 md5 = "13037baca42f16917cbd5ad2fab50844";
507 507 };
508 508 meta = {
509 509 license = [ pkgs.lib.licenses.bsdOriginal ];
510 510 };
511 511 };
512 512 pyramid = super.buildPythonPackage {
513 513 name = "pyramid-1.9.1";
514 514 buildInputs = with self; [];
515 515 doCheck = false;
516 516 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
517 517 src = fetchurl {
518 518 url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
519 519 md5 = "0163e19c58c2d12976a3b6fdb57e052d";
520 520 };
521 521 meta = {
522 522 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
523 523 };
524 524 };
525 525 pyramid-jinja2 = super.buildPythonPackage {
526 name = "pyramid-jinja2-2.5";
526 name = "pyramid-jinja2-2.7";
527 527 buildInputs = with self; [];
528 528 doCheck = false;
529 529 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
530 530 src = fetchurl {
531 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
532 md5 = "07cb6547204ac5e6f0b22a954ccee928";
531 url = "https://pypi.python.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
532 md5 = "c2f8b2cd7b73a6f1d9a311fcfaf4fb92";
533 533 };
534 534 meta = {
535 535 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 536 };
537 537 };
538 538 pyramid-mako = super.buildPythonPackage {
539 539 name = "pyramid-mako-1.0.2";
540 540 buildInputs = with self; [];
541 541 doCheck = false;
542 542 propagatedBuildInputs = with self; [pyramid Mako];
543 543 src = fetchurl {
544 544 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
545 545 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
546 546 };
547 547 meta = {
548 548 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
549 549 };
550 550 };
551 551 pytest = super.buildPythonPackage {
552 name = "pytest-3.1.2";
552 name = "pytest-3.2.5";
553 553 buildInputs = with self; [];
554 554 doCheck = false;
555 555 propagatedBuildInputs = with self; [py setuptools];
556 556 src = fetchurl {
557 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
558 md5 = "c4d179f89043cc925e1c169d03128e02";
557 url = "https://pypi.python.org/packages/1f/f8/8cd74c16952163ce0db0bd95fdd8810cbf093c08be00e6e665ebf0dc3138/pytest-3.2.5.tar.gz";
558 md5 = "6dbe9bb093883f75394a689a1426ac6f";
559 559 };
560 560 meta = {
561 561 license = [ pkgs.lib.licenses.mit ];
562 562 };
563 563 };
564 564 pytest-catchlog = super.buildPythonPackage {
565 565 name = "pytest-catchlog-1.2.2";
566 566 buildInputs = with self; [];
567 567 doCheck = false;
568 568 propagatedBuildInputs = with self; [py pytest];
569 569 src = fetchurl {
570 570 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
571 571 md5 = "09d890c54c7456c818102b7ff8c182c8";
572 572 };
573 573 meta = {
574 574 license = [ pkgs.lib.licenses.mit ];
575 575 };
576 576 };
577 577 pytest-cov = super.buildPythonPackage {
578 578 name = "pytest-cov-2.5.1";
579 579 buildInputs = with self; [];
580 580 doCheck = false;
581 581 propagatedBuildInputs = with self; [pytest coverage];
582 582 src = fetchurl {
583 583 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 584 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
585 585 };
586 586 meta = {
587 587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 588 };
589 589 };
590 590 pytest-profiling = super.buildPythonPackage {
591 name = "pytest-profiling-1.2.6";
591 name = "pytest-profiling-1.2.11";
592 592 buildInputs = with self; [];
593 593 doCheck = false;
594 594 propagatedBuildInputs = with self; [six pytest gprof2dot];
595 595 src = fetchurl {
596 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
597 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
596 url = "https://pypi.python.org/packages/c0/4a/b4aa786e93c07a86f1f87c581a36bf355a9e06a9da7e00dbd05047626bd2/pytest-profiling-1.2.11.tar.gz";
597 md5 = "9ef6b60248731be5d44477980408e8f7";
598 598 };
599 599 meta = {
600 600 license = [ pkgs.lib.licenses.mit ];
601 601 };
602 602 };
603 603 pytest-runner = super.buildPythonPackage {
604 name = "pytest-runner-2.11.1";
604 name = "pytest-runner-3.0";
605 605 buildInputs = with self; [];
606 606 doCheck = false;
607 607 propagatedBuildInputs = with self; [];
608 608 src = fetchurl {
609 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
610 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
609 url = "https://pypi.python.org/packages/65/b4/ae89338cd2d81e2cc54bd6db2e962bfe948f612303610d68ab24539ac2d1/pytest-runner-3.0.tar.gz";
610 md5 = "8f8363a52bbabc4cedd5e239beb2ba11";
611 611 };
612 612 meta = {
613 613 license = [ pkgs.lib.licenses.mit ];
614 614 };
615 615 };
616 616 pytest-sugar = super.buildPythonPackage {
617 name = "pytest-sugar-0.8.0";
617 name = "pytest-sugar-0.9.0";
618 618 buildInputs = with self; [];
619 619 doCheck = false;
620 620 propagatedBuildInputs = with self; [pytest termcolor];
621 621 src = fetchurl {
622 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
623 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
622 url = "https://pypi.python.org/packages/49/d8/c5ff6cca3ce2ebd8b73eec89779bf6b4a7737456a70e8ea4d44c1ff90f71/pytest-sugar-0.9.0.tar.gz";
623 md5 = "89fbff17277fa6a95a560a04b68cb9f9";
624 624 };
625 625 meta = {
626 626 license = [ pkgs.lib.licenses.bsdOriginal ];
627 627 };
628 628 };
629 629 pytest-timeout = super.buildPythonPackage {
630 630 name = "pytest-timeout-1.2.0";
631 631 buildInputs = with self; [];
632 632 doCheck = false;
633 633 propagatedBuildInputs = with self; [pytest];
634 634 src = fetchurl {
635 635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
636 636 md5 = "83607d91aa163562c7ee835da57d061d";
637 637 };
638 638 meta = {
639 639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
640 640 };
641 641 };
642 642 repoze.lru = super.buildPythonPackage {
643 name = "repoze.lru-0.6";
643 name = "repoze.lru-0.7";
644 644 buildInputs = with self; [];
645 645 doCheck = false;
646 646 propagatedBuildInputs = with self; [];
647 647 src = fetchurl {
648 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
649 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
648 url = "https://pypi.python.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
649 md5 = "c08cc030387e0b1fc53c5c7d964b35e2";
650 650 };
651 651 meta = {
652 652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
653 653 };
654 654 };
655 655 rhodecode-vcsserver = super.buildPythonPackage {
656 name = "rhodecode-vcsserver-4.10.6";
656 name = "rhodecode-vcsserver-4.11.0";
657 657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
658 658 doCheck = true;
659 659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
660 660 src = ./.;
661 661 meta = {
662 662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
663 663 };
664 664 };
665 665 scandir = super.buildPythonPackage {
666 name = "scandir-1.5";
666 name = "scandir-1.6";
667 667 buildInputs = with self; [];
668 668 doCheck = false;
669 669 propagatedBuildInputs = with self; [];
670 670 src = fetchurl {
671 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
672 md5 = "a2713043de681bba6b084be42e7a8a44";
671 url = "https://pypi.python.org/packages/77/3f/916f524f50ee65e3f465a280d2851bd63685250fddb3020c212b3977664d/scandir-1.6.tar.gz";
672 md5 = "0180ddb97c96cbb2d4f25d2ae11c64ac";
673 673 };
674 674 meta = {
675 675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 676 };
677 677 };
678 678 setuptools = super.buildPythonPackage {
679 679 name = "setuptools-30.1.0";
680 680 buildInputs = with self; [];
681 681 doCheck = false;
682 682 propagatedBuildInputs = with self; [];
683 683 src = fetchurl {
684 684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
685 685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
686 686 };
687 687 meta = {
688 688 license = [ pkgs.lib.licenses.mit ];
689 689 };
690 690 };
691 691 simplegeneric = super.buildPythonPackage {
692 692 name = "simplegeneric-0.8.1";
693 693 buildInputs = with self; [];
694 694 doCheck = false;
695 695 propagatedBuildInputs = with self; [];
696 696 src = fetchurl {
697 697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
698 698 md5 = "f9c1fab00fd981be588fc32759f474e3";
699 699 };
700 700 meta = {
701 701 license = [ pkgs.lib.licenses.zpt21 ];
702 702 };
703 703 };
704 704 simplejson = super.buildPythonPackage {
705 705 name = "simplejson-3.11.1";
706 706 buildInputs = with self; [];
707 707 doCheck = false;
708 708 propagatedBuildInputs = with self; [];
709 709 src = fetchurl {
710 710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
711 711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
712 712 };
713 713 meta = {
714 714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
715 715 };
716 716 };
717 717 six = super.buildPythonPackage {
718 name = "six-1.9.0";
718 name = "six-1.11.0";
719 719 buildInputs = with self; [];
720 720 doCheck = false;
721 721 propagatedBuildInputs = with self; [];
722 722 src = fetchurl {
723 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
724 md5 = "476881ef4012262dfc8adc645ee786c4";
723 url = "https://pypi.python.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
724 md5 = "d12789f9baf7e9fb2524c0c64f1773f8";
725 725 };
726 726 meta = {
727 727 license = [ pkgs.lib.licenses.mit ];
728 728 };
729 729 };
730 730 subprocess32 = super.buildPythonPackage {
731 731 name = "subprocess32-3.2.7";
732 732 buildInputs = with self; [];
733 733 doCheck = false;
734 734 propagatedBuildInputs = with self; [];
735 735 src = fetchurl {
736 736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
737 737 md5 = "824c801e479d3e916879aae3e9c15e16";
738 738 };
739 739 meta = {
740 740 license = [ pkgs.lib.licenses.psfl ];
741 741 };
742 742 };
743 743 subvertpy = super.buildPythonPackage {
744 name = "subvertpy-0.9.3";
744 name = "subvertpy-0.10.1";
745 745 buildInputs = with self; [];
746 746 doCheck = false;
747 747 propagatedBuildInputs = with self; [];
748 748 src = fetchurl {
749 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
750 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
749 url = "https://pypi.python.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
750 md5 = "a70e03579902d480f5e9f8c570f6536b";
751 751 };
752 752 meta = {
753 license = [ pkgs.lib.licenses.lgpl21Plus ];
753 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
754 754 };
755 755 };
756 756 termcolor = super.buildPythonPackage {
757 757 name = "termcolor-1.1.0";
758 758 buildInputs = with self; [];
759 759 doCheck = false;
760 760 propagatedBuildInputs = with self; [];
761 761 src = fetchurl {
762 762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
763 763 md5 = "043e89644f8909d462fbbfa511c768df";
764 764 };
765 765 meta = {
766 766 license = [ pkgs.lib.licenses.mit ];
767 767 };
768 768 };
769 769 traitlets = super.buildPythonPackage {
770 770 name = "traitlets-4.3.2";
771 771 buildInputs = with self; [];
772 772 doCheck = false;
773 773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
774 774 src = fetchurl {
775 775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
776 776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
777 777 };
778 778 meta = {
779 779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 780 };
781 781 };
782 782 translationstring = super.buildPythonPackage {
783 783 name = "translationstring-1.3";
784 784 buildInputs = with self; [];
785 785 doCheck = false;
786 786 propagatedBuildInputs = with self; [];
787 787 src = fetchurl {
788 788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
789 789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
790 790 };
791 791 meta = {
792 792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
793 793 };
794 794 };
795 795 venusian = super.buildPythonPackage {
796 796 name = "venusian-1.1.0";
797 797 buildInputs = with self; [];
798 798 doCheck = false;
799 799 propagatedBuildInputs = with self; [];
800 800 src = fetchurl {
801 801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
802 802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
803 803 };
804 804 meta = {
805 805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
806 806 };
807 807 };
808 808 waitress = super.buildPythonPackage {
809 name = "waitress-1.0.2";
809 name = "waitress-1.1.0";
810 810 buildInputs = with self; [];
811 811 doCheck = false;
812 812 propagatedBuildInputs = with self; [];
813 813 src = fetchurl {
814 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
815 md5 = "b968f39e95d609f6194c6e50425d4bb7";
814 url = "https://pypi.python.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
815 md5 = "0f1eb7fdfdbf2e6d18decbda1733045c";
816 816 };
817 817 meta = {
818 818 license = [ pkgs.lib.licenses.zpt21 ];
819 819 };
820 820 };
821 821 wcwidth = super.buildPythonPackage {
822 822 name = "wcwidth-0.1.7";
823 823 buildInputs = with self; [];
824 824 doCheck = false;
825 825 propagatedBuildInputs = with self; [];
826 826 src = fetchurl {
827 827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
828 828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
829 829 };
830 830 meta = {
831 831 license = [ pkgs.lib.licenses.mit ];
832 832 };
833 833 };
834 834 wheel = super.buildPythonPackage {
835 835 name = "wheel-0.29.0";
836 836 buildInputs = with self; [];
837 837 doCheck = false;
838 838 propagatedBuildInputs = with self; [];
839 839 src = fetchurl {
840 840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
841 841 md5 = "555a67e4507cedee23a0deb9651e452f";
842 842 };
843 843 meta = {
844 844 license = [ pkgs.lib.licenses.mit ];
845 845 };
846 846 };
847 847 zope.deprecation = super.buildPythonPackage {
848 848 name = "zope.deprecation-4.1.2";
849 849 buildInputs = with self; [];
850 850 doCheck = false;
851 851 propagatedBuildInputs = with self; [setuptools];
852 852 src = fetchurl {
853 853 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
854 854 md5 = "e9a663ded58f4f9f7881beb56cae2782";
855 855 };
856 856 meta = {
857 857 license = [ pkgs.lib.licenses.zpt21 ];
858 858 };
859 859 };
860 860 zope.interface = super.buildPythonPackage {
861 861 name = "zope.interface-4.1.3";
862 862 buildInputs = with self; [];
863 863 doCheck = false;
864 864 propagatedBuildInputs = with self; [setuptools];
865 865 src = fetchurl {
866 866 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
867 867 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
868 868 };
869 869 meta = {
870 870 license = [ pkgs.lib.licenses.zpt21 ];
871 871 };
872 872 };
873 873
874 874 ### Test requirements
875 875
876 876
877 877 }
@@ -1,41 +1,40 b''
1 1 ## core
2 2 setuptools==30.1.0
3 3
4 4 Beaker==1.9.0
5 5 configobj==5.0.6
6 decorator==4.0.11
6 decorator==4.1.2
7 7 dulwich==0.13.0
8 hgsubversion==1.8.7
9 hg-evolve==6.6.0
8 hgsubversion==1.9.0
9 hg-evolve==7.0.1
10 10 infrae.cache==1.0.1
11 mercurial==4.2.3
11 mercurial==4.4.2
12 12 msgpack-python==0.4.8
13 pyramid-jinja2==2.5
13 pyramid-jinja2==2.7
14 14 pyramid==1.9.1
15 15 pyramid-mako==1.0.2
16 repoze.lru==0.6
16 repoze.lru==0.7
17 17 simplejson==3.11.1
18 18 subprocess32==3.2.7
19 19
20 # Custom subvertpy that is not available on pypi.
21 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
20 subvertpy==0.10.1
22 21
23 six==1.9.0
22 six==1.11.0
24 23 translationstring==1.3
25 24 WebOb==1.7.4
26 25 wheel==0.29.0
27 26 zope.deprecation==4.1.2
28 27 zope.interface==4.1.3
29 28
30 29 ## http servers
31 30 gevent==1.2.2
32 31 greenlet==0.4.12
33 32 gunicorn==19.7.1
34 waitress==1.0.2
33 waitress==1.1.0
35 34
36 35 ## debug
37 36 ipdb==0.10.3
38 37 ipython==5.1.0
39 38
40 39 ## test related requirements
41 40 -r requirements_test.txt
@@ -1,15 +1,15 b''
1 1 # test related requirements
2 pytest==3.1.2
3 py==1.4.34
2 pytest==3.2.5
3 py==1.5.2
4 4 pytest-cov==2.5.1
5 pytest-sugar==0.8.0
6 pytest-runner==2.11.1
5 pytest-sugar==0.9.0
6 pytest-runner==3.0.0
7 7 pytest-catchlog==1.2.2
8 pytest-profiling==1.2.6
9 gprof2dot==2016.10.13
8 pytest-profiling==1.2.11
9 gprof2dot==2017.9.19
10 10 pytest-timeout==1.2.0
11 11
12 12 mock==1.0.1
13 WebTest==2.0.27
13 WebTest==2.0.29
14 14 cov-core==1.15.0
15 15 coverage==3.7.1
@@ -1,1 +1,1 b''
1 4.10.6
\ No newline at end of file
1 4.11.0
\ No newline at end of file
@@ -1,21 +1,21 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import pkgutil
19 19
20 20
21 21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
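
Note: the pkgutil.get_data() call above reads the packaged vcsserver/VERSION file, which is the same file the [bumpversion:file:vcsserver/VERSION] section of the bumpversion config rewrites and which this release flips from 4.10.6 to 4.11.0. A minimal, illustrative sketch of what that assignment resolves to after the bump:

    import pkgutil

    # VERSION ships inside the vcsserver package and is rewritten by bumpversion.
    version = pkgutil.get_data('vcsserver', 'VERSION').strip()
    assert version == '4.11.0'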
@@ -1,98 +1,98 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import sys
19 19 import traceback
20 20 import logging
21 21 import urlparse
22 22
23 23 log = logging.getLogger(__name__)
24 24
25 25
26 26 class RepoFactory(object):
27 27 """
27 27 Utility to create repository instances
29 29
30 30 It provides internal caching of the `repo` object based on
31 31 the :term:`call context`.
32 32 """
33 33
34 34 def __init__(self, repo_cache):
35 35 self._cache = repo_cache
36 36
37 37 def _create_config(self, path, config):
38 38 config = {}
39 39 return config
40 40
41 41 def _create_repo(self, wire, create):
42 42 raise NotImplementedError()
43 43
44 44 def repo(self, wire, create=False):
45 45 """
46 46 Get a repository instance for the given path.
47 47
48 48 Internally uses the low-level Beaker API, since the decorators introduce
49 49 significant overhead.
50 50 """
51 51 def create_new_repo():
52 52 return self._create_repo(wire, create)
53 53
54 54 return self._repo(wire, create_new_repo)
55 55
56 56 def _repo(self, wire, createfunc):
57 57 context = wire.get('context', None)
58 58 cache = wire.get('cache', True)
59 59
60 60 if context and cache:
61 61 cache_key = (context, wire['path'])
62 62 log.debug(
63 63 'FETCH %s@%s repo object from cache. Context: %s',
64 64 self.__class__.__name__, wire['path'], context)
65 65 return self._cache.get(key=cache_key, createfunc=createfunc)
66 66 else:
67 67 log.debug(
68 68 'INIT %s@%s repo object based on wire %s. Context: %s',
69 69 self.__class__.__name__, wire['path'], wire, context)
70 70 return createfunc()
71 71
72 72
73 73 def obfuscate_qs(query_string):
74 74 if query_string is None:
75 75 return None
76 76
77 77 parsed = []
78 78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
79 79 if k in ['auth_token', 'api_key']:
80 80 v = "*****"
81 81 parsed.append((k, v))
82 82
83 83 return '&'.join('{}{}'.format(
84 84 k, '={}'.format(v) if v else '') for k, v in parsed)
85 85
86 86
87 87 def raise_from_original(new_type):
88 88 """
89 89 Raise a new exception type with original args and traceback.
90 90 """
91 91 exc_type, exc_value, exc_traceback = sys.exc_info()
92 92
93 93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94 94
95 95 try:
96 96 raise new_type(*exc_value.args), None, exc_traceback
97 97 finally:
98 98 del exc_traceback
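
The obfuscate_qs() helper above masks credential-bearing query-string parameters (auth_token, api_key) before the string is reassembled, presumably so that logged URLs do not leak secrets. A minimal usage sketch with a made-up query string (not taken from this diff):

    from vcsserver.base import obfuscate_qs

    # Sensitive values are replaced with asterisks; blank values lose the '='.
    qs = 'auth_token=secret123&cmd=pull&key='
    print(obfuscate_qs(qs))  # -> auth_token=*****&cmd=pull&key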
@@ -1,70 +1,70 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 27 import functools
28 28 from pyramid.httpexceptions import HTTPLocked
29 29
30 30
31 31 def _make_exception(kind, *args):
32 32 """
33 33 Prepares a base `Exception` instance to be sent over the wire.
34 34
35 35 To give our caller a hint what this is about, it will attach an attribute
36 36 `_vcs_kind` to the exception.
37 37 """
38 38 exc = Exception(*args)
39 39 exc._vcs_kind = kind
40 40 return exc
41 41
42 42
43 43 AbortException = functools.partial(_make_exception, 'abort')
44 44
45 45 ArchiveException = functools.partial(_make_exception, 'archive')
46 46
47 47 LookupException = functools.partial(_make_exception, 'lookup')
48 48
49 49 VcsException = functools.partial(_make_exception, 'error')
50 50
51 51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
52 52
53 53 RequirementException = functools.partial(_make_exception, 'requirement')
54 54
55 55 UnhandledException = functools.partial(_make_exception, 'unhandled')
56 56
57 57 URLError = functools.partial(_make_exception, 'url_error')
58 58
59 59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
60 60
61 61
62 62 class HTTPRepoLocked(HTTPLocked):
63 63 """
64 64 Subclass of the HTTPLocked response that allows setting the title and status
65 65 code via constructor arguments.
66 66 """
67 67 def __init__(self, title, status_code=None, **kwargs):
68 68 self.code = status_code or HTTPLocked.code
69 69 self.title = title
70 70 super(HTTPRepoLocked, self).__init__(**kwargs)
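
A hedged sketch of how a caller on the other side of the wire might consume these exceptions: since only plain Exception instances travel, the client inspects the `_vcs_kind` attribute rather than the class. All names below are illustrative.

    try:
        remote_call()                      # hypothetical RPC into the vcsserver
    except Exception as exc:
        kind = getattr(exc, '_vcs_kind', None)
        if kind == 'repo_locked':
            handle_locked_repo(exc)        # hypothetical handler
        elif kind == 'lookup':
            handle_missing_commit(exc)     # hypothetical handler
        else:
            raise
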
@@ -1,645 +1,658 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 import traceback
23 24 import urllib
24 25 import urllib2
25 26 from functools import wraps
26 27
27 28 from dulwich import index, objects
28 29 from dulwich.client import HttpGitClient, LocalGitClient
29 30 from dulwich.errors import (
30 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 32 MissingCommitError, ObjectMissing, HangupException,
32 33 UnexpectedCommandError)
33 34 from dulwich.repo import Repo as DulwichRepo, Tag
34 35 from dulwich.server import update_server_info
35 36
36 37 from vcsserver import exceptions, settings, subprocessio
37 38 from vcsserver.utils import safe_str
38 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 40 from vcsserver.hgcompat import (
40 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 42 from vcsserver.git_lfs.lib import LFSOidStore
42 43
43 44 DIR_STAT = stat.S_IFDIR
44 45 FILE_MODE = stat.S_IFMT
45 46 GIT_LINK = objects.S_IFGITLINK
46 47
47 48 log = logging.getLogger(__name__)
48 49
49 50
50 51 def reraise_safe_exceptions(func):
51 52 """Converts Dulwich exceptions to something neutral."""
52 53 @wraps(func)
53 54 def wrapper(*args, **kwargs):
54 55 try:
55 56 return func(*args, **kwargs)
56 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 58 ObjectMissing) as e:
58 59 raise exceptions.LookupException(e.message)
59 60 except (HangupException, UnexpectedCommandError) as e:
60 61 raise exceptions.VcsException(e.message)
61 62 except Exception as e:
62 63 # NOTE(marcink): because of how dulwich handles some exceptions
63 64 # (KeyError on empty repos), we cannot track this and catch all
64 65 # exceptions; these may be exceptions raised by other handlers
65 66 #if not hasattr(e, '_vcs_kind'):
66 67 #log.exception("Unhandled exception in git remote call")
67 68 #raise_from_original(exceptions.UnhandledException)
68 69 raise
69 70 return wrapper
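
A minimal sketch, under the assumption that Dulwich's ObjectMissing is raised inside the wrapped call, showing how the decorator converts it into a neutral lookup error carrying `_vcs_kind`:

    from dulwich.errors import ObjectMissing

    @reraise_safe_exceptions
    def lookup_object():
        raise ObjectMissing('deadbeef')    # simulated backend failure

    try:
        lookup_object()
    except Exception as exc:
        assert getattr(exc, '_vcs_kind', None) == 'lookup'
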
70 71
71 72
72 73 class Repo(DulwichRepo):
73 74 """
74 75 A wrapper for dulwich Repo class.
75 76
76 77 Since dulwich sometimes keeps .idx file descriptors open, this can lead to a
77 78 "Too many open files" error. We need to close all opened file descriptors
78 79 once the repo object is destroyed.
79 80
80 81 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 82 to 0.12.0 +
82 83 """
83 84 def __del__(self):
84 85 if hasattr(self, 'object_store'):
85 86 self.close()
86 87
87 88
88 89 class GitFactory(RepoFactory):
89 90
90 91 def _create_repo(self, wire, create):
91 92 repo_path = str_to_dulwich(wire['path'])
92 93 return Repo(repo_path)
93 94
94 95
95 96 class GitRemote(object):
96 97
97 98 def __init__(self, factory):
98 99 self._factory = factory
99 100
100 101 self._bulk_methods = {
101 102 "author": self.commit_attribute,
102 103 "date": self.get_object_attrs,
103 104 "message": self.commit_attribute,
104 105 "parents": self.commit_attribute,
105 106 "_commit": self.revision,
106 107 }
107 108
108 109 def _wire_to_config(self, wire):
109 110 if 'config' in wire:
110 111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 112 return {}
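
An illustrative sketch of the flattening done by _wire_to_config; the wire payload below is hypothetical but mirrors how vcs_git_lfs_store_location is looked up further down:

    wire = {'path': '/srv/repos/foo',
            'config': [('vcs_git_lfs', 'store_location', '/var/lfs-store')]}
    # _wire_to_config(wire) -> {'vcs_git_lfs_store_location': '/var/lfs-store'}
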
112 113
113 114 def _assign_ref(self, wire, ref, commit_id):
114 115 repo = self._factory.repo(wire)
115 116 repo[ref] = commit_id
116 117
117 118 @reraise_safe_exceptions
118 119 def add_object(self, wire, content):
119 120 repo = self._factory.repo(wire)
120 121 blob = objects.Blob()
121 122 blob.set_raw_string(content)
122 123 repo.object_store.add_object(blob)
123 124 return blob.id
124 125
125 126 @reraise_safe_exceptions
126 127 def assert_correct_path(self, wire):
128 path = wire.get('path')
127 129 try:
128 130 self._factory.repo(wire)
129 131 except NotGitRepository as e:
130 # Exception can contain unicode which we convert
131 raise exceptions.AbortException(repr(e))
132 tb = traceback.format_exc()
133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 return False
135
136 return True
132 137
133 138 @reraise_safe_exceptions
134 139 def bare(self, wire):
135 140 repo = self._factory.repo(wire)
136 141 return repo.bare
137 142
138 143 @reraise_safe_exceptions
139 144 def blob_as_pretty_string(self, wire, sha):
140 145 repo = self._factory.repo(wire)
141 146 return repo[sha].as_pretty_string()
142 147
143 148 @reraise_safe_exceptions
144 149 def blob_raw_length(self, wire, sha):
145 150 repo = self._factory.repo(wire)
146 151 blob = repo[sha]
147 152 return blob.raw_length()
148 153
149 154 def _parse_lfs_pointer(self, raw_content):
150 155
151 156 spec_string = 'version https://git-lfs.github.com/spec'
152 157 if raw_content and raw_content.startswith(spec_string):
153 158 pattern = re.compile(r"""
154 159 (?:\n)?
155 160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 162 ^size[ ](?P<oid_size>[0-9]+)\n
158 163 (?:\n)?
159 164 """, re.VERBOSE | re.MULTILINE)
160 165 match = pattern.match(raw_content)
161 166 if match:
162 167 return match.groupdict()
163 168
164 169 return {}
165 170
166 171 @reraise_safe_exceptions
167 172 def is_large_file(self, wire, sha):
168 173 repo = self._factory.repo(wire)
169 174 blob = repo[sha]
170 175 return self._parse_lfs_pointer(blob.as_raw_string())
171 176
172 177 @reraise_safe_exceptions
173 178 def in_largefiles_store(self, wire, oid):
174 179 repo = self._factory.repo(wire)
175 180 conf = self._wire_to_config(wire)
176 181
177 182 store_location = conf.get('vcs_git_lfs_store_location')
178 183 if store_location:
179 184 repo_name = repo.path
180 185 store = LFSOidStore(
181 186 oid=oid, repo=repo_name, store_location=store_location)
182 187 return store.has_oid()
183 188
184 189 return False
185 190
186 191 @reraise_safe_exceptions
187 192 def store_path(self, wire, oid):
188 193 repo = self._factory.repo(wire)
189 194 conf = self._wire_to_config(wire)
190 195
191 196 store_location = conf.get('vcs_git_lfs_store_location')
192 197 if store_location:
193 198 repo_name = repo.path
194 199 store = LFSOidStore(
195 200 oid=oid, repo=repo_name, store_location=store_location)
196 201 return store.oid_path
197 202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198 203
199 204 @reraise_safe_exceptions
200 205 def bulk_request(self, wire, rev, pre_load):
201 206 result = {}
202 207 for attr in pre_load:
203 208 try:
204 209 method = self._bulk_methods[attr]
205 210 args = [wire, rev]
206 211 if attr == "date":
207 212 args.extend(["commit_time", "commit_timezone"])
208 213 elif attr in ["author", "message", "parents"]:
209 214 args.append(attr)
210 215 result[attr] = method(*args)
211 216 except KeyError:
212 217 raise exceptions.VcsException(
213 218 "Unknown bulk attribute: %s" % attr)
214 219 return result
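
An illustrative call, assuming `remote` is a GitRemote instance and `wire`/`commit_sha` are valid; only keys present in _bulk_methods are accepted:

    data = remote.bulk_request(
        wire, commit_sha, pre_load=['author', 'date', 'message'])
    # -> {'author': '...', 'date': [commit_time, commit_timezone], 'message': '...'}
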
215 220
216 221 def _build_opener(self, url):
217 222 handlers = []
218 223 url_obj = url_parser(url)
219 224 _, authinfo = url_obj.authinfo()
220 225
221 226 if authinfo:
222 227 # create a password manager
223 228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
224 229 passmgr.add_password(*authinfo)
225 230
226 231 handlers.extend((httpbasicauthhandler(passmgr),
227 232 httpdigestauthhandler(passmgr)))
228 233
229 234 return urllib2.build_opener(*handlers)
230 235
231 236 @reraise_safe_exceptions
232 237 def check_url(self, url, config):
233 238 url_obj = url_parser(url)
234 239 test_uri, _ = url_obj.authinfo()
235 240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
236 241 url_obj.query = obfuscate_qs(url_obj.query)
237 242 cleaned_uri = str(url_obj)
238 243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
239 244
240 245 if not test_uri.endswith('info/refs'):
241 246 test_uri = test_uri.rstrip('/') + '/info/refs'
242 247
243 248 o = self._build_opener(url)
244 249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
245 250
246 251 q = {"service": 'git-upload-pack'}
247 252 qs = '?%s' % urllib.urlencode(q)
248 253 cu = "%s%s" % (test_uri, qs)
249 254 req = urllib2.Request(cu, None, {})
250 255
251 256 try:
252 257 log.debug("Trying to open URL %s", cleaned_uri)
253 258 resp = o.open(req)
254 259 if resp.code != 200:
255 260 raise exceptions.URLError('Return Code is not 200')
256 261 except Exception as e:
257 262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
258 263 # means it cannot be cloned
259 264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
260 265
261 266 # now detect if it's proper git repo
262 267 gitdata = resp.read()
263 268 if 'service=git-upload-pack' in gitdata:
264 269 pass
265 270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
266 271 # old-style git can return some other format!
267 272 pass
268 273 else:
269 274 raise exceptions.URLError(
270 275 "url [%s] does not look like a git repository" % (cleaned_uri,))
271 276
272 277 return True
273 278
274 279 @reraise_safe_exceptions
275 280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
276 281 remote_refs = self.fetch(wire, url, apply_refs=False)
277 282 repo = self._factory.repo(wire)
278 283 if isinstance(valid_refs, list):
279 284 valid_refs = tuple(valid_refs)
280 285
281 286 for k in remote_refs:
282 287 # only parse heads/tags and skip so called deferred tags
283 288 if k.startswith(valid_refs) and not k.endswith(deferred):
284 289 repo[k] = remote_refs[k]
285 290
286 291 if update_after_clone:
287 292 # we want to checkout HEAD
288 293 repo["HEAD"] = remote_refs["HEAD"]
289 294 index.build_index_from_tree(repo.path, repo.index_path(),
290 295 repo.object_store, repo["HEAD"].tree)
291 296
292 297 # TODO: this is quite complex, check if that can be simplified
293 298 @reraise_safe_exceptions
294 299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
295 300 repo = self._factory.repo(wire)
296 301 object_store = repo.object_store
297 302
298 303 # Create tree and populates it with blobs
299 304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
300 305
301 306 for node in updated:
302 307 # Compute subdirs if needed
303 308 dirpath, nodename = vcspath.split(node['path'])
304 309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
305 310 parent = commit_tree
306 311 ancestors = [('', parent)]
307 312
308 313 # Tries to dig for the deepest existing tree
309 314 while dirnames:
310 315 curdir = dirnames.pop(0)
311 316 try:
312 317 dir_id = parent[curdir][1]
313 318 except KeyError:
314 319 # put curdir back into dirnames and stops
315 320 dirnames.insert(0, curdir)
316 321 break
317 322 else:
318 323 # If found, updates parent
319 324 parent = repo[dir_id]
320 325 ancestors.append((curdir, parent))
321 326 # Now parent is deepest existing tree and we need to create
322 327 # subtrees for dirnames (in reverse order)
323 328 # [this only applies for nodes from added]
324 329 new_trees = []
325 330
326 331 blob = objects.Blob.from_string(node['content'])
327 332
328 333 if dirnames:
329 334 # If there are trees which should be created we need to build
330 335 # them now (in reverse order)
331 336 reversed_dirnames = list(reversed(dirnames))
332 337 curtree = objects.Tree()
333 338 curtree[node['node_path']] = node['mode'], blob.id
334 339 new_trees.append(curtree)
335 340 for dirname in reversed_dirnames[:-1]:
336 341 newtree = objects.Tree()
337 342 newtree[dirname] = (DIR_STAT, curtree.id)
338 343 new_trees.append(newtree)
339 344 curtree = newtree
340 345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
341 346 else:
342 347 parent.add(
343 348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
344 349
345 350 new_trees.append(parent)
346 351 # Update ancestors
347 352 reversed_ancestors = reversed(
348 353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
349 354 for parent, tree, path in reversed_ancestors:
350 355 parent[path] = (DIR_STAT, tree.id)
351 356 object_store.add_object(tree)
352 357
353 358 object_store.add_object(blob)
354 359 for tree in new_trees:
355 360 object_store.add_object(tree)
356 361
357 362 for node_path in removed:
358 363 paths = node_path.split('/')
359 364 tree = commit_tree
360 365 trees = [tree]
361 366 # Traverse deep into the forest...
362 367 for path in paths:
363 368 try:
364 369 obj = repo[tree[path][1]]
365 370 if isinstance(obj, objects.Tree):
366 371 trees.append(obj)
367 372 tree = obj
368 373 except KeyError:
369 374 break
370 375 # Cut down the blob and all rotten trees on the way back...
371 376 for path, tree in reversed(zip(paths, trees)):
372 377 del tree[path]
373 378 if tree:
374 379 # This tree still has elements - don't remove it or any
375 380 # of its parents
376 381 break
377 382
378 383 object_store.add_object(commit_tree)
379 384
380 385 # Create commit
381 386 commit = objects.Commit()
382 387 commit.tree = commit_tree.id
383 388 for k, v in commit_data.iteritems():
384 389 setattr(commit, k, v)
385 390 object_store.add_object(commit)
386 391
387 392 ref = 'refs/heads/%s' % branch
388 393 repo.refs[ref] = commit.id
389 394
390 395 return commit.id
391 396
392 397 @reraise_safe_exceptions
393 398 def fetch(self, wire, url, apply_refs=True, refs=None):
394 399 if url != 'default' and '://' not in url:
395 400 client = LocalGitClient(url)
396 401 else:
397 402 url_obj = url_parser(url)
398 403 o = self._build_opener(url)
399 404 url, _ = url_obj.authinfo()
400 405 client = HttpGitClient(base_url=url, opener=o)
401 406 repo = self._factory.repo(wire)
402 407
403 408 determine_wants = repo.object_store.determine_wants_all
404 409 if refs:
405 410 def determine_wants_requested(references):
406 411 return [references[r] for r in references if r in refs]
407 412 determine_wants = determine_wants_requested
408 413
409 414 try:
410 415 remote_refs = client.fetch(
411 416 path=url, target=repo, determine_wants=determine_wants)
412 417 except NotGitRepository as e:
413 418 log.warning(
414 419 'Trying to fetch from "%s" failed, not a Git repository.', url)
415 420 # Exception can contain unicode which we convert
416 421 raise exceptions.AbortException(repr(e))
417 422
418 423 # mikhail: client.fetch() returns all the remote refs, but fetches only
419 424 # refs filtered by `determine_wants` function. We need to filter result
420 425 # as well
421 426 if refs:
422 427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
423 428
424 429 if apply_refs:
425 430 # TODO: johbo: Needs proper test coverage with a git repository
426 431 # that contains a tag object, so that we would end up with
427 432 # a peeled ref at this point.
428 433 PEELED_REF_MARKER = '^{}'
429 434 for k in remote_refs:
430 435 if k.endswith(PEELED_REF_MARKER):
431 436 log.info("Skipping peeled reference %s", k)
432 437 continue
433 438 repo[k] = remote_refs[k]
434 439
435 440 if refs:
436 441 # mikhail: explicitly set the head to the last ref.
437 442 repo['HEAD'] = remote_refs[refs[-1]]
438 443
439 444 # TODO: mikhail: should we return remote_refs here to be
440 445 # consistent?
441 446 else:
442 447 return remote_refs
443 448
444 449 @reraise_safe_exceptions
450 def sync_push(self, wire, url, refs=None):
451 if self.check_url(url, wire):
452 repo = self._factory.repo(wire)
453 self.run_git_command(
454 wire, ['push', url, '--mirror'], fail_on_stderr=False)
455
456
457 @reraise_safe_exceptions
445 458 def get_remote_refs(self, wire, url):
446 459 repo = Repo(url)
447 460 return repo.get_refs()
448 461
449 462 @reraise_safe_exceptions
450 463 def get_description(self, wire):
451 464 repo = self._factory.repo(wire)
452 465 return repo.get_description()
453 466
454 467 @reraise_safe_exceptions
455 468 def get_file_history(self, wire, file_path, commit_id, limit):
456 469 repo = self._factory.repo(wire)
457 470 include = [commit_id]
458 471 paths = [file_path]
459 472
460 473 walker = repo.get_walker(include, paths=paths, max_entries=limit)
461 474 return [x.commit.id for x in walker]
462 475
463 476 @reraise_safe_exceptions
464 477 def get_missing_revs(self, wire, rev1, rev2, path2):
465 478 repo = self._factory.repo(wire)
466 479 LocalGitClient(thin_packs=False).fetch(path2, repo)
467 480
468 481 wire_remote = wire.copy()
469 482 wire_remote['path'] = path2
470 483 repo_remote = self._factory.repo(wire_remote)
471 484 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
472 485
473 486 revs = [
474 487 x.commit.id
475 488 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
476 489 return revs
477 490
478 491 @reraise_safe_exceptions
479 492 def get_object(self, wire, sha):
480 493 repo = self._factory.repo(wire)
481 494 obj = repo.get_object(sha)
482 495 commit_id = obj.id
483 496
484 497 if isinstance(obj, Tag):
485 498 commit_id = obj.object[1]
486 499
487 500 return {
488 501 'id': obj.id,
489 502 'type': obj.type_name,
490 503 'commit_id': commit_id
491 504 }
492 505
493 506 @reraise_safe_exceptions
494 507 def get_object_attrs(self, wire, sha, *attrs):
495 508 repo = self._factory.repo(wire)
496 509 obj = repo.get_object(sha)
497 510 return list(getattr(obj, a) for a in attrs)
498 511
499 512 @reraise_safe_exceptions
500 513 def get_refs(self, wire):
501 514 repo = self._factory.repo(wire)
502 515 result = {}
503 516 for ref, sha in repo.refs.as_dict().items():
504 517 peeled_sha = repo.get_peeled(ref)
505 518 result[ref] = peeled_sha
506 519 return result
507 520
508 521 @reraise_safe_exceptions
509 522 def get_refs_path(self, wire):
510 523 repo = self._factory.repo(wire)
511 524 return repo.refs.path
512 525
513 526 @reraise_safe_exceptions
514 527 def head(self, wire):
515 528 repo = self._factory.repo(wire)
516 529 return repo.head()
517 530
518 531 @reraise_safe_exceptions
519 532 def init(self, wire):
520 533 repo_path = str_to_dulwich(wire['path'])
521 534 self.repo = Repo.init(repo_path)
522 535
523 536 @reraise_safe_exceptions
524 537 def init_bare(self, wire):
525 538 repo_path = str_to_dulwich(wire['path'])
526 539 self.repo = Repo.init_bare(repo_path)
527 540
528 541 @reraise_safe_exceptions
529 542 def revision(self, wire, rev):
530 543 repo = self._factory.repo(wire)
531 544 obj = repo[rev]
532 545 obj_data = {
533 546 'id': obj.id,
534 547 }
535 548 try:
536 549 obj_data['tree'] = obj.tree
537 550 except AttributeError:
538 551 pass
539 552 return obj_data
540 553
541 554 @reraise_safe_exceptions
542 555 def commit_attribute(self, wire, rev, attr):
543 556 repo = self._factory.repo(wire)
544 557 obj = repo[rev]
545 558 return getattr(obj, attr)
546 559
547 560 @reraise_safe_exceptions
548 561 def set_refs(self, wire, key, value):
549 562 repo = self._factory.repo(wire)
550 563 repo.refs[key] = value
551 564
552 565 @reraise_safe_exceptions
553 566 def remove_ref(self, wire, key):
554 567 repo = self._factory.repo(wire)
555 568 del repo.refs[key]
556 569
557 570 @reraise_safe_exceptions
558 571 def tree_changes(self, wire, source_id, target_id):
559 572 repo = self._factory.repo(wire)
560 573 source = repo[source_id].tree if source_id else None
561 574 target = repo[target_id].tree
562 575 result = repo.object_store.tree_changes(source, target)
563 576 return list(result)
564 577
565 578 @reraise_safe_exceptions
566 579 def tree_items(self, wire, tree_id):
567 580 repo = self._factory.repo(wire)
568 581 tree = repo[tree_id]
569 582
570 583 result = []
571 584 for item in tree.iteritems():
572 585 item_sha = item.sha
573 586 item_mode = item.mode
574 587
575 588 if FILE_MODE(item_mode) == GIT_LINK:
576 589 item_type = "link"
577 590 else:
578 591 item_type = repo[item_sha].type_name
579 592
580 593 result.append((item.path, item_mode, item_sha, item_type))
581 594 return result
582 595
583 596 @reraise_safe_exceptions
584 597 def update_server_info(self, wire):
585 598 repo = self._factory.repo(wire)
586 599 update_server_info(repo)
587 600
588 601 @reraise_safe_exceptions
589 602 def discover_git_version(self):
590 603 stdout, _ = self.run_git_command(
591 604 {}, ['--version'], _bare=True, _safe=True)
592 605 prefix = 'git version'
593 606 if stdout.startswith(prefix):
594 607 stdout = stdout[len(prefix):]
595 608 return stdout.strip()
596 609
597 610 @reraise_safe_exceptions
598 611 def run_git_command(self, wire, cmd, **opts):
599 612 path = wire.get('path', None)
600 613
601 614 if path and os.path.isdir(path):
602 615 opts['cwd'] = path
603 616
604 617 if '_bare' in opts:
605 618 _copts = []
606 619 del opts['_bare']
607 620 else:
608 621 _copts = ['-c', 'core.quotepath=false', ]
609 622 safe_call = False
610 623 if '_safe' in opts:
611 624 # no exc on failure
612 625 del opts['_safe']
613 626 safe_call = True
614 627
615 628 gitenv = os.environ.copy()
616 629 gitenv.update(opts.pop('extra_env', {}))
617 630 # need to clean/fix GIT_DIR!
618 631 if 'GIT_DIR' in gitenv:
619 632 del gitenv['GIT_DIR']
620 633 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
621 634
622 635 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
623 636
624 637 try:
625 638 _opts = {'env': gitenv, 'shell': False}
626 639 _opts.update(opts)
627 640 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
628 641
629 642 return ''.join(p), ''.join(p.error)
630 643 except (EnvironmentError, OSError) as err:
631 644 cmd = ' '.join(cmd) # human friendly CMD
632 645 tb_err = ("Couldn't run git command (%s).\n"
633 646 "Original error was:%s\n" % (cmd, err))
634 647 log.exception(tb_err)
635 648 if safe_call:
636 649 return '', err
637 650 else:
638 651 raise exceptions.VcsException(tb_err)
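
A hedged usage sketch; the wire path is an assumption. The stdout and stderr of the spawned git process come back as strings, and passing _safe=True suppresses the VcsException on failure (as discover_git_version above does):

    stdout, stderr = remote.run_git_command(
        {'path': '/srv/repos/foo'}, ['rev-parse', 'HEAD'])
    # stdout holds the current HEAD sha, stderr any warnings git printed
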
639 652
640 653
641 654 def str_to_dulwich(value):
642 655 """
643 656 Dulwich 0.10.1a requires `unicode` objects to be passed in.
644 657 """
645 658 return value.decode(settings.WIRE_ENCODING)
@@ -1,19 +1,19 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19 from app import create_app
@@ -1,287 +1,287 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import re
19 19 import logging
20 20 from wsgiref.util import FileWrapper
21 21
22 22 import simplejson as json
23 23 from pyramid.config import Configurator
24 24 from pyramid.response import Response, FileIter
25 25 from pyramid.httpexceptions import (
26 26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 27 HTTPUnprocessableEntity)
28 28
29 29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 31 from vcsserver.utils import safe_int
32 32
33 33 log = logging.getLogger(__name__)
34 34
35 35
36 36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs' #+json ?
37 37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38 38
39 39
40 40 def write_response_error(http_exception, text=None):
41 41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 42 _exception = http_exception(content_type=content_type)
43 43 _exception.content_type = content_type
44 44 if text:
45 45 _exception.body = json.dumps({'message': text})
46 46 log.debug('LFS: writing response of type %s to client with text:%s',
47 47 http_exception, text)
48 48 return _exception
49 49
50 50
51 51 class AuthHeaderRequired(object):
52 52 """
53 53 Decorator to check if request has proper auth-header
54 54 """
55 55
56 56 def __call__(self, func):
57 57 return get_cython_compat_decorator(self.__wrapper, func)
58 58
59 59 def __wrapper(self, func, *fargs, **fkwargs):
60 60 request = fargs[1]
61 61 auth = request.authorization
62 62 if not auth:
63 63 return write_response_error(HTTPForbidden)
64 64 return func(*fargs[1:], **fkwargs)
65 65
66 66
67 67 # views
68 68
69 69 def lfs_objects(request):
70 70 # indicate not supported, V1 API
71 71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73 73
74 74
75 75 @AuthHeaderRequired()
76 76 def lfs_objects_batch(request):
77 77 """
78 78 The client sends the following information to the Batch endpoint to transfer some objects:
79 79
80 80 operation - Should be download or upload.
81 81 transfers - An optional Array of String identifiers for transfer
82 82 adapters that the client has configured. If omitted, the basic
83 83 transfer adapter MUST be assumed by the server.
84 84 objects - An Array of objects to download.
85 85 oid - String OID of the LFS object.
86 86 size - Integer byte size of the LFS object. Must be at least zero.
87 87 """
88 88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 89 auth = request.authorization
90 90 repo = request.matchdict.get('repo')
91 91 data = request.json
92 92 operation = data.get('operation')
93 93 if operation not in ('download', 'upload'):
94 94 log.debug('LFS: unsupported operation:%s', operation)
95 95 return write_response_error(
96 96 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
97 97
98 98 if 'objects' not in data:
99 99 log.debug('LFS: missing objects data')
100 100 return write_response_error(
101 101 HTTPBadRequest, 'missing objects data')
102 102
103 103 log.debug('LFS: handling operation of type: %s', operation)
104 104
105 105 objects = []
106 106 for o in data['objects']:
107 107 try:
108 108 oid = o['oid']
109 109 obj_size = o['size']
110 110 except KeyError:
111 111 log.exception('LFS, failed to extract data')
112 112 return write_response_error(
113 113 HTTPBadRequest, 'unsupported data in objects')
114 114
115 115 obj_data = {'oid': oid}
116 116
117 117 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid)
118 118 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo)
119 119 store = LFSOidStore(
120 120 oid, repo, store_location=request.registry.git_lfs_store_path)
121 121 handler = OidHandler(
122 122 store, repo, auth, oid, obj_size, obj_data,
123 123 obj_href, obj_verify_href)
124 124
125 125 # this verifies also OIDs
126 126 actions, errors = handler.exec_operation(operation)
127 127 if errors:
128 128 log.warning('LFS: got following errors: %s', errors)
129 129 obj_data['errors'] = errors
130 130
131 131 if actions:
132 132 obj_data['actions'] = actions
133 133
134 134 obj_data['size'] = obj_size
135 135 obj_data['authenticated'] = True
136 136 objects.append(obj_data)
137 137
138 138 result = {'objects': objects, 'transfer': 'basic'}
139 139 log.debug('LFS Response %s', safe_result(result))
140 140
141 141 return result
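
A hedged sketch of the wire format this view handles; the oid and size are illustrative and the response shape mirrors the tests further down in this changeset:

    request_payload = {
        'operation': 'download',
        'objects': [{'oid': '456', 'size': '1024'}],
    }
    # response (roughly):
    # {'transfer': 'basic',
    #  'objects': [{'oid': '456', 'size': '1024', 'authenticated': True,
    #               'actions': {'download': {
    #                   'href': 'http://localhost/repo/info/lfs/objects/456',
    #                   'header': {'Authorization': 'Basic ...'}}}}]}
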
142 142
143 143
144 144 def lfs_objects_oid_upload(request):
145 145 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
146 146 repo = request.matchdict.get('repo')
147 147 oid = request.matchdict.get('oid')
148 148 store = LFSOidStore(
149 149 oid, repo, store_location=request.registry.git_lfs_store_path)
150 150 engine = store.get_engine(mode='wb')
151 151 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
152 152
153 153 body = request.environ['wsgi.input']
154 154
155 155 with engine as f:
156 156 blksize = 64 * 1024 # 64kb
157 157 while True:
158 158 # read in chunks as stream comes in from Gunicorn
159 159 # this is a specific Gunicorn support function.
160 160 # might work differently on waitress
161 161 chunk = body.read(blksize)
162 162 if not chunk:
163 163 break
164 164 f.write(chunk)
165 165
166 166 return {'upload': 'ok'}
167 167
168 168
169 169 def lfs_objects_oid_download(request):
170 170 repo = request.matchdict.get('repo')
171 171 oid = request.matchdict.get('oid')
172 172
173 173 store = LFSOidStore(
174 174 oid, repo, store_location=request.registry.git_lfs_store_path)
175 175 if not store.has_oid():
176 176 log.debug('LFS: oid %s does not exist in store', oid)
177 177 return write_response_error(
178 178 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
179 179
180 180 # TODO(marcink): support range header ?
181 181 # Range: bytes=0-, `bytes=(\d+)\-.*`
182 182
183 183 f = open(store.oid_path, 'rb')
184 184 response = Response(
185 185 content_type='application/octet-stream', app_iter=FileIter(f))
186 186 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
187 187 return response
188 188
189 189
190 190 def lfs_objects_verify(request):
191 191 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
192 192 repo = request.matchdict.get('repo')
193 193
194 194 data = request.json
195 195 oid = data.get('oid')
196 196 size = safe_int(data.get('size'))
197 197
198 198 if not (oid and size):
199 199 return write_response_error(
200 200 HTTPBadRequest, 'missing oid and size in request data')
201 201
202 202 store = LFSOidStore(
203 203 oid, repo, store_location=request.registry.git_lfs_store_path)
204 204 if not store.has_oid():
205 205 log.debug('LFS: oid %s does not exist in store', oid)
206 206 return write_response_error(
207 207 HTTPNotFound, 'oid `%s` does not exists in store' % oid)
208 208
209 209 store_size = store.size_oid()
210 210 if store_size != size:
211 211 msg = 'requested file size mismatch store size:%s requested:%s' % (
212 212 store_size, size)
213 213 return write_response_error(
214 214 HTTPUnprocessableEntity, msg)
215 215
216 216 return {'message': {'size': 'ok', 'in_store': 'ok'}}
217 217
218 218
219 219 def lfs_objects_lock(request):
220 220 return write_response_error(
221 221 HTTPNotImplemented, 'GIT LFS locking api not supported')
222 222
223 223
224 224 def not_found(request):
225 225 return write_response_error(
226 226 HTTPNotFound, 'request path not found')
227 227
228 228
229 229 def lfs_disabled(request):
230 230 return write_response_error(
231 231 HTTPNotImplemented, 'GIT LFS disabled for this repo')
232 232
233 233
234 234 def git_lfs_app(config):
235 235
236 236 # v1 API deprecation endpoint
237 237 config.add_route('lfs_objects',
238 238 '/{repo:.*?[^/]}/info/lfs/objects')
239 239 config.add_view(lfs_objects, route_name='lfs_objects',
240 240 request_method='POST', renderer='json')
241 241
242 242 # locking API
243 243 config.add_route('lfs_objects_lock',
244 244 '/{repo:.*?[^/]}/info/lfs/locks')
245 245 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
246 246 request_method=('POST', 'GET'), renderer='json')
247 247
248 248 config.add_route('lfs_objects_lock_verify',
249 249 '/{repo:.*?[^/]}/info/lfs/locks/verify')
250 250 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
251 251 request_method=('POST', 'GET'), renderer='json')
252 252
253 253 # batch API
254 254 config.add_route('lfs_objects_batch',
255 255 '/{repo:.*?[^/]}/info/lfs/objects/batch')
256 256 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
257 257 request_method='POST', renderer='json')
258 258
259 259 # oid upload/download API
260 260 config.add_route('lfs_objects_oid',
261 261 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
262 262 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
263 263 request_method='PUT', renderer='json')
264 264 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
265 265 request_method='GET', renderer='json')
266 266
267 267 # verification API
268 268 config.add_route('lfs_objects_verify',
269 269 '/{repo:.*?[^/]}/info/lfs/verify')
270 270 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
271 271 request_method='POST', renderer='json')
272 272
273 273 # not found handler for API
274 274 config.add_notfound_view(not_found, renderer='json')
275 275
276 276
277 277 def create_app(git_lfs_enabled, git_lfs_store_path):
278 278 config = Configurator()
279 279 if git_lfs_enabled:
280 280 config.include(git_lfs_app)
281 281 config.registry.git_lfs_store_path = git_lfs_store_path
282 282 else:
283 283 # not found handler for API, reporting disabled LFS support
284 284 config.add_notfound_view(lfs_disabled, renderer='json')
285 285
286 286 app = config.make_wsgi_app()
287 287 return app
@@ -1,175 +1,175 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import logging
21 21 from collections import OrderedDict
22 22
23 23 log = logging.getLogger(__name__)
24 24
25 25
26 26 class OidHandler(object):
27 27
28 28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
29 29 obj_verify_href=None):
30 30 self.current_store = store
31 31 self.repo_name = repo_name
32 32 self.auth = auth
33 33 self.oid = oid
34 34 self.obj_size = obj_size
35 35 self.obj_data = obj_data
36 36 self.obj_href = obj_href
37 37 self.obj_verify_href = obj_verify_href
38 38
39 39 def get_store(self, mode=None):
40 40 return self.current_store
41 41
42 42 def get_auth(self):
43 43 """returns auth header for re-use in upload/download"""
44 44 return " ".join(self.auth)
45 45
46 46 def download(self):
47 47
48 48 store = self.get_store()
49 49 response = None
50 50 has_errors = None
51 51
52 52 if not store.has_oid():
53 53 # reply back to the client with an error that something is wrong with the download
54 54 err_msg = 'object: {} does not exist in store'.format(store.oid)
55 55 has_errors = OrderedDict(
56 56 error=OrderedDict(
57 57 code=404,
58 58 message=err_msg
59 59 )
60 60 )
61 61
62 62 download_action = OrderedDict(
63 63 href=self.obj_href,
64 64 header=OrderedDict([("Authorization", self.get_auth())])
65 65 )
66 66 if not has_errors:
67 67 response = OrderedDict(download=download_action)
68 68 return response, has_errors
69 69
70 70 def upload(self, skip_existing=True):
71 71 """
72 72 Write upload action for git-lfs server
73 73 """
74 74
75 75 store = self.get_store()
76 76 response = None
77 77 has_errors = None
78 78
79 79 # verify if we have the OID before, if we do, reply with empty
80 80 if store.has_oid():
81 81 log.debug('LFS: store already has oid %s', store.oid)
82 82
83 83 # validate size
84 84 store_size = store.size_oid()
85 85 size_match = store_size == self.obj_size
86 86 if not size_match:
87 87 log.warning(
88 88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
89 89 self.oid, store_size, self.obj_size)
90 90 elif skip_existing:
91 91 log.debug('LFS: skipping further action as oid is existing')
92 92 return response, has_errors
93 93
94 94 chunked = ("Transfer-Encoding", "chunked")
95 95 upload_action = OrderedDict(
96 96 href=self.obj_href,
97 97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
98 98 )
99 99 if not has_errors:
100 100 response = OrderedDict(upload=upload_action)
101 101 # if specified in handler, return the verification endpoint
102 102 if self.obj_verify_href:
103 103 verify_action = OrderedDict(
104 104 href=self.obj_verify_href,
105 105 header=OrderedDict([("Authorization", self.get_auth())])
106 106 )
107 107 response['verify'] = verify_action
108 108 return response, has_errors
109 109
110 110 def exec_operation(self, operation, *args, **kwargs):
111 111 handler = getattr(self, operation)
112 112 log.debug('LFS: handling request using %s handler', handler)
113 113 return handler(*args, **kwargs)
114 114
115 115
116 116 class LFSOidStore(object):
117 117
118 118 def __init__(self, oid, repo, store_location=None):
119 119 self.oid = oid
120 120 self.repo = repo
121 121 self.store_path = store_location or self.get_default_store()
122 122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
123 123 self.oid_path = os.path.join(self.store_path, oid)
124 124 self.fd = None
125 125
126 126 def get_engine(self, mode):
127 127 """
128 128 engine = .get_engine(mode='wb')
129 129 with engine as f:
130 130 f.write('...')
131 131 """
132 132
133 133 class StoreEngine(object):
134 134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
135 135 self.mode = mode
136 136 self.store_path = store_path
137 137 self.oid_path = oid_path
138 138 self.tmp_oid_path = tmp_oid_path
139 139
140 140 def __enter__(self):
141 141 if not os.path.isdir(self.store_path):
142 142 os.makedirs(self.store_path)
143 143
144 144 # TODO(marcink): maybe write metadata here with size/oid ?
145 145 fd = open(self.tmp_oid_path, self.mode)
146 146 self.fd = fd
147 147 return fd
148 148
149 149 def __exit__(self, exc_type, exc_value, traceback):
150 150 # close tmp file, and rename to final destination
151 151 self.fd.close()
152 152 shutil.move(self.tmp_oid_path, self.oid_path)
153 153
154 154 return StoreEngine(
155 155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
156 156
157 157 def get_default_store(self):
158 158 """
159 159 Default store, consistent with defaults of Mercurial large files store
160 160 which is /home/username/.cache/largefiles
161 161 """
162 162 user_home = os.path.expanduser("~")
163 163 return os.path.join(user_home, '.cache', 'lfs-store')
164 164
165 165 def has_oid(self):
166 166 return os.path.exists(os.path.join(self.store_path, self.oid))
167 167
168 168 def size_oid(self):
169 169 size = -1
170 170
171 171 if self.has_oid():
172 172 oid = os.path.join(self.store_path, self.oid)
173 173 size = os.stat(oid).st_size
174 174
175 175 return size
@@ -1,16 +1,16 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,239 +1,239 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from webtest.app import TestApp as WebObTestApp
21 21 import simplejson as json
22 22
23 23 from vcsserver.git_lfs.app import create_app
24 24
25 25
26 26 @pytest.fixture(scope='function')
27 27 def git_lfs_app(tmpdir):
28 28 custom_app = WebObTestApp(create_app(
29 29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir)))
30 30 custom_app._store = str(tmpdir)
31 31 return custom_app
32 32
33 33
34 34 @pytest.fixture()
35 35 def http_auth():
36 36 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
37 37
38 38
39 39 class TestLFSApplication(object):
40 40
41 41 def test_app_wrong_path(self, git_lfs_app):
42 42 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
43 43
44 44 def test_app_deprecated_endpoint(self, git_lfs_app):
45 45 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
46 46 assert response.status_code == 501
47 47 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
48 48
49 49 def test_app_lock_verify_api_not_available(self, git_lfs_app):
50 50 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
51 51 assert response.status_code == 501
52 52 assert json.loads(response.text) == {
53 53 u'message': u'GIT LFS locking api not supported'}
54 54
55 55 def test_app_lock_api_not_available(self, git_lfs_app):
56 56 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
57 57 assert response.status_code == 501
58 58 assert json.loads(response.text) == {
59 59 u'message': u'GIT LFS locking api not supported'}
60 60
61 61 def test_app_batch_api_missing_auth(self, git_lfs_app,):
62 62 git_lfs_app.post_json(
63 63 '/repo/info/lfs/objects/batch', params={}, status=403)
64 64
65 65 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
66 66 response = git_lfs_app.post_json(
67 67 '/repo/info/lfs/objects/batch', params={}, status=400,
68 68 extra_environ=http_auth)
69 69 assert json.loads(response.text) == {
70 70 u'message': u'unsupported operation mode: `None`'}
71 71
72 72 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
73 73 response = git_lfs_app.post_json(
74 74 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
75 75 status=400, extra_environ=http_auth)
76 76 assert json.loads(response.text) == {
77 77 u'message': u'missing objects data'}
78 78
79 79 def test_app_batch_api_unsupported_data_in_objects(
80 80 self, git_lfs_app, http_auth):
81 81 params = {'operation': 'download',
82 82 'objects': [{}]}
83 83 response = git_lfs_app.post_json(
84 84 '/repo/info/lfs/objects/batch', params=params, status=400,
85 85 extra_environ=http_auth)
86 86 assert json.loads(response.text) == {
87 87 u'message': u'unsupported data in objects'}
88 88
89 89 def test_app_batch_api_download_missing_object(
90 90 self, git_lfs_app, http_auth):
91 91 params = {'operation': 'download',
92 92 'objects': [{'oid': '123', 'size': '1024'}]}
93 93 response = git_lfs_app.post_json(
94 94 '/repo/info/lfs/objects/batch', params=params,
95 95 extra_environ=http_auth)
96 96
97 97 expected_objects = [
98 98 {u'authenticated': True,
99 99 u'errors': {u'error': {
100 100 u'code': 404,
101 101 u'message': u'object: 123 does not exist in store'}},
102 102 u'oid': u'123',
103 103 u'size': u'1024'}
104 104 ]
105 105 assert json.loads(response.text) == {
106 106 'objects': expected_objects, 'transfer': 'basic'}
107 107
108 108 def test_app_batch_api_download(self, git_lfs_app, http_auth):
109 109 oid = '456'
110 110 oid_path = os.path.join(git_lfs_app._store, oid)
111 111 if not os.path.isdir(os.path.dirname(oid_path)):
112 112 os.makedirs(os.path.dirname(oid_path))
113 113 with open(oid_path, 'wb') as f:
114 114 f.write('OID_CONTENT')
115 115
116 116 params = {'operation': 'download',
117 117 'objects': [{'oid': oid, 'size': '1024'}]}
118 118 response = git_lfs_app.post_json(
119 119 '/repo/info/lfs/objects/batch', params=params,
120 120 extra_environ=http_auth)
121 121
122 122 expected_objects = [
123 123 {u'authenticated': True,
124 124 u'actions': {
125 125 u'download': {
126 126 u'header': {u'Authorization': u'Basic XXXXX'},
127 127 u'href': u'http://localhost/repo/info/lfs/objects/456'},
128 128 },
129 129 u'oid': u'456',
130 130 u'size': u'1024'}
131 131 ]
132 132 assert json.loads(response.text) == {
133 133 'objects': expected_objects, 'transfer': 'basic'}
134 134
135 135 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
136 136 params = {'operation': 'upload',
137 137 'objects': [{'oid': '123', 'size': '1024'}]}
138 138 response = git_lfs_app.post_json(
139 139 '/repo/info/lfs/objects/batch', params=params,
140 140 extra_environ=http_auth)
141 141 expected_objects = [
142 142 {u'authenticated': True,
143 143 u'actions': {
144 144 u'upload': {
145 145 u'header': {u'Authorization': u'Basic XXXXX',
146 146 u'Transfer-Encoding': u'chunked'},
147 147 u'href': u'http://localhost/repo/info/lfs/objects/123'},
148 148 u'verify': {
149 149 u'header': {u'Authorization': u'Basic XXXXX'},
150 150 u'href': u'http://localhost/repo/info/lfs/verify'}
151 151 },
152 152 u'oid': u'123',
153 153 u'size': u'1024'}
154 154 ]
155 155 assert json.loads(response.text) == {
156 156 'objects': expected_objects, 'transfer': 'basic'}
157 157
158 158 def test_app_verify_api_missing_data(self, git_lfs_app):
159 159 params = {'oid': 'missing',}
160 160 response = git_lfs_app.post_json(
161 161 '/repo/info/lfs/verify', params=params,
162 162 status=400)
163 163
164 164 assert json.loads(response.text) == {
165 165 u'message': u'missing oid and size in request data'}
166 166
167 167 def test_app_verify_api_missing_obj(self, git_lfs_app):
168 168 params = {'oid': 'missing', 'size': '1024'}
169 169 response = git_lfs_app.post_json(
170 170 '/repo/info/lfs/verify', params=params,
171 171 status=404)
172 172
173 173 assert json.loads(response.text) == {
174 174 u'message': u'oid `missing` does not exists in store'}
175 175
176 176 def test_app_verify_api_size_mismatch(self, git_lfs_app):
177 177 oid = 'existing'
178 178 oid_path = os.path.join(git_lfs_app._store, oid)
179 179 if not os.path.isdir(os.path.dirname(oid_path)):
180 180 os.makedirs(os.path.dirname(oid_path))
181 181 with open(oid_path, 'wb') as f:
182 182 f.write('OID_CONTENT')
183 183
184 184 params = {'oid': oid, 'size': '1024'}
185 185 response = git_lfs_app.post_json(
186 186 '/repo/info/lfs/verify', params=params, status=422)
187 187
188 188 assert json.loads(response.text) == {
189 189 u'message': u'requested file size mismatch '
190 190 u'store size:11 requested:1024'}
191 191
192 192 def test_app_verify_api(self, git_lfs_app):
193 193 oid = 'existing'
194 194 oid_path = os.path.join(git_lfs_app._store, oid)
195 195 if not os.path.isdir(os.path.dirname(oid_path)):
196 196 os.makedirs(os.path.dirname(oid_path))
197 197 with open(oid_path, 'wb') as f:
198 198 f.write('OID_CONTENT')
199 199
200 200 params = {'oid': oid, 'size': 11}
201 201 response = git_lfs_app.post_json(
202 202 '/repo/info/lfs/verify', params=params)
203 203
204 204 assert json.loads(response.text) == {
205 205 u'message': {u'size': u'ok', u'in_store': u'ok'}}
206 206
207 207 def test_app_download_api_oid_not_existing(self, git_lfs_app):
208 208 oid = 'missing'
209 209
210 210 response = git_lfs_app.get(
211 211 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
212 212
213 213 assert json.loads(response.text) == {
214 214 u'message': u'requested file with oid `missing` not found in store'}
215 215
216 216 def test_app_download_api(self, git_lfs_app):
217 217 oid = 'existing'
218 218 oid_path = os.path.join(git_lfs_app._store, oid)
219 219 if not os.path.isdir(os.path.dirname(oid_path)):
220 220 os.makedirs(os.path.dirname(oid_path))
221 221 with open(oid_path, 'wb') as f:
222 222 f.write('OID_CONTENT')
223 223
224 224 response = git_lfs_app.get(
225 225 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
226 226 assert response
227 227
228 228 def test_app_upload(self, git_lfs_app):
229 229 oid = 'uploaded'
230 230
231 231 response = git_lfs_app.put(
232 232 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
233 233
234 234 assert json.loads(response.text) == {u'upload': u'ok'}
235 235
236 236 # verify that we actually wrote that OID
237 237 oid_path = os.path.join(git_lfs_app._store, oid)
238 238 assert os.path.isfile(oid_path)
239 239 assert 'CONTENT' == open(oid_path).read()
@@ -1,141 +1,141 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import pytest
20 20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21 21
22 22
23 23 @pytest.fixture()
24 24 def lfs_store(tmpdir):
25 25 repo = 'test'
26 26 oid = '123456789'
27 27 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
28 28 return store
29 29
30 30
31 31 @pytest.fixture()
32 32 def oid_handler(lfs_store):
33 33 store = lfs_store
34 34 repo = store.repo
35 35 oid = store.oid
36 36
37 37 oid_handler = OidHandler(
38 38 store=store, repo_name=repo, auth=('basic', 'xxxx'),
39 39 oid=oid,
40 40 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
41 41 obj_verify_href='http://localhost/verify')
42 42 return oid_handler
43 43
44 44
45 45 class TestOidHandler(object):
46 46
47 47 @pytest.mark.parametrize('exec_action', [
48 48 'download',
49 49 'upload',
50 50 ])
51 51 def test_exec_action(self, exec_action, oid_handler):
52 52 handler = oid_handler.exec_operation(exec_action)
53 53 assert handler
54 54
55 55 def test_exec_action_undefined(self, oid_handler):
56 56 with pytest.raises(AttributeError):
57 57 oid_handler.exec_operation('wrong')
58 58
59 59 def test_download_oid_not_existing(self, oid_handler):
60 60 response, has_errors = oid_handler.exec_operation('download')
61 61
62 62 assert response is None
63 63 assert has_errors['error'] == {
64 64 'code': 404,
65 65 'message': 'object: 123456789 does not exist in store'}
66 66
67 67 def test_download_oid(self, oid_handler):
68 68 store = oid_handler.get_store()
69 69 if not os.path.isdir(os.path.dirname(store.oid_path)):
70 70 os.makedirs(os.path.dirname(store.oid_path))
71 71
72 72 with open(store.oid_path, 'wb') as f:
73 73 f.write('CONTENT')
74 74
75 75 response, has_errors = oid_handler.exec_operation('download')
76 76
77 77 assert has_errors is None
78 78 assert response['download'] == {
79 79 'header': {'Authorization': 'basic xxxx'},
80 80 'href': 'http://localhost/handle_oid'
81 81 }
82 82
83 83 def test_upload_oid_that_exists(self, oid_handler):
84 84 store = oid_handler.get_store()
85 85 if not os.path.isdir(os.path.dirname(store.oid_path)):
86 86 os.makedirs(os.path.dirname(store.oid_path))
87 87
88 88 with open(store.oid_path, 'wb') as f:
89 89 f.write('CONTENT')
90 90 oid_handler.obj_size = 7
91 91 response, has_errors = oid_handler.exec_operation('upload')
92 92 assert has_errors is None
93 93 assert response is None
94 94
95 95 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
96 96 store = oid_handler.get_store()
97 97 if not os.path.isdir(os.path.dirname(store.oid_path)):
98 98 os.makedirs(os.path.dirname(store.oid_path))
99 99
100 100 with open(store.oid_path, 'wb') as f:
101 101 f.write('CONTENT')
102 102
103 103 oid_handler.obj_size = 10240
104 104 response, has_errors = oid_handler.exec_operation('upload')
105 105 assert has_errors is None
106 106 assert response['upload'] == {
107 107 'header': {'Authorization': 'basic xxxx',
108 108 'Transfer-Encoding': 'chunked'},
109 109 'href': 'http://localhost/handle_oid',
110 110 }
111 111
112 112 def test_upload_oid(self, oid_handler):
113 113 response, has_errors = oid_handler.exec_operation('upload')
114 114 assert has_errors is None
115 115 assert response['upload'] == {
116 116 'header': {'Authorization': 'basic xxxx',
117 117 'Transfer-Encoding': 'chunked'},
118 118 'href': 'http://localhost/handle_oid'
119 119 }
120 120
121 121
122 122 class TestLFSStore(object):
123 123 def test_write_oid(self, lfs_store):
124 124 oid_location = lfs_store.oid_path
125 125
126 126 assert not os.path.isfile(oid_location)
127 127
128 128 engine = lfs_store.get_engine(mode='wb')
129 129 with engine as f:
130 130 f.write('CONTENT')
131 131
132 132 assert os.path.isfile(oid_location)
133 133
134 134 def test_detect_has_oid(self, lfs_store):
135 135
136 136 assert lfs_store.has_oid() is False
137 137 engine = lfs_store.get_engine(mode='wb')
138 138 with engine as f:
139 139 f.write('CONTENT')
140 140
141 141 assert lfs_store.has_oid() is True
\ No newline at end of file
@@ -1,50 +1,50 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17 import copy
18 18 from functools import wraps
19 19
20 20
21 21 def get_cython_compat_decorator(wrapper, func):
22 22 """
23 23 Creates a cython compatible decorator. The previously used
24 24 decorator.decorator() function seems to be incompatible with cython.
25 25
26 26 :param wrapper: __wrapper method of the decorator class
27 27 :param func: decorated function
28 28 """
29 29 @wraps(func)
30 30 def local_wrapper(*args, **kwds):
31 31 return wrapper(func, *args, **kwds)
32 32 local_wrapper.__wrapped__ = func
33 33 return local_wrapper
34 34
35 35
36 36 def safe_result(result):
37 37 """clean result for better representation in logs"""
38 38 clean_copy = copy.deepcopy(result)
39 39
40 40 try:
41 41 if 'objects' in clean_copy:
42 42 for oid_data in clean_copy['objects']:
43 43 if 'actions' in oid_data:
44 44 for action_name, data in oid_data['actions'].items():
45 45 if 'header' in data:
46 46 data['header'] = {'Authorization': '*****'}
47 47 except Exception:
48 48 return result
49 49
50 50 return clean_copy
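
Note: safe_result() above deep-copies an LFS batch-style response and masks every action's Authorization header, so credentials never reach the logs while the original response stays untouched. A small usage sketch, with the payload shape borrowed from the tests above and made-up values:

# assumes safe_result from the module above is in scope
result = {
    'objects': [
        {'oid': '123456789',
         'actions': {
             'download': {
                 'header': {'Authorization': 'basic xxxx'},
                 'href': 'http://localhost/handle_oid'}}},
    ]
}

cleaned = safe_result(result)

# only the deep copy is masked; the original is left as-is
assert result['objects'][0]['actions']['download']['header'] == {
    'Authorization': 'basic xxxx'}
assert cleaned['objects'][0]['actions']['download']['header'] == {
    'Authorization': '*****'}
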
@@ -1,749 +1,758 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 baseui.setconfig('ui', 'paginate', 'never')
56 57 # force mercurial to only use 1 thread, otherwise it may try to set a
57 58 # signal in a non-main thread, thus generating a ValueError.
58 59 baseui.setconfig('worker', 'numcpus', 1)
59 60
60 61 # If there is no config for the largefiles extension, we explicitly disable
61 62 # it here. This overrides settings from the repository's hgrc file. Recent
62 63 # mercurial versions enable largefiles in hgrc on clone from largefile
63 64 # repo.
64 65 if not baseui.hasconfig('extensions', 'largefiles'):
65 66 log.debug('Explicitly disable largefiles extension for repo.')
66 67 baseui.setconfig('extensions', 'largefiles', '!')
67 68
68 69 return baseui
69 70
70 71
71 72 def reraise_safe_exceptions(func):
72 73 """Decorator for converting mercurial exceptions to something neutral."""
73 74 def wrapper(*args, **kwargs):
74 75 try:
75 76 return func(*args, **kwargs)
76 77 except (Abort, InterventionRequired):
77 78 raise_from_original(exceptions.AbortException)
78 79 except RepoLookupError:
79 80 raise_from_original(exceptions.LookupException)
80 81 except RequirementError:
81 82 raise_from_original(exceptions.RequirementException)
82 83 except RepoError:
83 84 raise_from_original(exceptions.VcsException)
84 85 except LookupError:
85 86 raise_from_original(exceptions.LookupException)
86 87 except Exception as e:
87 88 if not hasattr(e, '_vcs_kind'):
88 89 log.exception("Unhandled exception in hg remote call")
89 90 raise_from_original(exceptions.UnhandledException)
90 91 raise
91 92 return wrapper
92 93
93 94
94 95 class MercurialFactory(RepoFactory):
95 96
96 97 def _create_config(self, config, hooks=True):
97 98 if not hooks:
98 99 hooks_to_clean = frozenset((
99 100 'changegroup.repo_size', 'preoutgoing.pre_pull',
100 101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
101 102 new_config = []
102 103 for section, option, value in config:
103 104 if section == 'hooks' and option in hooks_to_clean:
104 105 continue
105 106 new_config.append((section, option, value))
106 107 config = new_config
107 108
108 109 baseui = make_ui_from_config(config)
109 110 return baseui
110 111
111 112 def _create_repo(self, wire, create):
112 113 baseui = self._create_config(wire["config"])
113 114 return localrepository(baseui, wire["path"], create)
114 115
115 116
116 117 class HgRemote(object):
117 118
118 119 def __init__(self, factory):
119 120 self._factory = factory
120 121
121 122 self._bulk_methods = {
122 123 "affected_files": self.ctx_files,
123 124 "author": self.ctx_user,
124 125 "branch": self.ctx_branch,
125 126 "children": self.ctx_children,
126 127 "date": self.ctx_date,
127 128 "message": self.ctx_description,
128 129 "parents": self.ctx_parents,
129 130 "status": self.ctx_status,
130 131 "obsolete": self.ctx_obsolete,
131 132 "phase": self.ctx_phase,
132 133 "hidden": self.ctx_hidden,
133 134 "_file_paths": self.ctx_list,
134 135 }
135 136
136 137 @reraise_safe_exceptions
137 138 def discover_hg_version(self):
138 139 from mercurial import util
139 140 return util.version()
140 141
141 142 @reraise_safe_exceptions
142 143 def archive_repo(self, archive_path, mtime, file_info, kind):
143 144 if kind == "tgz":
144 145 archiver = archival.tarit(archive_path, mtime, "gz")
145 146 elif kind == "tbz2":
146 147 archiver = archival.tarit(archive_path, mtime, "bz2")
147 148 elif kind == 'zip':
148 149 archiver = archival.zipit(archive_path, mtime)
149 150 else:
150 151 raise exceptions.ArchiveException(
151 152 'Remote does not support: "%s".' % kind)
152 153
153 154 for f_path, f_mode, f_is_link, f_content in file_info:
154 155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
155 156 archiver.done()
156 157
157 158 @reraise_safe_exceptions
158 159 def bookmarks(self, wire):
159 160 repo = self._factory.repo(wire)
160 161 return dict(repo._bookmarks)
161 162
162 163 @reraise_safe_exceptions
163 164 def branches(self, wire, normal, closed):
164 165 repo = self._factory.repo(wire)
165 166 iter_branches = repo.branchmap().iterbranches()
166 167 bt = {}
167 168 for branch_name, _heads, tip, is_closed in iter_branches:
168 169 if normal and not is_closed:
169 170 bt[branch_name] = tip
170 171 if closed and is_closed:
171 172 bt[branch_name] = tip
172 173
173 174 return bt
174 175
175 176 @reraise_safe_exceptions
176 177 def bulk_request(self, wire, rev, pre_load):
177 178 result = {}
178 179 for attr in pre_load:
179 180 try:
180 181 method = self._bulk_methods[attr]
181 182 result[attr] = method(wire, rev)
182 183 except KeyError:
183 184 raise exceptions.VcsException(
184 185 'Unknown bulk attribute: "%s"' % attr)
185 186 return result
186 187
187 188 @reraise_safe_exceptions
188 189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
189 190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
190 191 clone(baseui, source, dest, noupdate=not update_after_clone)
191 192
192 193 @reraise_safe_exceptions
193 194 def commitctx(
194 195 self, wire, message, parents, commit_time, commit_timezone,
195 196 user, files, extra, removed, updated):
196 197
197 198 def _filectxfn(_repo, memctx, path):
198 199 """
199 200 Marks the given path as added/changed/removed in the given _repo. This is
200 201 used by mercurial's internal commit function.
201 202 """
202 203
203 204 # check if this path is removed
204 205 if path in removed:
205 206 # returning None is a way to mark node for removal
206 207 return None
207 208
208 209 # check if this path is added
209 210 for node in updated:
210 211 if node['path'] == path:
211 212 return memfilectx(
212 213 _repo,
213 214 path=node['path'],
214 215 data=node['content'],
215 216 islink=False,
216 217 isexec=bool(node['mode'] & stat.S_IXUSR),
217 218 copied=False,
218 219 memctx=memctx)
219 220
220 221 raise exceptions.AbortException(
221 222 "Given path haven't been marked as added, "
222 223 "changed or removed (%s)" % path)
223 224
224 225 repo = self._factory.repo(wire)
225 226
226 227 commit_ctx = memctx(
227 228 repo=repo,
228 229 parents=parents,
229 230 text=message,
230 231 files=files,
231 232 filectxfn=_filectxfn,
232 233 user=user,
233 234 date=(commit_time, commit_timezone),
234 235 extra=extra)
235 236
236 237 n = repo.commitctx(commit_ctx)
237 238 new_id = hex(n)
238 239
239 240 return new_id
240 241
241 242 @reraise_safe_exceptions
242 243 def ctx_branch(self, wire, revision):
243 244 repo = self._factory.repo(wire)
244 245 ctx = repo[revision]
245 246 return ctx.branch()
246 247
247 248 @reraise_safe_exceptions
248 249 def ctx_children(self, wire, revision):
249 250 repo = self._factory.repo(wire)
250 251 ctx = repo[revision]
251 252 return [child.rev() for child in ctx.children()]
252 253
253 254 @reraise_safe_exceptions
254 255 def ctx_date(self, wire, revision):
255 256 repo = self._factory.repo(wire)
256 257 ctx = repo[revision]
257 258 return ctx.date()
258 259
259 260 @reraise_safe_exceptions
260 261 def ctx_description(self, wire, revision):
261 262 repo = self._factory.repo(wire)
262 263 ctx = repo[revision]
263 264 return ctx.description()
264 265
265 266 @reraise_safe_exceptions
266 267 def ctx_diff(
267 268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
268 269 repo = self._factory.repo(wire)
269 270 ctx = repo[revision]
270 271 result = ctx.diff(
271 272 git=git, ignore_whitespace=ignore_whitespace, context=context)
272 273 return list(result)
273 274
274 275 @reraise_safe_exceptions
275 276 def ctx_files(self, wire, revision):
276 277 repo = self._factory.repo(wire)
277 278 ctx = repo[revision]
278 279 return ctx.files()
279 280
280 281 @reraise_safe_exceptions
281 282 def ctx_list(self, path, revision):
282 283 repo = self._factory.repo(path)
283 284 ctx = repo[revision]
284 285 return list(ctx)
285 286
286 287 @reraise_safe_exceptions
287 288 def ctx_parents(self, wire, revision):
288 289 repo = self._factory.repo(wire)
289 290 ctx = repo[revision]
290 291 return [parent.rev() for parent in ctx.parents()]
291 292
292 293 @reraise_safe_exceptions
293 294 def ctx_phase(self, wire, revision):
294 295 repo = self._factory.repo(wire)
295 296 ctx = repo[revision]
296 297 # public=0, draft=1, secret=3
297 298 return ctx.phase()
298 299
299 300 @reraise_safe_exceptions
300 301 def ctx_obsolete(self, wire, revision):
301 302 repo = self._factory.repo(wire)
302 303 ctx = repo[revision]
303 304 return ctx.obsolete()
304 305
305 306 @reraise_safe_exceptions
306 307 def ctx_hidden(self, wire, revision):
307 308 repo = self._factory.repo(wire)
308 309 ctx = repo[revision]
309 310 return ctx.hidden()
310 311
311 312 @reraise_safe_exceptions
312 313 def ctx_substate(self, wire, revision):
313 314 repo = self._factory.repo(wire)
314 315 ctx = repo[revision]
315 316 return ctx.substate
316 317
317 318 @reraise_safe_exceptions
318 319 def ctx_status(self, wire, revision):
319 320 repo = self._factory.repo(wire)
320 321 ctx = repo[revision]
321 322 status = repo[ctx.p1().node()].status(other=ctx.node())
322 323 # object of status (an odd, custom named tuple in mercurial) is not
323 324 # correctly serializable; we make it a list, as the underlying
324 325 # API expects this to be a list
325 326 return list(status)
326 327
327 328 @reraise_safe_exceptions
328 329 def ctx_user(self, wire, revision):
329 330 repo = self._factory.repo(wire)
330 331 ctx = repo[revision]
331 332 return ctx.user()
332 333
333 334 @reraise_safe_exceptions
334 335 def check_url(self, url, config):
335 336 _proto = None
336 337 if '+' in url[:url.find('://')]:
337 338 _proto = url[0:url.find('+')]
338 339 url = url[url.find('+') + 1:]
339 340 handlers = []
340 341 url_obj = url_parser(url)
341 342 test_uri, authinfo = url_obj.authinfo()
342 343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
343 344 url_obj.query = obfuscate_qs(url_obj.query)
344 345
345 346 cleaned_uri = str(url_obj)
346 347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
347 348
348 349 if authinfo:
349 350 # create a password manager
350 351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
351 352 passmgr.add_password(*authinfo)
352 353
353 354 handlers.extend((httpbasicauthhandler(passmgr),
354 355 httpdigestauthhandler(passmgr)))
355 356
356 357 o = urllib2.build_opener(*handlers)
357 358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
358 359 ('Accept', 'application/mercurial-0.1')]
359 360
360 361 q = {"cmd": 'between'}
361 362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
362 363 qs = '?%s' % urllib.urlencode(q)
363 364 cu = "%s%s" % (test_uri, qs)
364 365 req = urllib2.Request(cu, None, {})
365 366
366 367 try:
367 368 log.debug("Trying to open URL %s", cleaned_uri)
368 369 resp = o.open(req)
369 370 if resp.code != 200:
370 371 raise exceptions.URLError('Return Code is not 200')
371 372 except Exception as e:
372 373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 374 # means it cannot be cloned
374 375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
375 376
376 377 # now check if it's a proper hg repo, but don't do it for svn
377 378 try:
378 379 if _proto == 'svn':
379 380 pass
380 381 else:
381 382 # check for pure hg repos
382 383 log.debug(
383 384 "Verifying if URL is a Mercurial repository: %s",
384 385 cleaned_uri)
385 386 httppeer(make_ui_from_config(config), url).lookup('tip')
386 387 except Exception as e:
387 388 log.warning("URL is not a valid Mercurial repository: %s",
388 389 cleaned_uri)
389 390 raise exceptions.URLError(
390 391 "url [%s] does not look like an hg repo org_exc: %s"
391 392 % (cleaned_uri, e))
392 393
393 394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
394 395 return True
395 396
396 397 @reraise_safe_exceptions
397 398 def diff(
398 399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
399 400 context):
400 401 repo = self._factory.repo(wire)
401 402
402 403 if file_filter:
403 404 match_filter = match(file_filter[0], '', [file_filter[1]])
404 405 else:
405 406 match_filter = file_filter
406 407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
407 408
408 409 try:
409 410 return "".join(patch.diff(
410 411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
411 412 except RepoLookupError:
412 413 raise exceptions.LookupException()
413 414
414 415 @reraise_safe_exceptions
415 416 def file_history(self, wire, revision, path, limit):
416 417 repo = self._factory.repo(wire)
417 418
418 419 ctx = repo[revision]
419 420 fctx = ctx.filectx(path)
420 421
421 422 def history_iter():
422 423 limit_rev = fctx.rev()
423 424 for obj in reversed(list(fctx.filelog())):
424 425 obj = fctx.filectx(obj)
425 426 if limit_rev >= obj.rev():
426 427 yield obj
427 428
428 429 history = []
429 430 for cnt, obj in enumerate(history_iter()):
430 431 if limit and cnt >= limit:
431 432 break
432 433 history.append(hex(obj.node()))
433 434
434 435 return [x for x in history]
435 436
436 437 @reraise_safe_exceptions
437 438 def file_history_untill(self, wire, revision, path, limit):
438 439 repo = self._factory.repo(wire)
439 440 ctx = repo[revision]
440 441 fctx = ctx.filectx(path)
441 442
442 443 file_log = list(fctx.filelog())
443 444 if limit:
444 445 # Limit to the last n items
445 446 file_log = file_log[-limit:]
446 447
447 448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
448 449
449 450 @reraise_safe_exceptions
450 451 def fctx_annotate(self, wire, revision, path):
451 452 repo = self._factory.repo(wire)
452 453 ctx = repo[revision]
453 454 fctx = ctx.filectx(path)
454 455
455 456 result = []
456 for i, annotate_data in enumerate(fctx.annotate()):
457 for i, (a_line, content) in enumerate(fctx.annotate()):
457 458 ln_no = i + 1
458 node_info, content = annotate_data
459 sha = hex(node_info[0].node())
459 sha = hex(a_line.fctx.node())
460 460 result.append((ln_no, sha, content))
461 461 return result
462 462
463 463 @reraise_safe_exceptions
464 464 def fctx_data(self, wire, revision, path):
465 465 repo = self._factory.repo(wire)
466 466 ctx = repo[revision]
467 467 fctx = ctx.filectx(path)
468 468 return fctx.data()
469 469
470 470 @reraise_safe_exceptions
471 471 def fctx_flags(self, wire, revision, path):
472 472 repo = self._factory.repo(wire)
473 473 ctx = repo[revision]
474 474 fctx = ctx.filectx(path)
475 475 return fctx.flags()
476 476
477 477 @reraise_safe_exceptions
478 478 def fctx_size(self, wire, revision, path):
479 479 repo = self._factory.repo(wire)
480 480 ctx = repo[revision]
481 481 fctx = ctx.filectx(path)
482 482 return fctx.size()
483 483
484 484 @reraise_safe_exceptions
485 485 def get_all_commit_ids(self, wire, name):
486 486 repo = self._factory.repo(wire)
487 487 revs = repo.filtered(name).changelog.index
488 488 return map(lambda x: hex(x[7]), revs)[:-1]
489 489
490 490 @reraise_safe_exceptions
491 491 def get_config_value(self, wire, section, name, untrusted=False):
492 492 repo = self._factory.repo(wire)
493 493 return repo.ui.config(section, name, untrusted=untrusted)
494 494
495 495 @reraise_safe_exceptions
496 496 def get_config_bool(self, wire, section, name, untrusted=False):
497 497 repo = self._factory.repo(wire)
498 498 return repo.ui.configbool(section, name, untrusted=untrusted)
499 499
500 500 @reraise_safe_exceptions
501 501 def get_config_list(self, wire, section, name, untrusted=False):
502 502 repo = self._factory.repo(wire)
503 503 return repo.ui.configlist(section, name, untrusted=untrusted)
504 504
505 505 @reraise_safe_exceptions
506 506 def is_large_file(self, wire, path):
507 507 return largefiles.lfutil.isstandin(path)
508 508
509 509 @reraise_safe_exceptions
510 510 def in_largefiles_store(self, wire, sha):
511 511 repo = self._factory.repo(wire)
512 512 return largefiles.lfutil.instore(repo, sha)
513 513
514 514 @reraise_safe_exceptions
515 515 def in_user_cache(self, wire, sha):
516 516 repo = self._factory.repo(wire)
517 517 return largefiles.lfutil.inusercache(repo.ui, sha)
518 518
519 519 @reraise_safe_exceptions
520 520 def store_path(self, wire, sha):
521 521 repo = self._factory.repo(wire)
522 522 return largefiles.lfutil.storepath(repo, sha)
523 523
524 524 @reraise_safe_exceptions
525 525 def link(self, wire, sha, path):
526 526 repo = self._factory.repo(wire)
527 527 largefiles.lfutil.link(
528 528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529 529
530 530 @reraise_safe_exceptions
531 531 def localrepository(self, wire, create=False):
532 532 self._factory.repo(wire, create=create)
533 533
534 534 @reraise_safe_exceptions
535 535 def lookup(self, wire, revision, both):
536 536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
537 537 if isinstance(revision, float):
538 538 revision = long(revision)
539 539 repo = self._factory.repo(wire)
540 540 try:
541 541 ctx = repo[revision]
542 542 except RepoLookupError:
543 543 raise exceptions.LookupException(revision)
544 544 except LookupError as e:
545 545 raise exceptions.LookupException(e.name)
546 546
547 547 if not both:
548 548 return ctx.hex()
549 549
550 550 ctx = repo[ctx.hex()]
551 551 return ctx.hex(), ctx.rev()
552 552
553 553 @reraise_safe_exceptions
554 554 def pull(self, wire, url, commit_ids=None):
555 555 repo = self._factory.repo(wire)
556 556 remote = peer(repo, {}, url)
557 557 if commit_ids:
558 558 commit_ids = [bin(commit_id) for commit_id in commit_ids]
559 559
560 560 return exchange.pull(
561 561 repo, remote, heads=commit_ids, force=None).cgresult
562 562
563 563 @reraise_safe_exceptions
564 def sync_push(self, wire, url):
565 if self.check_url(url, wire['config']):
566 repo = self._factory.repo(wire)
567 bookmarks = dict(repo._bookmarks).keys()
568 remote = peer(repo, {}, url)
569 return exchange.push(
570 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
571
572 @reraise_safe_exceptions
564 573 def revision(self, wire, rev):
565 574 repo = self._factory.repo(wire)
566 575 ctx = repo[rev]
567 576 return ctx.rev()
568 577
569 578 @reraise_safe_exceptions
570 579 def rev_range(self, wire, filter):
571 580 repo = self._factory.repo(wire)
572 581 revisions = [rev for rev in revrange(repo, filter)]
573 582 return revisions
574 583
575 584 @reraise_safe_exceptions
576 585 def rev_range_hash(self, wire, node):
577 586 repo = self._factory.repo(wire)
578 587
579 588 def get_revs(repo, rev_opt):
580 589 if rev_opt:
581 590 revs = revrange(repo, rev_opt)
582 591 if len(revs) == 0:
583 592 return (nullrev, nullrev)
584 593 return max(revs), min(revs)
585 594 else:
586 595 return len(repo) - 1, 0
587 596
588 597 stop, start = get_revs(repo, [node + ':'])
589 598 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
590 599 return revs
591 600
592 601 @reraise_safe_exceptions
593 602 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
594 603 other_path = kwargs.pop('other_path', None)
595 604
596 605 # case when we want to compare two independent repositories
597 606 if other_path and other_path != wire["path"]:
598 607 baseui = self._factory._create_config(wire["config"])
599 608 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
600 609 else:
601 610 repo = self._factory.repo(wire)
602 611 return list(repo.revs(rev_spec, *args))
603 612
604 613 @reraise_safe_exceptions
605 614 def strip(self, wire, revision, update, backup):
606 615 repo = self._factory.repo(wire)
607 616 ctx = repo[revision]
608 617 hgext_strip(
609 618 repo.baseui, repo, ctx.node(), update=update, backup=backup)
610 619
611 620 @reraise_safe_exceptions
612 621 def verify(self, wire,):
613 622 repo = self._factory.repo(wire)
614 623 baseui = self._factory._create_config(wire['config'])
615 624 baseui.setconfig('ui', 'quiet', 'false')
616 625 output = io.BytesIO()
617 626
618 627 def write(data, **unused_kwargs):
619 628 output.write(data)
620 629 baseui.write = write
621 630
622 631 repo.ui = baseui
623 632 verify.verify(repo)
624 633 return output.getvalue()
625 634
626 635 @reraise_safe_exceptions
627 636 def tag(self, wire, name, revision, message, local, user,
628 637 tag_time, tag_timezone):
629 638 repo = self._factory.repo(wire)
630 639 ctx = repo[revision]
631 640 node = ctx.node()
632 641
633 642 date = (tag_time, tag_timezone)
634 643 try:
635 644 hg_tag.tag(repo, name, node, message, local, user, date)
636 645 except Abort as e:
637 646 log.exception("Tag operation aborted")
638 647 # Exception can contain unicode, which we convert via repr()
639 648 raise exceptions.AbortException(repr(e))
640 649
641 650 @reraise_safe_exceptions
642 651 def tags(self, wire):
643 652 repo = self._factory.repo(wire)
644 653 return repo.tags()
645 654
646 655 @reraise_safe_exceptions
647 656 def update(self, wire, node=None, clean=False):
648 657 repo = self._factory.repo(wire)
649 658 baseui = self._factory._create_config(wire['config'])
650 659 commands.update(baseui, repo, node=node, clean=clean)
651 660
652 661 @reraise_safe_exceptions
653 662 def identify(self, wire):
654 663 repo = self._factory.repo(wire)
655 664 baseui = self._factory._create_config(wire['config'])
656 665 output = io.BytesIO()
657 666 baseui.write = output.write
658 667 # This is required to get a full node id
659 668 baseui.debugflag = True
660 669 commands.identify(baseui, repo, id=True)
661 670
662 671 return output.getvalue()
663 672
664 673 @reraise_safe_exceptions
665 674 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
666 675 hooks=True):
667 676 repo = self._factory.repo(wire)
668 677 baseui = self._factory._create_config(wire['config'], hooks=hooks)
669 678
670 679 # Mercurial internally has a lot of logic that checks ONLY if an
671 680 # option is defined, so we only pass those options that are defined
672 681 opts = {}
673 682 if bookmark:
674 683 opts['bookmark'] = bookmark
675 684 if branch:
676 685 opts['branch'] = branch
677 686 if revision:
678 687 opts['rev'] = revision
679 688
680 689 commands.pull(baseui, repo, source, **opts)
681 690
682 691 @reraise_safe_exceptions
683 692 def heads(self, wire, branch=None):
684 693 repo = self._factory.repo(wire)
685 694 baseui = self._factory._create_config(wire['config'])
686 695 output = io.BytesIO()
687 696
688 697 def write(data, **unused_kwargs):
689 698 output.write(data)
690 699
691 700 baseui.write = write
692 701 if branch:
693 702 args = [branch]
694 703 else:
695 704 args = []
696 705 commands.heads(baseui, repo, template='{node} ', *args)
697 706
698 707 return output.getvalue()
699 708
700 709 @reraise_safe_exceptions
701 710 def ancestor(self, wire, revision1, revision2):
702 711 repo = self._factory.repo(wire)
703 712 changelog = repo.changelog
704 713 lookup = repo.lookup
705 714 a = changelog.ancestor(lookup(revision1), lookup(revision2))
706 715 return hex(a)
707 716
708 717 @reraise_safe_exceptions
709 718 def push(self, wire, revisions, dest_path, hooks=True,
710 719 push_branches=False):
711 720 repo = self._factory.repo(wire)
712 721 baseui = self._factory._create_config(wire['config'], hooks=hooks)
713 722 commands.push(baseui, repo, dest=dest_path, rev=revisions,
714 723 new_branch=push_branches)
715 724
716 725 @reraise_safe_exceptions
717 726 def merge(self, wire, revision):
718 727 repo = self._factory.repo(wire)
719 728 baseui = self._factory._create_config(wire['config'])
720 729 repo.ui.setconfig('ui', 'merge', 'internal:dump')
721 730
722 731 # When sub repositories are used, mercurial prompts the user in
723 732 # case of merge conflicts or different sub repository sources. By
724 733 # setting the interactive flag to `False` mercurial doesn't prompt the
725 734 # user but instead uses a default value.
726 735 repo.ui.setconfig('ui', 'interactive', False)
727 736
728 737 commands.merge(baseui, repo, rev=revision)
729 738
730 739 @reraise_safe_exceptions
731 740 def commit(self, wire, message, username, close_branch=False):
732 741 repo = self._factory.repo(wire)
733 742 baseui = self._factory._create_config(wire['config'])
734 743 repo.ui.setconfig('ui', 'username', username)
735 744 commands.commit(baseui, repo, message=message, close_branch=close_branch)
736 745
737 746 @reraise_safe_exceptions
738 747 def rebase(self, wire, source=None, dest=None, abort=False):
739 748 repo = self._factory.repo(wire)
740 749 baseui = self._factory._create_config(wire['config'])
741 750 repo.ui.setconfig('ui', 'merge', 'internal:dump')
742 751 rebase.rebase(
743 752 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
744 753
745 754 @reraise_safe_exceptions
746 755 def bookmark(self, wire, bookmark, revision=None):
747 756 repo = self._factory.repo(wire)
748 757 baseui = self._factory._create_config(wire['config'])
749 758 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
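
Note: nearly every HgRemote method above is wrapped by reraise_safe_exceptions, which maps Mercurial's exception types onto vcsserver-neutral ones so callers never have to import mercurial themselves. The same translate-and-reraise pattern as a standalone sketch; the names below (BackendError, find_revision) are invented for illustration and are not part of vcsserver:

import logging

log = logging.getLogger(__name__)


class BackendError(Exception):
    """Neutral error exposed to callers instead of backend-specific ones."""


def reraise_as_neutral(func):
    # translate backend-specific exceptions into BackendError, mirroring
    # the intent of the reraise_safe_exceptions decorator above
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except LookupError as e:
            raise BackendError('lookup failed: %r' % e)
        except Exception:
            log.exception('Unhandled exception in backend call')
            raise BackendError('unhandled backend error')
    return wrapper


@reraise_as_neutral
def find_revision(revisions, rev):
    return revisions[rev]  # raises KeyError (a LookupError) when missing


try:
    find_revision({'tip': 'abc123'}, 'missing')
except BackendError as e:
    print(e)  # -> lookup failed: KeyError(...)
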
@@ -1,63 +1,63 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 import mercurial.demandimport
23 from mercurial import demandimport
24 24 # patch demandimport, due to bug in mercurial when it always triggers
25 25 # demandimport.enable()
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
26 demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 39 from mercurial import tags as hg_tag
40 40
41 41 from mercurial.commands import clone, nullid, pull
42 42 from mercurial.context import memctx, memfilectx
43 43 from mercurial.error import (
44 44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 45 RequirementError)
46 46 from mercurial.hgweb import hgweb_mod
47 47 from mercurial.localrepo import localrepository
48 48 from mercurial.match import match
49 49 from mercurial.mdiff import diffopts
50 50 from mercurial.node import bin, hex
51 51 from mercurial.encoding import tolocal
52 52 from mercurial.discovery import findcommonoutgoing
53 53 from mercurial.hg import peer
54 54 from mercurial.httppeer import httppeer
55 55 from mercurial.util import url as hg_url
56 56 from mercurial.scmutil import revrange
57 57 from mercurial.node import nullrev
58 58 from mercurial import exchange
59 59 from hgext import largefiles
60 60
61 61 # these auth handlers are patched for a python 2.6.5 bug causing
62 62 # infinite looping when given invalid resources
63 63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
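
Note: the key trick in the compatibility module above is defusing demandimport.enable before any of the following Mercurial imports can trigger it. The general pattern, neutralizing a module-level hook before importing its consumers, can be sketched standalone as follows; fake_demandimport is a stand-in module built purely for the demonstration:

import sys
import types

# build a stand-in module whose enable() has an unwanted side effect
fake = types.ModuleType('fake_demandimport')
fake.enable = lambda: sys.stdout.write('lazy imports enabled\n')
sys.modules['fake_demandimport'] = fake

import fake_demandimport

# neutralize the hook before anything else gets a chance to call it,
# mirroring `demandimport.enable = lambda *args, **kwargs: 1` above
fake_demandimport.enable = lambda *args, **kwargs: 1

print(fake_demandimport.enable())  # 1, and nothing is written to stdout
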
@@ -1,134 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
31 31 def patch_largefiles_capabilities():
32 32 """
33 33 Patches the capabilities function in the largefiles extension.
34 34 """
35 35 from vcsserver import hgcompat
36 36 lfproto = hgcompat.largefiles.proto
37 37 wrapper = _dynamic_capabilities_wrapper(
38 38 lfproto, hgcompat.extensions.extensions)
39 39 lfproto.capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 44 wrapped_capabilities = lfproto.capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 47 def _dynamic_capabilities(repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 55 else:
56 56 logger.debug('Extension largefiles disabled')
57 57 calc_capabilities = lfproto.capabilitiesorig
58 58 return calc_capabilities(repo, proto)
59 59
60 60 return _dynamic_capabilities
61 61
62 62
63 63 def patch_subrepo_type_mapping():
64 64 from collections import defaultdict
65 65 from hgcompat import subrepo
66 66 from exceptions import SubrepoMergeException
67 67
68 68 class NoOpSubrepo(subrepo.abstractsubrepo):
69 69
70 70 def __init__(self, ctx, path, *args, **kwargs):
71 71 """Initialize abstractsubrepo part
72 72
73 73 ``ctx`` is the context referring this subrepository in the
74 74 parent repository.
75 75
76 76 ``path`` is the path to this subrepository as seen from
77 77 innermost repository.
78 78 """
79 79 self.ui = ctx.repo().ui
80 80 self._ctx = ctx
81 81 self._path = path
82 82
83 83 def storeclean(self, path):
84 84 """
85 85 returns true if the repository has not changed since it was last
86 86 cloned from or pushed to a given repository.
87 87 """
88 88 return True
89 89
90 90 def dirty(self, ignoreupdate=False):
91 91 """returns true if the dirstate of the subrepo is dirty or does not
92 92 match current stored state. If ignoreupdate is true, only check
93 93 whether the subrepo has uncommitted changes in its dirstate.
94 94 """
95 95 return False
96 96
97 97 def basestate(self):
98 98 """current working directory base state, disregarding .hgsubstate
99 99 state and working directory modifications"""
100 100 substate = subrepo.state(self._ctx, self.ui)
101 101 file_system_path, rev, repotype = substate.get(self._path)
102 102 return rev
103 103
104 104 def remove(self):
105 105 """remove the subrepo
106 106
107 107 (should verify the dirstate is not dirty first)
108 108 """
109 109 pass
110 110
111 111 def get(self, state, overwrite=False):
112 112 """run whatever commands are needed to put the subrepo into
113 113 this state
114 114 """
115 115 pass
116 116
117 117 def merge(self, state):
118 118 """merge currently-saved state with the new state."""
119 119 raise SubrepoMergeException()
120 120
121 121 def push(self, opts):
122 122 """perform whatever action is analogous to 'hg push'
123 123
124 124 This may be a no-op on some systems.
125 125 """
126 126 pass
127 127
128 128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 129 # whenever a subrepo class is looked up.
130 130 subrepo.types = {
131 131 'hg': NoOpSubrepo,
132 132 'git': NoOpSubrepo,
133 133 'svn': NoOpSubrepo
134 134 }
@@ -1,482 +1,482 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2017 RodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import json
24 24 import logging
25 25 import collections
26 26 import importlib
27 27 import subprocess
28 28
29 29 from httplib import HTTPConnection
30 30
31 31
32 32 import mercurial.scmutil
33 33 import mercurial.node
34 34 import simplejson as json
35 35
36 36 from vcsserver import exceptions
37 37
38 38 log = logging.getLogger(__name__)
39 39
40 40
41 41 class HooksHttpClient(object):
42 42 connection = None
43 43
44 44 def __init__(self, hooks_uri):
45 45 self.hooks_uri = hooks_uri
46 46
47 47 def __call__(self, method, extras):
48 48 connection = HTTPConnection(self.hooks_uri)
49 49 body = self._serialize(method, extras)
50 50 connection.request('POST', '/', body)
51 51 response = connection.getresponse()
52 52 return json.loads(response.read())
53 53
54 54 def _serialize(self, hook_name, extras):
55 55 data = {
56 56 'method': hook_name,
57 57 'extras': extras
58 58 }
59 59 return json.dumps(data)
60 60
61 61
62 62 class HooksDummyClient(object):
63 63 def __init__(self, hooks_module):
64 64 self._hooks_module = importlib.import_module(hooks_module)
65 65
66 66 def __call__(self, hook_name, extras):
67 67 with self._hooks_module.Hooks() as hooks:
68 68 return getattr(hooks, hook_name)(extras)
69 69
70 70
71 71 class RemoteMessageWriter(object):
72 72 """Writer base class."""
73 73 def write(self, message):
74 74 raise NotImplementedError()
75 75
76 76
77 77 class HgMessageWriter(RemoteMessageWriter):
78 78 """Writer that knows how to send messages to mercurial clients."""
79 79
80 80 def __init__(self, ui):
81 81 self.ui = ui
82 82
83 83 def write(self, message):
84 84 # TODO: Check why the quiet flag is set by default.
85 85 old = self.ui.quiet
86 86 self.ui.quiet = False
87 87 self.ui.status(message.encode('utf-8'))
88 88 self.ui.quiet = old
89 89
90 90
91 91 class GitMessageWriter(RemoteMessageWriter):
92 92 """Writer that knows how to send messages to git clients."""
93 93
94 94 def __init__(self, stdout=None):
95 95 self.stdout = stdout or sys.stdout
96 96
97 97 def write(self, message):
98 98 self.stdout.write(message.encode('utf-8'))
99 99
100 100
101 101 def _handle_exception(result):
102 102 exception_class = result.get('exception')
103 103 exception_traceback = result.get('exception_traceback')
104 104
105 105 if exception_traceback:
106 106 log.error('Got traceback from remote call:%s', exception_traceback)
107 107
108 108 if exception_class == 'HTTPLockedRC':
109 109 raise exceptions.RepositoryLockedException(*result['exception_args'])
110 110 elif exception_class == 'RepositoryError':
111 111 raise exceptions.VcsException(*result['exception_args'])
112 112 elif exception_class:
113 113 raise Exception('Got remote exception "%s" with args "%s"' %
114 114 (exception_class, result['exception_args']))
115 115
116 116
117 117 def _get_hooks_client(extras):
118 118 if 'hooks_uri' in extras:
119 119 protocol = extras.get('hooks_protocol')
120 120 return HooksHttpClient(extras['hooks_uri'])
121 121 else:
122 122 return HooksDummyClient(extras['hooks_module'])
123 123
124 124
125 125 def _call_hook(hook_name, extras, writer):
126 126 hooks = _get_hooks_client(extras)
127 127 result = hooks(hook_name, extras)
128 128 log.debug('Hooks got result: %s', result)
129 129 writer.write(result['output'])
130 130 _handle_exception(result)
131 131
132 132 return result['status']
133 133
134 134
135 135 def _extras_from_ui(ui):
136 136 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
137 137 if not hook_data:
138 138 # maybe it's inside environ ?
139 139 env_hook_data = os.environ.get('RC_SCM_DATA')
140 140 if env_hook_data:
141 141 hook_data = env_hook_data
142 142
143 143 extras = {}
144 144 if hook_data:
145 145 extras = json.loads(hook_data)
146 146 return extras
147 147
148 148
149 149 def _rev_range_hash(repo, node):
150 150
151 151 commits = []
152 152 for rev in xrange(repo[node], len(repo)):
153 153 ctx = repo[rev]
154 154 commit_id = mercurial.node.hex(ctx.node())
155 155 branch = ctx.branch()
156 156 commits.append((commit_id, branch))
157 157
158 158 return commits
159 159
160 160
161 161 def repo_size(ui, repo, **kwargs):
162 162 extras = _extras_from_ui(ui)
163 163 return _call_hook('repo_size', extras, HgMessageWriter(ui))
164 164
165 165
166 166 def pre_pull(ui, repo, **kwargs):
167 167 extras = _extras_from_ui(ui)
168 168 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
169 169
170 170
171 171 def pre_pull_ssh(ui, repo, **kwargs):
172 172 extras = _extras_from_ui(ui)
173 173 if extras and extras.get('SSH'):
174 174 return pre_pull(ui, repo, **kwargs)
175 175 return 0
176 176
177 177
178 178 def post_pull(ui, repo, **kwargs):
179 179 extras = _extras_from_ui(ui)
180 180 return _call_hook('post_pull', extras, HgMessageWriter(ui))
181 181
182 182
183 183 def post_pull_ssh(ui, repo, **kwargs):
184 184 extras = _extras_from_ui(ui)
185 185 if extras and extras.get('SSH'):
186 186 return post_pull(ui, repo, **kwargs)
187 187 return 0
188 188
189 189
190 190 def pre_push(ui, repo, node=None, **kwargs):
191 191 extras = _extras_from_ui(ui)
192 192
193 193 rev_data = []
194 194 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
195 195 branches = collections.defaultdict(list)
196 196 for commit_id, branch in _rev_range_hash(repo, node):
197 197 branches[branch].append(commit_id)
198 198
199 199 for branch, commits in branches.iteritems():
200 200 old_rev = kwargs.get('node_last') or commits[0]
201 201 rev_data.append({
202 202 'old_rev': old_rev,
203 203 'new_rev': commits[-1],
204 204 'ref': '',
205 205 'type': 'branch',
206 206 'name': branch,
207 207 })
208 208
209 209 extras['commit_ids'] = rev_data
210 210 return _call_hook('pre_push', extras, HgMessageWriter(ui))
211 211
212 212
213 213 def pre_push_ssh(ui, repo, node=None, **kwargs):
214 214 if _extras_from_ui(ui).get('SSH'):
215 215 return pre_push(ui, repo, node, **kwargs)
216 216
217 217 return 0
218 218
219 219
220 220 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
221 221 extras = _extras_from_ui(ui)
222 222 if extras.get('SSH'):
223 223 permission = extras['SSH_PERMISSIONS']
224 224
225 225 if 'repository.write' == permission or 'repository.admin' == permission:
226 226 return 0
227 227
228 228 # non-zero ret code
229 229 return 1
230 230
231 231 return 0
232 232
233 233
234 234 def post_push(ui, repo, node, **kwargs):
235 235 extras = _extras_from_ui(ui)
236 236
237 237 commit_ids = []
238 238 branches = []
239 239 bookmarks = []
240 240 tags = []
241 241
242 242 for commit_id, branch in _rev_range_hash(repo, node):
243 243 commit_ids.append(commit_id)
244 244 if branch not in branches:
245 245 branches.append(branch)
246 246
247 247 if hasattr(ui, '_rc_pushkey_branches'):
248 248 bookmarks = ui._rc_pushkey_branches
249 249
250 250 extras['commit_ids'] = commit_ids
251 251 extras['new_refs'] = {
252 252 'branches': branches,
253 253 'bookmarks': bookmarks,
254 254 'tags': tags
255 255 }
256 256
257 257 return _call_hook('post_push', extras, HgMessageWriter(ui))
258 258
259 259
260 260 def post_push_ssh(ui, repo, node, **kwargs):
261 261 if _extras_from_ui(ui).get('SSH'):
262 262 return post_push(ui, repo, node, **kwargs)
263 263 return 0
264 264
265 265
266 266 def key_push(ui, repo, **kwargs):
267 267 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
268 268 # store new bookmarks in our UI object propagated later to post_push
269 269 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
270 270 return
271 271
272 272
273 273 # backward compat
274 274 log_pull_action = post_pull
275 275
276 276 # backward compat
277 277 log_push_action = post_push
278 278
279 279
280 280 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
281 281 """
282 282 Old hook name: keep here for backward compatibility.
283 283
284 284 This is only required when the installed git hooks are not upgraded.
285 285 """
286 286 pass
287 287
288 288
289 289 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
290 290 """
291 291 Old hook name: keep here for backward compatibility.
292 292
293 293 This is only required when the installed git hooks are not upgraded.
294 294 """
295 295 pass
296 296
297 297
298 298 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
299 299
300 300
301 301 def git_pre_pull(extras):
302 302 """
303 303 Pre pull hook.
304 304
305 305 :param extras: dictionary containing the keys defined in simplevcs
306 306 :type extras: dict
307 307
308 308 :return: status code of the hook. 0 for success.
309 309 :rtype: int
310 310 """
311 311 if 'pull' not in extras['hooks']:
312 312 return HookResponse(0, '')
313 313
314 314 stdout = io.BytesIO()
315 315 try:
316 316 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
317 317 except Exception as error:
318 318 status = 128
319 319 stdout.write('ERROR: %s\n' % str(error))
320 320
321 321 return HookResponse(status, stdout.getvalue())
322 322
323 323
324 324 def git_post_pull(extras):
325 325 """
326 326 Post pull hook.
327 327
328 328 :param extras: dictionary containing the keys defined in simplevcs
329 329 :type extras: dict
330 330
331 331 :return: status code of the hook. 0 for success.
332 332 :rtype: int
333 333 """
334 334 if 'pull' not in extras['hooks']:
335 335 return HookResponse(0, '')
336 336
337 337 stdout = io.BytesIO()
338 338 try:
339 339 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
340 340 except Exception as error:
341 341 status = 128
342 342 stdout.write('ERROR: %s\n' % error)
343 343
344 344 return HookResponse(status, stdout.getvalue())
345 345
346 346
347 347 def _parse_git_ref_lines(revision_lines):
348 348 rev_data = []
349 349 for revision_line in revision_lines or []:
350 350 old_rev, new_rev, ref = revision_line.strip().split(' ')
351 351 ref_data = ref.split('/', 2)
352 352 if ref_data[1] in ('tags', 'heads'):
353 353 rev_data.append({
354 354 'old_rev': old_rev,
355 355 'new_rev': new_rev,
356 356 'ref': ref,
357 357 'type': ref_data[1],
358 358 'name': ref_data[2],
359 359 })
360 360 return rev_data
361 361
362 362
363 363 def git_pre_receive(unused_repo_path, revision_lines, env):
364 364 """
365 365 Pre push hook.
366 366
367 367 :param extras: dictionary containing the keys defined in simplevcs
368 368 :type extras: dict
369 369
370 370 :return: status code of the hook. 0 for success.
371 371 :rtype: int
372 372 """
373 373 extras = json.loads(env['RC_SCM_DATA'])
374 374 rev_data = _parse_git_ref_lines(revision_lines)
375 375 if 'push' not in extras['hooks']:
376 376 return 0
377 377 extras['commit_ids'] = rev_data
378 378 return _call_hook('pre_push', extras, GitMessageWriter())
379 379
380 380
381 381 def _run_command(arguments):
382 382 """
383 383 Run the specified command and return the stdout.
384 384
385 385 :param arguments: sequence of program arguments (including the program name)
386 386 :type arguments: list[str]
387 387 """
388 388 # TODO(skreft): refactor this method and all the other similar ones.
389 389 # Probably this should be using subprocessio.
390 390 process = subprocess.Popen(
391 391 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
392 392 stdout, stderr = process.communicate()
393 393
394 394 if process.returncode != 0:
395 395 raise Exception(
396 396 'Command %s exited with exit code %s: stderr:%s' % (
397 397 arguments, process.returncode, stderr))
398 398
399 399 return stdout
400 400
401 401
402 402 def git_post_receive(unused_repo_path, revision_lines, env):
403 403 """
404 404 Post push hook.
405 405
406 406 :param extras: dictionary containing the keys defined in simplevcs
407 407 :type extras: dict
408 408
409 409 :return: status code of the hook. 0 for success.
410 410 :rtype: int
411 411 """
412 412 extras = json.loads(env['RC_SCM_DATA'])
413 413 if 'push' not in extras['hooks']:
414 414 return 0
415 415
416 416 rev_data = _parse_git_ref_lines(revision_lines)
417 417
418 418 git_revs = []
419 419
420 420 # N.B.(skreft): it is ok to just call git, as git before calling a
421 421 # subcommand sets the PATH environment variable so that it points to the
422 422 # correct version of the git executable.
423 423 empty_commit_id = '0' * 40
424 424 branches = []
425 425 tags = []
426 426 for push_ref in rev_data:
427 427 type_ = push_ref['type']
428 428
429 429 if type_ == 'heads':
430 430 if push_ref['old_rev'] == empty_commit_id:
431 431 # starting new branch case
432 432 if push_ref['name'] not in branches:
433 433 branches.append(push_ref['name'])
434 434
435 435 # Fix up head revision if needed
436 436 cmd = ['git', 'show', 'HEAD']
437 437 try:
438 438 _run_command(cmd)
439 439 except Exception:
440 440 cmd = ['git', 'symbolic-ref', 'HEAD',
441 441 'refs/heads/%s' % push_ref['name']]
442 442 print("Setting default branch to %s" % push_ref['name'])
443 443 _run_command(cmd)
444 444
445 445 cmd = ['git', 'for-each-ref', '--format=%(refname)',
446 446 'refs/heads/*']
447 447 heads = _run_command(cmd)
448 448 heads = heads.replace(push_ref['ref'], '')
449 449 heads = ' '.join(head for head in heads.splitlines() if head)
450 450 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
451 451 '--', push_ref['new_rev'], '--not', heads]
452 452 git_revs.extend(_run_command(cmd).splitlines())
453 453 elif push_ref['new_rev'] == empty_commit_id:
454 454 # delete branch case
455 455 git_revs.append('delete_branch=>%s' % push_ref['name'])
456 456 else:
457 457 if push_ref['name'] not in branches:
458 458 branches.append(push_ref['name'])
459 459
460 460 cmd = ['git', 'log',
461 461 '{old_rev}..{new_rev}'.format(**push_ref),
462 462 '--reverse', '--pretty=format:%H']
463 463 git_revs.extend(_run_command(cmd).splitlines())
464 464 elif type_ == 'tags':
465 465 if push_ref['name'] not in tags:
466 466 tags.append(push_ref['name'])
467 467 git_revs.append('tag=>%s' % push_ref['name'])
468 468
469 469 extras['commit_ids'] = git_revs
470 470 extras['new_refs'] = {
471 471 'branches': branches,
472 472 'bookmarks': [],
473 473 'tags': tags,
474 474 }
475 475
476 476 if 'repo_size' in extras['hooks']:
477 477 try:
478 478 _call_hook('repo_size', extras, GitMessageWriter())
479 479 except:
480 480 pass
481 481
482 482 return _call_hook('post_push', extras, GitMessageWriter())
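
Note: _parse_git_ref_lines above converts the `old-rev new-rev ref` lines git hands to pre/post-receive hooks into dicts, keeping only branch (heads) and tag refs. A quick usage sketch with made-up revision hashes; it assumes _parse_git_ref_lines from the hooks module above is in scope:

revision_lines = [
    'a1b2c3 d4e5f6 refs/heads/master',
    '0000000000000000000000000000000000000000 f00ba7 refs/tags/v1.0.0',
    'a1b2c3 d4e5f6 refs/notes/commits',  # skipped: neither heads nor tags
]

rev_data = _parse_git_ref_lines(revision_lines)
for entry in rev_data:
    print(entry['type'], entry['name'], entry['new_rev'])
# heads master d4e5f6
# tags v1.0.0 f00ba7
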
@@ -1,476 +1,478 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import base64
19 19 import locale
20 20 import logging
21 21 import uuid
22 22 import wsgiref.util
23 23 import traceback
24 24 from itertools import chain
25 25
26 26 import simplejson as json
27 27 import msgpack
28 28 from beaker.cache import CacheManager
29 29 from beaker.util import parse_cache_config_options
30 30 from pyramid.config import Configurator
31 31 from pyramid.wsgi import wsgiapp
32 32
33 33 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
34 34 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
35 35 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
36 36 from vcsserver.echo_stub.echo_app import EchoApp
37 37 from vcsserver.exceptions import HTTPRepoLocked
38 38 from vcsserver.server import VcsServer
39 39
40 40 try:
41 41 from vcsserver.git import GitFactory, GitRemote
42 42 except ImportError:
43 43 GitFactory = None
44 44 GitRemote = None
45 45
46 46 try:
47 47 from vcsserver.hg import MercurialFactory, HgRemote
48 48 except ImportError:
49 49 MercurialFactory = None
50 50 HgRemote = None
51 51
52 52 try:
53 53 from vcsserver.svn import SubversionFactory, SvnRemote
54 54 except ImportError:
55 55 SubversionFactory = None
56 56 SvnRemote = None
57 57
58 58 log = logging.getLogger(__name__)
59 59
60 60
61 61 def _is_request_chunked(environ):
62 62 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
63 63 return stream
64 64
65 65
66 66 class VCS(object):
67 67 def __init__(self, locale=None, cache_config=None):
68 68 self.locale = locale
69 69 self.cache_config = cache_config
70 70 self._configure_locale()
71 71 self._initialize_cache()
72 72
73 73 if GitFactory and GitRemote:
74 74 git_repo_cache = self.cache.get_cache_region(
75 75 'git', region='repo_object')
76 76 git_factory = GitFactory(git_repo_cache)
77 77 self._git_remote = GitRemote(git_factory)
78 78 else:
79 79 log.info("Git client import failed")
80 80
81 81 if MercurialFactory and HgRemote:
82 82 hg_repo_cache = self.cache.get_cache_region(
83 83 'hg', region='repo_object')
84 84 hg_factory = MercurialFactory(hg_repo_cache)
85 85 self._hg_remote = HgRemote(hg_factory)
86 86 else:
87 87 log.info("Mercurial client import failed")
88 88
89 89 if SubversionFactory and SvnRemote:
90 90 svn_repo_cache = self.cache.get_cache_region(
91 91 'svn', region='repo_object')
92 92 svn_factory = SubversionFactory(svn_repo_cache)
93 93 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
94 94 else:
95 95 log.info("Subversion client import failed")
96 96
97 97 self._vcsserver = VcsServer()
98 98
99 99 def _initialize_cache(self):
100 100 cache_config = parse_cache_config_options(self.cache_config)
101 101 log.info('Initializing beaker cache: %s' % cache_config)
102 102 self.cache = CacheManager(**cache_config)
103 103
104 104 def _configure_locale(self):
105 105 if self.locale:
106 106 log.info('Setting locale: `LC_ALL` to %s' % self.locale)
107 107 else:
108 108 log.info(
109 109 'Configuring locale subsystem based on environment variables')
110 110 try:
111 111 # If self.locale is the empty string, then the locale
112 112 # module will use the environment variables. See the
113 113 # documentation of the package `locale`.
114 114 locale.setlocale(locale.LC_ALL, self.locale)
115 115
116 116 language_code, encoding = locale.getlocale()
117 117 log.info(
118 118 'Locale set to language code "%s" with encoding "%s".',
119 119 language_code, encoding)
120 120 except locale.Error:
121 121 log.exception(
122 122 'Cannot set locale, not configuring the locale system')
123 123
124 124
125 125 class WsgiProxy(object):
126 126 def __init__(self, wsgi):
127 127 self.wsgi = wsgi
128 128
129 129 def __call__(self, environ, start_response):
130 130 input_data = environ['wsgi.input'].read()
131 131 input_data = msgpack.unpackb(input_data)
132 132
133 133 error = None
134 134 try:
135 135 data, status, headers = self.wsgi.handle(
136 136 input_data['environment'], input_data['input_data'],
137 137 *input_data['args'], **input_data['kwargs'])
138 138 except Exception as e:
139 139 data, status, headers = [], None, None
140 140 error = {
141 141 'message': str(e),
142 142 '_vcs_kind': getattr(e, '_vcs_kind', None)
143 143 }
144 144
145 145 start_response(200, {})
146 146 return self._iterator(error, status, headers, data)
147 147
148 148 def _iterator(self, error, status, headers, data):
149 149 initial_data = [
150 150 error,
151 151 status,
152 152 headers,
153 153 ]
154 154
155 155 for d in chain(initial_data, data):
156 156 yield msgpack.packb(d)
157 157
158 158
159 159 class HTTPApplication(object):
160 160 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
161 161
162 162 remote_wsgi = remote_wsgi
163 163 _use_echo_app = False
164 164
165 165 def __init__(self, settings=None, global_config=None):
166 166 self.config = Configurator(settings=settings)
167 167 self.global_config = global_config
168 168
169 169 locale = settings.get('locale', '') or 'en_US.UTF-8'
170 170 vcs = VCS(locale=locale, cache_config=settings)
171 171 self._remotes = {
172 172 'hg': vcs._hg_remote,
173 173 'git': vcs._git_remote,
174 174 'svn': vcs._svn_remote,
175 175 'server': vcs._vcsserver,
176 176 }
177 177 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
178 178 self._use_echo_app = True
179 179 log.warning("Using EchoApp for VCS operations.")
180 180 self.remote_wsgi = remote_wsgi_stub
181 181 self._configure_settings(settings)
182 182 self._configure()
183 183
184 184 def _configure_settings(self, app_settings):
185 185 """
186 186 Configure the settings module.
187 187 """
188 188 git_path = app_settings.get('git_path', None)
189 189 if git_path:
190 190 settings.GIT_EXECUTABLE = git_path
191 191
192 192 def _configure(self):
193 193 self.config.add_renderer(
194 194 name='msgpack',
195 195 factory=self._msgpack_renderer_factory)
196 196
197 197 self.config.add_route('service', '/_service')
198 198 self.config.add_route('status', '/status')
199 199 self.config.add_route('hg_proxy', '/proxy/hg')
200 200 self.config.add_route('git_proxy', '/proxy/git')
201 201 self.config.add_route('vcs', '/{backend}')
202 202 self.config.add_route('stream_git', '/stream/git/*repo_name')
203 203 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
204 204
205 205 self.config.add_view(
206 206 self.status_view, route_name='status', renderer='json')
207 207 self.config.add_view(
208 208 self.service_view, route_name='service', renderer='msgpack')
209 209
210 210 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
211 211 self.config.add_view(self.git_proxy(), route_name='git_proxy')
212 212 self.config.add_view(
213 213 self.vcs_view, route_name='vcs', renderer='msgpack',
214 214 custom_predicates=[self.is_vcs_view])
215 215
216 216 self.config.add_view(self.hg_stream(), route_name='stream_hg')
217 217 self.config.add_view(self.git_stream(), route_name='stream_git')
218 218
219 219 def notfound(request):
220 220 return {'status': '404 NOT FOUND'}
221 221 self.config.add_notfound_view(notfound, renderer='json')
222 222
223 223 self.config.add_view(self.handle_vcs_exception, context=Exception)
224 224
225 225 self.config.add_tween(
226 226 'vcsserver.tweens.RequestWrapperTween',
227 227 )
228 228
229 229 def wsgi_app(self):
230 230 return self.config.make_wsgi_app()
231 231
232 232 def vcs_view(self, request):
233 233 remote = self._remotes[request.matchdict['backend']]
234 234 payload = msgpack.unpackb(request.body, use_list=True)
235 235 method = payload.get('method')
236 236 params = payload.get('params')
237 237 wire = params.get('wire')
238 238 args = params.get('args')
239 239 kwargs = params.get('kwargs')
240 240 if wire:
241 241 try:
242 242 wire['context'] = uuid.UUID(wire['context'])
243 243 except KeyError:
244 244 pass
245 245 args.insert(0, wire)
246 246
247 247 log.debug('method called:%s with kwargs:%s', method, kwargs)
248 248 try:
249 249 resp = getattr(remote, method)(*args, **kwargs)
250 250 except Exception as e:
251 251 tb_info = traceback.format_exc()
252 252
253 253 type_ = e.__class__.__name__
254 254 if type_ not in self.ALLOWED_EXCEPTIONS:
255 255 type_ = None
256 256
257 257 resp = {
258 258 'id': payload.get('id'),
259 259 'error': {
260 260 'message': e.message,
261 261 'traceback': tb_info,
262 262 'type': type_
263 263 }
264 264 }
265 265 try:
266 266 resp['error']['_vcs_kind'] = e._vcs_kind
267 267 except AttributeError:
268 268 pass
269 269 else:
270 270 resp = {
271 271 'id': payload.get('id'),
272 272 'result': resp
273 273 }
274 274
275 275 return resp
276 276
277 277 def status_view(self, request):
278 278 import vcsserver
279 279 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__}
280 280
281 281 def service_view(self, request):
282 282 import vcsserver
283 283 import ConfigParser as configparser
284 284
285 285 payload = msgpack.unpackb(request.body, use_list=True)
286 286
287 287 try:
288 288 path = self.global_config['__file__']
289 289 config = configparser.ConfigParser()
290 290 config.read(path)
291 291 parsed_ini = config
292 292 if parsed_ini.has_section('server:main'):
293 293 parsed_ini = dict(parsed_ini.items('server:main'))
294 294 except Exception:
295 295 log.exception('Failed to read .ini file for display')
296 296 parsed_ini = {}
297 297
298 298 resp = {
299 299 'id': payload.get('id'),
300 300 'result': dict(
301 301 version=vcsserver.__version__,
302 302 config=parsed_ini,
303 303 payload=payload,
304 304 )
305 305 }
306 306 return resp
307 307
308 308 def _msgpack_renderer_factory(self, info):
309 309 def _render(value, system):
310 310 value = msgpack.packb(value)
311 311 request = system.get('request')
312 312 if request is not None:
313 313 response = request.response
314 314 ct = response.content_type
315 315 if ct == response.default_content_type:
316 316 response.content_type = 'application/x-msgpack'
317 317 return value
318 318 return _render
319 319
320 320 def set_env_from_config(self, environ, config):
321 321 dict_conf = {}
322 322 try:
323 323 for elem in config:
324 324 if elem[0] == 'rhodecode':
325 325 dict_conf = json.loads(elem[2])
326 326 break
327 327 except Exception:
328 328 log.exception('Failed to fetch SCM CONFIG')
329 329 return
330 330
331 331 username = dict_conf.get('username')
332 332 if username:
333 333 environ['REMOTE_USER'] = username
334 # mercurial specific, some extension api rely on this
335 environ['HGUSER'] = username
334 336
335 337 ip = dict_conf.get('ip')
336 338 if ip:
337 339 environ['REMOTE_HOST'] = ip
338 340
339 341 if _is_request_chunked(environ):
340 342 # set the compatibility flag for webob
341 343 environ['wsgi.input_terminated'] = True
342 344
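
A small sketch of the config shape set_env_from_config scans: a sequence of 3-item tuples where the entry whose first element is 'rhodecode' carries a JSON payload with the acting username and IP. The concrete values and the middle key name are invented for illustration.

    import json

    config = [
        ('web', 'push_ssl', 'false'),             # ignored by the scan above
        ('rhodecode', 'extras', json.dumps({      # middle key name is illustrative
            'username': 'example-user',
            'ip': '10.0.0.1',
        })),
    ]
    # Passing this together with a WSGI environ to set_env_from_config() would
    # set REMOTE_USER, HGUSER and REMOTE_HOST as shown above.
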
343 345 def hg_proxy(self):
344 346 @wsgiapp
345 347 def _hg_proxy(environ, start_response):
346 348 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
347 349 return app(environ, start_response)
348 350 return _hg_proxy
349 351
350 352 def git_proxy(self):
351 353 @wsgiapp
352 354 def _git_proxy(environ, start_response):
353 355 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
354 356 return app(environ, start_response)
355 357 return _git_proxy
356 358
357 359 def hg_stream(self):
358 360 if self._use_echo_app:
359 361 @wsgiapp
360 362 def _hg_stream(environ, start_response):
361 363 app = EchoApp('fake_path', 'fake_name', None)
362 364 return app(environ, start_response)
363 365 return _hg_stream
364 366 else:
365 367 @wsgiapp
366 368 def _hg_stream(environ, start_response):
367 369 log.debug('http-app: handling hg stream')
368 370 repo_path = environ['HTTP_X_RC_REPO_PATH']
369 371 repo_name = environ['HTTP_X_RC_REPO_NAME']
370 372 packed_config = base64.b64decode(
371 373 environ['HTTP_X_RC_REPO_CONFIG'])
372 374 config = msgpack.unpackb(packed_config)
373 375 app = scm_app.create_hg_wsgi_app(
374 376 repo_path, repo_name, config)
375 377
376 378 # Consistent path information for hgweb
377 379 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
378 380 environ['REPO_NAME'] = repo_name
379 381 self.set_env_from_config(environ, config)
380 382
381 383 log.debug('http-app: starting app handler '
382 384 'with %s and process request', app)
383 385 return app(environ, ResponseFilter(start_response))
384 386 return _hg_stream
385 387
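
For context, a hedged sketch of how a caller would prepare the X-RC-* headers consumed by _hg_stream: the repo config is msgpack-packed and then base64-encoded, mirroring the decoding above. The paths and names below are made up.

    import base64
    import msgpack

    repo_config = [('web', 'allow_push', '*')]  # example (section, option, value) entries

    headers = {
        'X-RC-Repo-Path': '/srv/repos/example-repo',
        'X-RC-Repo-Name': 'example-repo',
        'X-RC-Repo-Config': base64.b64encode(msgpack.packb(repo_config)),
        'X-RC-Path-Info': '/example-repo',
    }
    # WSGI exposes these as HTTP_X_RC_REPO_PATH, HTTP_X_RC_REPO_NAME,
    # HTTP_X_RC_REPO_CONFIG and HTTP_X_RC_PATH_INFO, exactly as read above.
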
386 388 def git_stream(self):
387 389 if self._use_echo_app:
388 390 @wsgiapp
389 391 def _git_stream(environ, start_response):
390 392 app = EchoApp('fake_path', 'fake_name', None)
391 393 return app(environ, start_response)
392 394 return _git_stream
393 395 else:
394 396 @wsgiapp
395 397 def _git_stream(environ, start_response):
396 398 log.debug('http-app: handling git stream')
397 399 repo_path = environ['HTTP_X_RC_REPO_PATH']
398 400 repo_name = environ['HTTP_X_RC_REPO_NAME']
399 401 packed_config = base64.b64decode(
400 402 environ['HTTP_X_RC_REPO_CONFIG'])
401 403 config = msgpack.unpackb(packed_config)
402 404
403 405 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
404 406 self.set_env_from_config(environ, config)
405 407
406 408 content_type = environ.get('CONTENT_TYPE', '')
407 409
408 410 path = environ['PATH_INFO']
409 411 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
410 412 log.debug(
411 413 'LFS: Detecting if request `%s` is LFS server path based '
412 414 'on content type:`%s`, is_lfs:%s',
413 415 path, content_type, is_lfs_request)
414 416
415 417 if not is_lfs_request:
416 418 # fallback detection by path
417 419 if GIT_LFS_PROTO_PAT.match(path):
418 420 is_lfs_request = True
419 421 log.debug(
420 422 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
421 423 path, is_lfs_request)
422 424
423 425 if is_lfs_request:
424 426 app = scm_app.create_git_lfs_wsgi_app(
425 427 repo_path, repo_name, config)
426 428 else:
427 429 app = scm_app.create_git_wsgi_app(
428 430 repo_path, repo_name, config)
429 431
430 432 log.debug('http-app: starting app handler '
431 433 'with %s and process request', app)
432 434
433 435 return app(environ, start_response)
434 436
435 437 return _git_stream
436 438
437 439 def is_vcs_view(self, context, request):
438 440 """
439 441 View predicate that returns true if the given backend is supported by
440 442 the defined remotes.
441 443 """
442 444 backend = request.matchdict.get('backend')
443 445 return backend in self._remotes
444 446
445 447 def handle_vcs_exception(self, exception, request):
446 448 _vcs_kind = getattr(exception, '_vcs_kind', '')
447 449 if _vcs_kind == 'repo_locked':
448 450 # Get custom repo-locked status code if present.
449 451 status_code = request.headers.get('X-RC-Locked-Status-Code')
450 452 return HTTPRepoLocked(
451 453 title=exception.message, status_code=status_code)
452 454
453 455 # Re-raise exception if we can not handle it.
454 456 log.exception(
455 457 'error occurred handling this request for path: %s', request.path)
456 458 raise exception
457 459
458 460
459 461 class ResponseFilter(object):
460 462
461 463 def __init__(self, start_response):
462 464 self._start_response = start_response
463 465
464 466 def __call__(self, status, response_headers, exc_info=None):
465 467 headers = tuple(
466 468 (h, v) for h, v in response_headers
467 469 if not wsgiref.util.is_hop_by_hop(h))
468 470 return self._start_response(status, headers, exc_info)
469 471
470 472
471 473 def main(global_config, **settings):
472 474 if MercurialFactory:
473 475 hgpatches.patch_largefiles_capabilities()
474 476 hgpatches.patch_subrepo_type_mapping()
475 477 app = HTTPApplication(settings=settings, global_config=global_config)
476 478 return app.wsgi_app()
@@ -1,386 +1,386 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """Handles the Git smart protocol."""
19 19
20 20 import os
21 21 import socket
22 22 import logging
23 23
24 24 import simplejson as json
25 25 import dulwich.protocol
26 26 from webob import Request, Response, exc
27 27
28 28 from vcsserver import hooks, subprocessio
29 29
30 30
31 31 log = logging.getLogger(__name__)
32 32
33 33
34 34 class FileWrapper(object):
35 35 """File wrapper that ensures how much data is read from it."""
36 36
37 37 def __init__(self, fd, content_length):
38 38 self.fd = fd
39 39 self.content_length = content_length
40 40 self.remain = content_length
41 41
42 42 def read(self, size):
43 43 if size <= self.remain:
44 44 try:
45 45 data = self.fd.read(size)
46 46 except socket.error:
47 47 raise IOError(self)
48 48 self.remain -= size
49 49 elif self.remain:
50 50 data = self.fd.read(self.remain)
51 51 self.remain = 0
52 52 else:
53 53 data = None
54 54 return data
55 55
56 56 def __repr__(self):
57 57 return '<FileWrapper %s len: %s, read: %s>' % (
58 58 self.fd, self.content_length, self.content_length - self.remain
59 59 )
60 60
61 61
62 62 class GitRepository(object):
63 63 """WSGI app for handling Git smart protocol endpoints."""
64 64
65 65 git_folder_signature = frozenset(
66 66 ('config', 'head', 'info', 'objects', 'refs'))
67 67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 68 valid_accepts = frozenset(('application/x-%s-result' %
69 69 c for c in commands))
70 70
71 71 # The last bytes are the SHA1 of the first 12 bytes.
72 72 EMPTY_PACK = (
73 73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
74 74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 75 )
76 76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
77 77
78 78 def __init__(self, repo_name, content_path, git_path, update_server_info,
79 79 extras):
80 80 files = frozenset(f.lower() for f in os.listdir(content_path))
81 81 valid_dir_signature = self.git_folder_signature.issubset(files)
82 82
83 83 if not valid_dir_signature:
84 84 raise OSError('%s missing git signature' % content_path)
85 85
86 86 self.content_path = content_path
87 87 self.repo_name = repo_name
88 88 self.extras = extras
89 89 self.git_path = git_path
90 90 self.update_server_info = update_server_info
91 91
92 92 def _get_fixedpath(self, path):
93 93 """
94 94 Small fix for repo_path: strips the repo name and a leading '.git'.
95 95
96 96 :param path: the request path to normalize
97 97 """
98 98 path = path.split(self.repo_name, 1)[-1]
99 99 if path.startswith('.git'):
100 100 # for bare repos we still get the .git prefix inside; we skip it
101 101 # here and remove it from the service command
102 102 path = path[4:]
103 103
104 104 return path.strip('/')
105 105
106 106 def inforefs(self, request, unused_environ):
107 107 """
108 108 WSGI Response producer for HTTP GET Git Smart
109 109 HTTP /info/refs request.
110 110 """
111 111
112 112 git_command = request.GET.get('service')
113 113 if git_command not in self.commands:
114 114 log.debug('command %s not allowed', git_command)
115 115 return exc.HTTPForbidden()
116 116
117 117 # please, resist the urge to add '\n' to git capture and increment
118 118 # line count by 1.
119 119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
120 120 # a part of protocol.
121 121 # The code in Git client not only does NOT need '\n', but actually
122 122 # blows up if you sprinkle "flush" (0000) as "0001\n".
123 123 # It reads binary, per number of bytes specified.
124 124 # if you do add '\n' as part of data, count it.
125 125 server_advert = '# service=%s\n' % git_command
126 126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
127 127 try:
128 128 gitenv = dict(os.environ)
129 129 # forget all configs
130 130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
131 131 command = [self.git_path, git_command[4:], '--stateless-rpc',
132 132 '--advertise-refs', self.content_path]
133 133 out = subprocessio.SubprocessIOChunker(
134 134 command,
135 135 env=gitenv,
136 136 starting_values=[packet_len + server_advert + '0000'],
137 137 shell=False
138 138 )
139 139 except EnvironmentError:
140 140 log.exception('Error processing command')
141 141 raise exc.HTTPExpectationFailed()
142 142
143 143 resp = Response()
144 144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
145 145 resp.charset = None
146 146 resp.app_iter = out
147 147
148 148 return resp
149 149
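
To make the pkt-line arithmetic above concrete, here is the advert worked out for git-upload-pack: the four hex digits encode the payload length plus the 4-byte length field itself.

    git_command = 'git-upload-pack'
    server_advert = '# service=%s\n' % git_command                   # 26 bytes
    packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()

    assert packet_len == '001e'                                      # 26 + 4 == 30 == 0x1e
    first_chunk = packet_len + server_advert + '0000'
    # -> '001e# service=git-upload-pack\n0000', the starting value fed to the chunker.
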
150 150 def _get_want_capabilities(self, request):
151 151 """Read the capabilities found in the first want line of the request."""
152 152 pos = request.body_file_seekable.tell()
153 153 first_line = request.body_file_seekable.readline()
154 154 request.body_file_seekable.seek(pos)
155 155
156 156 return frozenset(
157 157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158 158
159 159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 160 """
161 161 Construct a response with an empty PACK file.
162 162
163 163 We use an empty PACK file, as that would trigger the failure of the pull
164 164 or clone command.
165 165
166 166 We also print in the error output a message explaining why the command
167 167 was aborted.
168 168
169 169 If, additionally, the user accepts messages, we send them the output
170 170 of the pre-pull hook.
171 171
172 172 Note that for clients not supporting side-band we just send them the
173 173 empty PACK file.
174 174 """
175 175 if self.SIDE_BAND_CAPS.intersection(capabilities):
176 176 response = []
177 177 proto = dulwich.protocol.Protocol(None, response.append)
178 178 proto.write_pkt_line('NAK\n')
179 179 self._write_sideband_to_proto(pre_pull_messages, proto,
180 180 capabilities)
181 181 # N.B.(skreft): Do not change the sideband channel to 3, as that
182 182 # produces a fatal error in the client:
183 183 # fatal: error in sideband demultiplexer
184 184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
185 185 proto.write_sideband(1, self.EMPTY_PACK)
186 186
187 187 # writes 0000
188 188 proto.write_pkt_line(None)
189 189
190 190 return response
191 191 else:
192 192 return [self.EMPTY_PACK]
193 193
194 194 def _write_sideband_to_proto(self, data, proto, capabilities):
195 195 """
196 196 Write the data to the proto's sideband number 2.
197 197
198 198 We do not use dulwich's write_sideband directly as it only supports
199 199 side-band-64k.
200 200 """
201 201 if not data:
202 202 return
203 203
204 204 # N.B.(skreft): The values below are explained in the pack protocol
205 205 # documentation, section Packfile Data.
206 206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
207 207 if 'side-band-64k' in capabilities:
208 208 chunk_size = 65515
209 209 elif 'side-band' in capabilities:
210 210 chunk_size = 995
211 211 else:
212 212 return
213 213
214 214 chunker = (
215 215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
216 216
217 217 for chunk in chunker:
218 218 proto.write_sideband(2, chunk)
219 219
220 220 def _get_messages(self, data, capabilities):
221 221 """Return a list with packets for sending data in sideband number 2."""
222 222 response = []
223 223 proto = dulwich.protocol.Protocol(None, response.append)
224 224
225 225 self._write_sideband_to_proto(data, proto, capabilities)
226 226
227 227 return response
228 228
229 229 def _inject_messages_to_response(self, response, capabilities,
230 230 start_messages, end_messages):
231 231 """
232 232 Given a list response we inject the pre/post-pull messages.
233 233
234 234 We only inject the messages if the client supports sideband, and the
235 235 response has the format:
236 236 0008NAK\n...0000
237 237
238 238 Note that we do not check the no-progress capability, as git sends it
239 239 by default, which would effectively block all messages.
240 240 """
241 241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
242 242 return response
243 243
244 244 if not start_messages and not end_messages:
245 245 return response
246 246
247 247 # make a list out of response if it's an iterator
248 248 # so we can investigate it for message injection.
249 249 if hasattr(response, '__iter__'):
250 250 response = list(response)
251 251
252 252 if (not response[0].startswith('0008NAK\n') or
253 253 not response[-1].endswith('0000')):
254 254 return response
255 255
256 256 new_response = ['0008NAK\n']
257 257 new_response.extend(self._get_messages(start_messages, capabilities))
258 258 if len(response) == 1:
259 259 new_response.append(response[0][8:-4])
260 260 else:
261 261 new_response.append(response[0][8:])
262 262 new_response.extend(response[1:-1])
263 263 new_response.append(response[-1][:-4])
264 264 new_response.extend(self._get_messages(end_messages, capabilities))
265 265 new_response.append('0000')
266 266
267 267 return new_response
268 268
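
The sideband encoding used by _get_messages and _write_sideband_to_proto above can be seen in isolation with dulwich directly; a minimal sketch (the message text is invented):

    import dulwich.protocol

    response = []
    proto = dulwich.protocol.Protocol(None, response.append)
    # Channel 2 is the progress/messages band, as used for hook output above.
    proto.write_sideband(2, 'Hello from the pre-pull hook\n')
    # response now holds one pkt-line: a 4-digit hex length, the '\x02' band byte,
    # then the message text; these are the packets spliced into the git response.
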
269 269 def backend(self, request, environ):
270 270 """
271 271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
272 272 Reads commands and data from HTTP POST's body.
273 273 returns an iterator obj with contents of git command's
274 274 response to stdout
275 275 """
276 276 # TODO(skreft): think how we could detect an HTTPLockedException, as
277 277 # we probably want to have the same mechanism used by mercurial and
278 278 # simplevcs.
279 279 # For that we would need to parse the output of the command looking for
280 280 # some signs of the HTTPLockedError, parse the data and reraise it in
281 281 # pygrack. However, that would interfere with the streaming.
282 282 #
283 283 # Now the output of a blocked push is:
284 284 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
285 285 # POST git-receive-pack (1047 bytes)
286 286 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
287 287 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
288 288 # ! [remote rejected] master -> master (pre-receive hook declined)
289 289 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
290 290
291 291 git_command = self._get_fixedpath(request.path_info)
292 292 if git_command not in self.commands:
293 293 log.debug('command %s not allowed', git_command)
294 294 return exc.HTTPForbidden()
295 295
296 296 capabilities = None
297 297 if git_command == 'git-upload-pack':
298 298 capabilities = self._get_want_capabilities(request)
299 299
300 300 if 'CONTENT_LENGTH' in environ:
301 301 inputstream = FileWrapper(request.body_file_seekable,
302 302 request.content_length)
303 303 else:
304 304 inputstream = request.body_file_seekable
305 305
306 306 resp = Response()
307 307 resp.content_type = ('application/x-%s-result' %
308 308 git_command.encode('utf8'))
309 309 resp.charset = None
310 310
311 311 pre_pull_messages = ''
312 312 if git_command == 'git-upload-pack':
313 313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
314 314 if status != 0:
315 315 resp.app_iter = self._build_failed_pre_pull_response(
316 316 capabilities, pre_pull_messages)
317 317 return resp
318 318
319 319 gitenv = dict(os.environ)
320 320 # forget all configs
321 321 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
322 322 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
323 323 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
324 324 self.content_path]
325 325 log.debug('handling cmd %s', cmd)
326 326
327 327 out = subprocessio.SubprocessIOChunker(
328 328 cmd,
329 329 inputstream=inputstream,
330 330 env=gitenv,
331 331 cwd=self.content_path,
332 332 shell=False,
333 333 fail_on_stderr=False,
334 334 fail_on_return_code=False
335 335 )
336 336
337 337 if self.update_server_info and git_command == 'git-receive-pack':
338 338 # We need to fully consume the iterator here, as the
339 339 # update-server-info command needs to be run after the push.
340 340 out = list(out)
341 341
342 342 # Updating refs manually after each push.
343 343 # This is required as some clients are exposing Git repos internally
344 344 # with the dumb protocol.
345 345 cmd = [self.git_path, 'update-server-info']
346 346 log.debug('handling cmd %s', cmd)
347 347 output = subprocessio.SubprocessIOChunker(
348 348 cmd,
349 349 inputstream=inputstream,
350 350 env=gitenv,
351 351 cwd=self.content_path,
352 352 shell=False,
353 353 fail_on_stderr=False,
354 354 fail_on_return_code=False
355 355 )
356 356 # Consume all the output so the subprocess finishes
357 357 for _ in output:
358 358 pass
359 359
360 360 if git_command == 'git-upload-pack':
361 361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
362 362 resp.app_iter = self._inject_messages_to_response(
363 363 out, capabilities, pre_pull_messages, post_pull_messages)
364 364 else:
365 365 resp.app_iter = out
366 366
367 367 return resp
368 368
369 369 def __call__(self, environ, start_response):
370 370 request = Request(environ)
371 371 _path = self._get_fixedpath(request.path_info)
372 372 if _path.startswith('info/refs'):
373 373 app = self.inforefs
374 374 else:
375 375 app = self.backend
376 376
377 377 try:
378 378 resp = app(request, environ)
379 379 except exc.HTTPException as error:
380 380 log.exception('HTTP Error')
381 381 resp = error
382 382 except Exception:
383 383 log.exception('Unknown error')
384 384 resp = exc.HTTPInternalServerError()
385 385
386 386 return resp(environ, start_response)
@@ -1,34 +1,34 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from vcsserver import scm_app, wsgi_app_caller
19 19
20 20
21 21 class GitRemoteWsgi(object):
22 22 def handle(self, environ, input_data, *args, **kwargs):
23 23 app = wsgi_app_caller.WSGIAppCaller(
24 24 scm_app.create_git_wsgi_app(*args, **kwargs))
25 25
26 26 return app.handle(environ, input_data)
27 27
28 28
29 29 class HgRemoteWsgi(object):
30 30 def handle(self, environ, input_data, *args, **kwargs):
31 31 app = wsgi_app_caller.WSGIAppCaller(
32 32 scm_app.create_hg_wsgi_app(*args, **kwargs))
33 33
34 34 return app.handle(environ, input_data)
@@ -1,229 +1,229 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import logging
20 20 import itertools
21 21
22 22 import mercurial
23 23 import mercurial.error
24 24 import mercurial.hgweb.common
25 25 import mercurial.hgweb.hgweb_mod
26 26 import mercurial.hgweb.protocol
27 27 import webob.exc
28 28
29 29 from vcsserver import pygrack, exceptions, settings, git_lfs
30 30
31 31
32 32 log = logging.getLogger(__name__)
33 33
34 34
35 35 # propagated from mercurial documentation
36 36 HG_UI_SECTIONS = [
37 37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 40 ]
41 41
42 42
43 43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 44 """Extension of hgweb that simplifies some functions."""
45 45
46 46 def _get_view(self, repo):
47 47 """Views are not supported."""
48 48 return repo
49 49
50 50 def loadsubweb(self):
51 51 """The result is only used in the templater method which is not used."""
52 52 return None
53 53
54 54 def run(self):
55 55 """Unused function so raise an exception if accidentally called."""
56 56 raise NotImplementedError
57 57
58 58 def templater(self, req):
59 59 """Function used in an unreachable code path.
60 60
61 61 This code is unreachable because we guarantee that the HTTP request
62 62 corresponds to a Mercurial command. See the is_hg method. So we are
63 63 never going to get a user-visible url.
64 64 """
65 65 raise NotImplementedError
66 66
67 67 def archivelist(self, nodeid):
68 68 """Unused function so raise an exception if accidentally called."""
69 69 raise NotImplementedError
70 70
71 71 def __call__(self, environ, start_response):
72 72 """Run the WSGI application.
73 73
74 74 This may be called by multiple threads.
75 75 """
76 76 req = mercurial.hgweb.request.wsgirequest(environ, start_response)
77 77 gen = self.run_wsgi(req)
78 78
79 79 first_chunk = None
80 80
81 81 try:
82 82 data = gen.next()
83 83 def first_chunk(): yield data
84 84 except StopIteration:
85 85 pass
86 86
87 87 if first_chunk:
88 88 return itertools.chain(first_chunk(), gen)
89 89 return gen
90 90
91 91 def _runwsgi(self, req, repo):
92 92 cmd = req.form.get('cmd', [''])[0]
93 93 if not mercurial.hgweb.protocol.iscmd(cmd):
94 94 req.respond(
95 95 mercurial.hgweb.common.ErrorResponse(
96 96 mercurial.hgweb.common.HTTP_BAD_REQUEST),
97 97 mercurial.hgweb.protocol.HGTYPE
98 98 )
99 99 return ['']
100 100
101 101 return super(HgWeb, self)._runwsgi(req, repo)
102 102
103 103
104 104 def make_hg_ui_from_config(repo_config):
105 105 baseui = mercurial.ui.ui()
106 106
107 107 # clean the baseui object
108 108 baseui._ocfg = mercurial.config.config()
109 109 baseui._ucfg = mercurial.config.config()
110 110 baseui._tcfg = mercurial.config.config()
111 111
112 112 for section, option, value in repo_config:
113 113 baseui.setconfig(section, option, value)
114 114
115 115 # make our hgweb quiet so it doesn't print output
116 116 baseui.setconfig('ui', 'quiet', 'true')
117 117
118 118 return baseui
119 119
120 120
121 121 def update_hg_ui_from_hgrc(baseui, repo_path):
122 122 path = os.path.join(repo_path, '.hg', 'hgrc')
123 123
124 124 if not os.path.isfile(path):
125 125 log.debug('hgrc file is not present at %s, skipping...', path)
126 126 return
127 127 log.debug('reading hgrc from %s', path)
128 128 cfg = mercurial.config.config()
129 129 cfg.read(path)
130 130 for section in HG_UI_SECTIONS:
131 131 for k, v in cfg.items(section):
132 132 log.debug('setting ui from file: [%s] %s=%s', section, k, v)
133 133 baseui.setconfig(section, k, v)
134 134
135 135
136 136 def create_hg_wsgi_app(repo_path, repo_name, config):
137 137 """
138 138 Prepares a WSGI application to handle Mercurial requests.
139 139
140 140 :param config: is a list of 3-item tuples representing a ConfigObject
141 141 (it is the serialized version of the config object).
142 142 """
143 143 log.debug("Creating Mercurial WSGI application")
144 144
145 145 baseui = make_hg_ui_from_config(config)
146 146 update_hg_ui_from_hgrc(baseui, repo_path)
147 147
148 148 try:
149 149 return HgWeb(repo_path, name=repo_name, baseui=baseui)
150 150 except mercurial.error.RequirementError as exc:
151 151 raise exceptions.RequirementException(exc)
152 152
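
A hedged usage sketch of create_hg_wsgi_app: the config argument is the serialized ui, a list of (section, option, value) tuples as consumed by make_hg_ui_from_config above. The repository path is an assumption and must point at an existing Mercurial repository.

    repo_path = '/srv/repos/example-hg-repo'    # assumed existing hg repository
    repo_config = [
        ('ui', 'username', 'example-user'),
        ('web', 'allow_push', '*'),
    ]

    hg_app = create_hg_wsgi_app(repo_path, 'example-hg-repo', repo_config)
    # hg_app is a WSGI callable (HgWeb) that hg_stream in http_main wraps and serves.
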
153 153
154 154 class GitHandler(object):
155 155 """
156 156 Handler for Git operations like push/pull etc
157 157 """
158 158 def __init__(self, repo_location, repo_name, git_path, update_server_info,
159 159 extras):
160 160 if not os.path.isdir(repo_location):
161 161 raise OSError(repo_location)
162 162 self.content_path = repo_location
163 163 self.repo_name = repo_name
164 164 self.repo_location = repo_location
165 165 self.extras = extras
166 166 self.git_path = git_path
167 167 self.update_server_info = update_server_info
168 168
169 169 def __call__(self, environ, start_response):
170 170 app = webob.exc.HTTPNotFound()
171 171 candidate_paths = (
172 172 self.content_path, os.path.join(self.content_path, '.git'))
173 173
174 174 for content_path in candidate_paths:
175 175 try:
176 176 app = pygrack.GitRepository(
177 177 self.repo_name, content_path, self.git_path,
178 178 self.update_server_info, self.extras)
179 179 break
180 180 except OSError:
181 181 continue
182 182
183 183 return app(environ, start_response)
184 184
185 185
186 186 def create_git_wsgi_app(repo_path, repo_name, config):
187 187 """
188 188 Creates a WSGI application to handle Git requests.
189 189
190 190 :param config: is a dictionary holding the extras.
191 191 """
192 192 git_path = settings.GIT_EXECUTABLE
193 193 update_server_info = config.pop('git_update_server_info')
194 194 app = GitHandler(
195 195 repo_path, repo_name, git_path, update_server_info, config)
196 196
197 197 return app
198 198
199 199
200 200 class GitLFSHandler(object):
201 201 """
202 202 Handler for Git LFS operations
203 203 """
204 204
205 205 def __init__(self, repo_location, repo_name, git_path, update_server_info,
206 206 extras):
207 207 if not os.path.isdir(repo_location):
208 208 raise OSError(repo_location)
209 209 self.content_path = repo_location
210 210 self.repo_name = repo_name
211 211 self.repo_location = repo_location
212 212 self.extras = extras
213 213 self.git_path = git_path
214 214 self.update_server_info = update_server_info
215 215
216 216 def get_app(self, git_lfs_enabled, git_lfs_store_path):
217 217 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
218 218 return app
219 219
220 220
221 221 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
222 222 git_path = settings.GIT_EXECUTABLE
223 223 update_server_info = config.pop('git_update_server_info')
224 224 git_lfs_enabled = config.pop('git_lfs_enabled')
225 225 git_lfs_store_path = config.pop('git_lfs_store_path')
226 226 app = GitLFSHandler(
227 227 repo_path, repo_name, git_path, update_server_info, config)
228 228
229 229 return app.get_app(git_lfs_enabled, git_lfs_store_path)
@@ -1,78 +1,78 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import gc
19 19 import logging
20 20 import os
21 21 import time
22 22
23 23
24 24 log = logging.getLogger(__name__)
25 25
26 26
27 27 class VcsServer(object):
28 28 """
29 29 Exposed remote interface of the vcsserver itself.
30 30
31 31 This object can be used to manage the server remotely. Right now the main
32 32 use case is to allow shutting down the server.
33 33 """
34 34
35 35 _shutdown = False
36 36
37 37 def shutdown(self):
38 38 self._shutdown = True
39 39
40 40 def ping(self):
41 41 """
42 42 Utility to probe a server connection.
43 43 """
44 44 log.debug("Received server ping.")
45 45
46 46 def echo(self, data):
47 47 """
48 48 Utility for performance testing.
49 49
50 50 Allows passing in arbitrary data and returns the same data.
51 51 """
52 52 log.debug("Received server echo.")
53 53 return data
54 54
55 55 def sleep(self, seconds):
56 56 """
57 57 Utility to simulate long running server interaction.
58 58 """
59 59 log.debug("Sleeping %s seconds", seconds)
60 60 time.sleep(seconds)
61 61
62 62 def get_pid(self):
63 63 """
64 64 Allows discovering the PID through a proxy object.
65 65 """
66 66 return os.getpid()
67 67
68 68 def run_gc(self):
69 69 """
70 70 Allows triggering the garbage collector.
71 71
72 72 Main intention is to support statistics gathering during test runs.
73 73 """
74 74 freed_objects = gc.collect()
75 75 return {
76 76 'freed_objects': freed_objects,
77 77 'garbage': len(gc.garbage),
78 78 }
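
In production these methods are reached over the msgpack dispatch under the 'server' backend registered in http_main; in isolation the object can also be exercised directly, for example:

    server = VcsServer()
    server.ping()                          # logs "Received server ping."
    assert server.echo({'answer': 42}) == {'answer': 42}
    stats = server.run_gc()                # {'freed_objects': ..., 'garbage': ...}
    pid = server.get_pid()
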
@@ -1,19 +1,19 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 WIRE_ENCODING = 'UTF-8'
19 19 GIT_EXECUTABLE = 'git'
@@ -1,476 +1,481 b''
1 1 """
2 2 This module provides a class that wraps communication over subprocess.Popen
3 3 input, output, and error streams into a meaningful, non-blocking, concurrent
4 4 stream processor exposing the output data as an iterator fit to be a
5 5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6 6
7 7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
8 8
9 9 This file is part of git_http_backend.py Project.
10 10
11 11 git_http_backend.py Project is free software: you can redistribute it and/or
12 12 modify it under the terms of the GNU Lesser General Public License as
13 13 published by the Free Software Foundation, either version 2.1 of the License,
14 14 or (at your option) any later version.
15 15
16 16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 19 GNU Lesser General Public License for more details.
20 20
21 21 You should have received a copy of the GNU Lesser General Public License
22 22 along with git_http_backend.py Project.
23 23 If not, see <http://www.gnu.org/licenses/>.
24 24 """
25 25 import os
26 26 import subprocess32 as subprocess
27 27 from collections import deque
28 28 from threading import Event, Thread
29 29
30 30
31 31 class StreamFeeder(Thread):
32 32 """
33 33 Normal writing into a pipe-like object blocks once the buffer is filled.
34 34 This thread allows the main thread to feed data from a file-like object
35 35 into a pipe without blocking.
36 36 We close the input pipe once the end of the source stream is reached.
37 37 """
38 38
39 39 def __init__(self, source):
40 40 super(StreamFeeder, self).__init__()
41 41 self.daemon = True
42 42 filelike = False
43 43 self.bytes = bytes()
44 44 if type(source) in (type(''), bytes, bytearray): # string-like
45 45 self.bytes = bytes(source)
46 46 else: # can be either file pointer or file-like
47 47 if type(source) in (int, long): # file pointer it is
48 ## converting file descriptor (int) stdin into file-like
48 # converting file descriptor (int) stdin into file-like
49 49 try:
50 50 source = os.fdopen(source, 'rb', 16384)
51 51 except Exception:
52 52 pass
53 53 # let's see if source is file-like by now
54 54 try:
55 55 filelike = source.read
56 56 except Exception:
57 57 pass
58 58 if not filelike and not self.bytes:
59 59 raise TypeError("StreamFeeder's source object must be a readable "
60 60 "file-like, a file descriptor, or a string-like.")
61 61 self.source = source
62 62 self.readiface, self.writeiface = os.pipe()
63 63
64 64 def run(self):
65 65 t = self.writeiface
66 66 if self.bytes:
67 67 os.write(t, self.bytes)
68 68 else:
69 69 s = self.source
70 70 b = s.read(4096)
71 71 while b:
72 72 os.write(t, b)
73 73 b = s.read(4096)
74 74 os.close(t)
75 75
76 76 @property
77 77 def output(self):
78 78 return self.readiface
79 79
80 80
81 81 class InputStreamChunker(Thread):
82 82 def __init__(self, source, target, buffer_size, chunk_size):
83 83
84 84 super(InputStreamChunker, self).__init__()
85 85
86 86 self.daemon = True # die die die.
87 87
88 88 self.source = source
89 89 self.target = target
90 90 self.chunk_count_max = int(buffer_size / chunk_size) + 1
91 91 self.chunk_size = chunk_size
92 92
93 93 self.data_added = Event()
94 94 self.data_added.clear()
95 95
96 96 self.keep_reading = Event()
97 97 self.keep_reading.set()
98 98
99 99 self.EOF = Event()
100 100 self.EOF.clear()
101 101
102 102 self.go = Event()
103 103 self.go.set()
104 104
105 105 def stop(self):
106 106 self.go.clear()
107 107 self.EOF.set()
108 108 try:
109 109 # this is not proper, but is done to force the reader thread to let
110 110 # go of the input because, if successful, .close() will send EOF
111 111 # down the pipe.
112 112 self.source.close()
113 113 except:
114 114 pass
115 115
116 116 def run(self):
117 117 s = self.source
118 118 t = self.target
119 119 cs = self.chunk_size
120 120 ccm = self.chunk_count_max
121 kr = self.keep_reading
121 keep_reading = self.keep_reading
122 122 da = self.data_added
123 123 go = self.go
124 124
125 125 try:
126 126 b = s.read(cs)
127 127 except ValueError:
128 128 b = ''
129 129
130 130 while b and go.is_set():
131 131 if len(t) > ccm:
132 kr.clear()
133 kr.wait(2)
134 # # this only works on 2.7.x and up
135 # if not kr.wait(10):
136 # raise Exception("Timed out while waiting for input to be read.")
137 # instead we'll use this
138 if len(t) > ccm + 3:
139 raise IOError(
140 "Timed out while waiting for input from subprocess.")
132 keep_reading.clear()
133 keep_reading.wait(2)
134
135 if not keep_reading.wait(10):
136 raise Exception(
137 "Timed out while waiting for input to be read.")
138
141 139 t.append(b)
142 140 da.set()
143 141 b = s.read(cs)
144 142 self.EOF.set()
145 143 da.set() # for cases when done but there was no input.
146 144
147 145
148 146 class BufferedGenerator(object):
149 147 """
150 148 This class behaves as a non-blocking, buffered pipe reader.
151 149 It reads chunks of data (through a thread)
152 150 from a blocking pipe and attaches these to an array (deque) of chunks.
153 151 Reading is halted in the thread when the maximum number of chunks is buffered.
154 152 The .next() may operate in blocking or non-blocking fashion by yielding
155 153 '' if no data is ready
156 154 to be sent, or by not returning until there is some data to send.
157 155 When we get EOF from the underlying source pipe we raise the marker to raise
158 156 StopIteration after the last chunk of data is yielded.
159 157 """
160 158
161 159 def __init__(self, source, buffer_size=65536, chunk_size=4096,
162 starting_values=[], bottomless=False):
160 starting_values=None, bottomless=False):
161 starting_values = starting_values or []
163 162
164 163 if bottomless:
165 164 maxlen = int(buffer_size / chunk_size)
166 165 else:
167 166 maxlen = None
168 167
169 168 self.data = deque(starting_values, maxlen)
170 169 self.worker = InputStreamChunker(source, self.data, buffer_size,
171 170 chunk_size)
172 171 if starting_values:
173 172 self.worker.data_added.set()
174 173 self.worker.start()
175 174
176 175 ####################
177 176 # Generator's methods
178 177 ####################
179 178
180 179 def __iter__(self):
181 180 return self
182 181
183 182 def next(self):
184 183 while not len(self.data) and not self.worker.EOF.is_set():
185 184 self.worker.data_added.clear()
186 185 self.worker.data_added.wait(0.2)
187 186 if len(self.data):
188 187 self.worker.keep_reading.set()
189 188 return bytes(self.data.popleft())
190 189 elif self.worker.EOF.is_set():
191 190 raise StopIteration
192 191
193 def throw(self, type, value=None, traceback=None):
192 def throw(self, exc_type, value=None, traceback=None):
194 193 if not self.worker.EOF.is_set():
195 raise type(value)
194 raise exc_type(value)
196 195
197 196 def start(self):
198 197 self.worker.start()
199 198
200 199 def stop(self):
201 200 self.worker.stop()
202 201
203 202 def close(self):
204 203 try:
205 204 self.worker.stop()
206 205 self.throw(GeneratorExit)
207 206 except (GeneratorExit, StopIteration):
208 207 pass
209 208
210 209 def __del__(self):
211 210 self.close()
212 211
213 212 ####################
214 213 # Threaded reader's infrastructure.
215 214 ####################
216 215 @property
217 216 def input(self):
218 217 return self.worker.w
219 218
220 219 @property
221 220 def data_added_event(self):
222 221 return self.worker.data_added
223 222
224 223 @property
225 224 def data_added(self):
226 225 return self.worker.data_added.is_set()
227 226
228 227 @property
229 228 def reading_paused(self):
230 229 return not self.worker.keep_reading.is_set()
231 230
232 231 @property
233 232 def done_reading_event(self):
234 233 """
235 234 Being done reading does not mean that the iterator's buffer is empty.
236 235 The iterator might be done reading from the underlying source, but the
237 236 read chunks might still be available for serving through the .next() method.
238 237
239 238 :returns: An Event class instance.
240 239 """
241 240 return self.worker.EOF
242 241
243 242 @property
244 243 def done_reading(self):
245 244 """
246 245 Being done reading does not mean that the iterator's buffer is empty.
247 246 The iterator might be done reading from the underlying source, but the
248 247 read chunks might still be available for serving through the .next() method.
249 248
250 249 :returns: A bool value.
251 250 """
252 251 return self.worker.EOF.is_set()
253 252
254 253 @property
255 254 def length(self):
256 255 """
257 256 Returns an int.
258 257
259 258 This is the length of the queue of chunks, not the length of
260 259 the combined contents of those chunks.
261 260
262 261 __len__() cannot be meaningfully implemented because this
263 262 reader is just flying through a bottomless pit of content and
264 263 can only know the length of what it has already seen.
265 264
266 265 If __len__() on a WSGI server per PEP 3333 returns a value,
267 266 the response's length will be set to that. In order not to
268 267 confuse WSGI PEP 3333 servers, we will not implement __len__
269 268 at all.
270 269 """
271 270 return len(self.data)
272 271
273 272 def prepend(self, x):
274 273 self.data.appendleft(x)
275 274
276 275 def append(self, x):
277 276 self.data.append(x)
278 277
279 278 def extend(self, o):
280 279 self.data.extend(o)
281 280
282 281 def __getitem__(self, i):
283 282 return self.data[i]
284 283
285 284
286 285 class SubprocessIOChunker(object):
287 286 """
288 287 Processor class wrapping handling of subprocess IO.
289 288
290 289 .. important::
291 290
292 291 Watch out for the method `__del__` on this class. If this object
293 292 is deleted, it will kill the subprocess, so avoid to
294 293 return the `output` attribute or usage of it like in the following
295 294 example::
296 295
297 296 # `args` expected to run a program that produces a lot of output
298 297 output = ''.join(SubprocessIOChunker(
299 298 args, shell=False, inputstream=inputstream, env=environ).output)
300 299
301 300 # `output` will not contain all the data, because the __del__ method
302 301 # has already killed the subprocess in this case before all output
303 302 # has been consumed.
304 303
305 304
306 305
307 306 In a way, this is a "communicate()" replacement with a twist.
308 307
309 308 - We are multithreaded. Writing in, reading out and reading err are all separate threads.
310 309 - We support concurrent (in and out) stream processing.
311 310 - The output is not a stream. It's a queue of read string (bytes, not unicode)
312 311 chunks. The object behaves as an iterable. You can "for chunk in obj:" use it.
313 312 - We are non-blocking in more respects than communicate()
314 313 (reading from subprocess out pauses when the internal buffer is full, but
315 314 does not block the parent calling code. On the flip side, reading from a
316 315 slow-yielding subprocess may block the iteration until data shows up. This
317 316 does not block the input pipe reading occurring in a parallel thread.)
318 317
319 318 The purpose of the object is to allow us to wrap subprocess interactions into
320 319 an iterable that can be passed to a WSGI server as the application's return
321 320 value. Because of stream-processing ability, WSGI does not have to read ALL
322 321 of the subprocess's output and buffer it before handing it to the WSGI server for
323 322 the HTTP response. Instead, the class initializer reads just a bit of the stream
324 323 to figure out if an error occurred or is likely to occur and, if not, hands
325 324 further iteration over subprocess output to the server for completion of the HTTP
326 325 response.
327 326
328 327 The real or perceived subprocess error is trapped and raised as one of the
329 328 EnvironmentError family of exceptions.
330 329
331 330 Example usage:
332 331 # try:
333 332 # answer = SubprocessIOChunker(
334 333 # cmd,
335 334 # input,
336 335 # buffer_size = 65536,
337 336 # chunk_size = 4096
338 337 # )
339 338 # except (EnvironmentError) as e:
340 339 # print str(e)
341 340 # raise e
342 341 #
343 342 # return answer
344 343
345 344
346 345 """
347 346
348 347 # TODO: johbo: This is used to make sure that the open end of the PIPE
349 348 # is closed in the end. It would be way better to wrap this into an
350 349 # object, so that it is closed automatically once it is consumed or
351 350 # something similar.
352 351 _close_input_fd = None
353 352
354 353 _closed = False
355 354
356 355 def __init__(self, cmd, inputstream=None, buffer_size=65536,
357 chunk_size=4096, starting_values=[], fail_on_stderr=True,
356 chunk_size=4096, starting_values=None, fail_on_stderr=True,
358 357 fail_on_return_code=True, **kwargs):
359 358 """
360 359 Initializes SubprocessIOChunker
361 360
362 361 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
363 362 :param inputstream: (Default: None) A file-like, string, or file pointer.
364 363 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
365 364 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
366 365 :param starting_values: (Default: None) An array of strings to put in front of the output queue.
367 366 :param fail_on_stderr: (Default: True) Whether to raise an exception in
368 367 case something is written to stderr.
369 368 :param fail_on_return_code: (Default: True) Whether to raise an
370 369 exception if the return code is not 0.
371 370 """
372 371
372 starting_values = starting_values or []
373 373 if inputstream:
374 374 input_streamer = StreamFeeder(inputstream)
375 375 input_streamer.start()
376 376 inputstream = input_streamer.output
377 377 self._close_input_fd = inputstream
378 378
379 379 self._fail_on_stderr = fail_on_stderr
380 380 self._fail_on_return_code = fail_on_return_code
381 381
382 382 _shell = kwargs.get('shell', True)
383 383 kwargs['shell'] = _shell
384 384
385 385 _p = subprocess.Popen(cmd, bufsize=-1,
386 386 stdin=inputstream,
387 387 stdout=subprocess.PIPE,
388 388 stderr=subprocess.PIPE,
389 389 **kwargs)
390 390
391 391 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
392 392 starting_values)
393 393 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
394 394
395 395 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
396 396 # doing this until we reach either end of file, or end of buffer.
397 397 bg_out.data_added_event.wait(1)
398 398 bg_out.data_added_event.clear()
399 399
400 400 # at this point it's still ambiguous if we are done reading or just full buffer.
401 401 # Either way, if error (returned by ended process, or implied based on
402 402 # presence of stuff in stderr output) we error out.
403 403 # Else, we are happy.
404 404 _returncode = _p.poll()
405 405
406 406 if ((_returncode and fail_on_return_code) or
407 407 (fail_on_stderr and _returncode is None and bg_err.length)):
408 408 try:
409 409 _p.terminate()
410 410 except Exception:
411 411 pass
412 412 bg_out.stop()
413 413 bg_err.stop()
414 414 if fail_on_stderr:
415 415 err = ''.join(bg_err)
416 416 raise EnvironmentError(
417 417 "Subprocess exited due to an error:\n" + err)
418 418 if _returncode and fail_on_return_code:
419 419 err = ''.join(bg_err)
420 if not err:
421 # maybe get empty stderr, try stdout instead
422 # in many cases git reports the errors on stdout too
423 err = ''.join(bg_out)
420 424 raise EnvironmentError(
421 425 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
422 426 _returncode, err))
423 427
424 428 self.process = _p
425 429 self.output = bg_out
426 430 self.error = bg_err
427 431
428 432 def __iter__(self):
429 433 return self
430 434
431 435 def next(self):
432 436 # Note: mikhail: We need to be sure that we are checking the return
433 437 # code after the stdout stream is closed. Some processes, e.g. git
434 438 # are doing some magic in between closing stdout and terminating the
435 439 # process and, as a result, we are not getting return code on "slow"
436 440 # systems.
441 result = None
437 442 stop_iteration = None
438 443 try:
439 444 result = self.output.next()
440 445 except StopIteration as e:
441 446 stop_iteration = e
442 447
443 448 if self.process.poll() and self._fail_on_return_code:
444 449 err = '%s' % ''.join(self.error)
445 450 raise EnvironmentError(
446 451 "Subprocess exited due to an error:\n" + err)
447 452
448 453 if stop_iteration:
449 454 raise stop_iteration
450 455 return result
451 456
452 457 def throw(self, type, value=None, traceback=None):
453 458 if self.output.length or not self.output.done_reading:
454 459 raise type(value)
455 460
456 461 def close(self):
457 462 if self._closed:
458 463 return
459 464 self._closed = True
460 465 try:
461 466 self.process.terminate()
462 467 except:
463 468 pass
464 469 if self._close_input_fd:
465 470 os.close(self._close_input_fd)
466 471 try:
467 472 self.output.close()
468 473 except:
469 474 pass
470 475 try:
471 476 self.error.close()
472 477 except:
473 478 pass
474 479
475 480 def __del__(self):
476 481 self.close()
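
A self-contained usage sketch of SubprocessIOChunker outside the WSGI context; the command is arbitrary and the keyword values mirror how pygrack invokes it above (this is an illustration, not part of the module).

    import sys

    chunker = SubprocessIOChunker(
        ['echo', 'hello from a subprocess'],
        shell=False,
        fail_on_stderr=False,
        fail_on_return_code=True,
    )
    for chunk in chunker:
        sys.stdout.write(chunk)
    chunker.close()
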
@@ -1,679 +1,679 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 import os
21 21 from urllib2 import URLError
22 22 import logging
23 23 import posixpath as vcspath
24 24 import StringIO
25 25 import subprocess
26 26 import urllib
27 27 import traceback
28 28
29 29 import svn.client
30 30 import svn.core
31 31 import svn.delta
32 32 import svn.diff
33 33 import svn.fs
34 34 import svn.repos
35 35
36 36 from vcsserver import svn_diff
37 37 from vcsserver import exceptions
38 38 from vcsserver.base import RepoFactory, raise_from_original
39 39
40 40
41 41 log = logging.getLogger(__name__)
42 42
43 43
44 44 # Set of svn compatible version flags.
45 45 # Compare with subversion/svnadmin/svnadmin.c
46 46 svn_compatible_versions = set([
47 47 'pre-1.4-compatible',
48 48 'pre-1.5-compatible',
49 49 'pre-1.6-compatible',
50 50 'pre-1.8-compatible',
51 51 'pre-1.9-compatible',
52 52 ])
53 53
54 54 svn_compatible_versions_map = {
55 55 'pre-1.4-compatible': '1.3',
56 56 'pre-1.5-compatible': '1.4',
57 57 'pre-1.6-compatible': '1.5',
58 58 'pre-1.8-compatible': '1.7',
59 59 'pre-1.9-compatible': '1.8',
60 60 }
61 61
62 62
63 63 def reraise_safe_exceptions(func):
64 64 """Decorator for converting svn exceptions to something neutral."""
65 65 def wrapper(*args, **kwargs):
66 66 try:
67 67 return func(*args, **kwargs)
68 68 except Exception as e:
69 69 if not hasattr(e, '_vcs_kind'):
 70 70 log.exception("Unhandled exception in svn remote call")
71 71 raise_from_original(exceptions.UnhandledException)
72 72 raise
73 73 return wrapper
74 74
75 75
76 76 class SubversionFactory(RepoFactory):
77 77
78 78 def _create_repo(self, wire, create, compatible_version):
79 79 path = svn.core.svn_path_canonicalize(wire['path'])
80 80 if create:
81 81 fs_config = {'compatible-version': '1.9'}
82 82 if compatible_version:
83 83 if compatible_version not in svn_compatible_versions:
84 84 raise Exception('Unknown SVN compatible version "{}"'
85 85 .format(compatible_version))
86 86 fs_config['compatible-version'] = \
87 87 svn_compatible_versions_map[compatible_version]
88 88
89 89 log.debug('Create SVN repo with config "%s"', fs_config)
90 90 repo = svn.repos.create(path, "", "", None, fs_config)
91 91 else:
92 92 repo = svn.repos.open(path)
93 93
94 94 log.debug('Got SVN object: %s', repo)
95 95 return repo
96 96
97 97 def repo(self, wire, create=False, compatible_version=None):
98 98 def create_new_repo():
99 99 return self._create_repo(wire, create, compatible_version)
100 100
101 101 return self._repo(wire, create_new_repo)
102 102
103 103
104 104 NODE_TYPE_MAPPING = {
105 105 svn.core.svn_node_file: 'file',
106 106 svn.core.svn_node_dir: 'dir',
107 107 }
108 108
109 109
110 110 class SvnRemote(object):
111 111
112 112 def __init__(self, factory, hg_factory=None):
113 113 self._factory = factory
114 114 # TODO: Remove once we do not use internal Mercurial objects anymore
115 115 # for subversion
116 116 self._hg_factory = hg_factory
117 117
118 118 @reraise_safe_exceptions
119 119 def discover_svn_version(self):
120 120 try:
121 121 import svn.core
122 122 svn_ver = svn.core.SVN_VERSION
123 123 except ImportError:
124 124 svn_ver = None
125 125 return svn_ver
126 126
127 127 def check_url(self, url, config_items):
128 128 # this can throw exception if not installed, but we detect this
129 129 from hgsubversion import svnrepo
130 130
131 131 baseui = self._hg_factory._create_config(config_items)
 132 132 # uuid function gets a valid UUID only from a proper repo, else
133 133 # throws exception
134 134 try:
135 135 svnrepo.svnremoterepo(baseui, url).svn.uuid
136 136 except Exception:
137 137 tb = traceback.format_exc()
138 138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
139 139 raise URLError(
140 140 '"%s" is not a valid Subversion source url.' % (url, ))
141 141 return True
142 142
143 143 def is_path_valid_repository(self, wire, path):
144 144
 145 145 # NOTE(marcink): short-circuit the check for an SVN repo
 146 146 # repos.open might be expensive to call, but we have one cheap
 147 147 # precondition we can use: check for the 'format' file
148 148
149 149 if not os.path.isfile(os.path.join(path, 'format')):
150 150 return False
151 151
152 152 try:
153 153 svn.repos.open(path)
154 154 except svn.core.SubversionException:
155 155 tb = traceback.format_exc()
156 156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
157 157 return False
158 158 return True
159 159
160 160 @reraise_safe_exceptions
161 161 def verify(self, wire,):
162 162 repo_path = wire['path']
163 163 if not self.is_path_valid_repository(wire, repo_path):
164 164 raise Exception(
165 165 "Path %s is not a valid Subversion repository." % repo_path)
166 166
167 167 load = subprocess.Popen(
168 168 ['svnadmin', 'info', repo_path],
169 169 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
170 170 return ''.join(load.stdout)
171 171
172 172 def lookup(self, wire, revision):
173 173 if revision not in [-1, None, 'HEAD']:
174 174 raise NotImplementedError
175 175 repo = self._factory.repo(wire)
176 176 fs_ptr = svn.repos.fs(repo)
177 177 head = svn.fs.youngest_rev(fs_ptr)
178 178 return head
179 179
180 180 def lookup_interval(self, wire, start_ts, end_ts):
181 181 repo = self._factory.repo(wire)
182 182 fsobj = svn.repos.fs(repo)
183 183 start_rev = None
184 184 end_rev = None
185 185 if start_ts:
186 186 start_ts_svn = apr_time_t(start_ts)
187 187 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
188 188 else:
189 189 start_rev = 1
190 190 if end_ts:
191 191 end_ts_svn = apr_time_t(end_ts)
192 192 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
193 193 else:
194 194 end_rev = svn.fs.youngest_rev(fsobj)
195 195 return start_rev, end_rev
196 196
197 197 def revision_properties(self, wire, revision):
198 198 repo = self._factory.repo(wire)
199 199 fs_ptr = svn.repos.fs(repo)
200 200 return svn.fs.revision_proplist(fs_ptr, revision)
201 201
202 202 def revision_changes(self, wire, revision):
203 203
204 204 repo = self._factory.repo(wire)
205 205 fsobj = svn.repos.fs(repo)
206 206 rev_root = svn.fs.revision_root(fsobj, revision)
207 207
208 208 editor = svn.repos.ChangeCollector(fsobj, rev_root)
209 209 editor_ptr, editor_baton = svn.delta.make_editor(editor)
210 210 base_dir = ""
211 211 send_deltas = False
212 212 svn.repos.replay2(
213 213 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
214 214 editor_ptr, editor_baton, None)
215 215
216 216 added = []
217 217 changed = []
218 218 removed = []
219 219
220 220 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
221 221 for path, change in editor.changes.iteritems():
222 222 # TODO: Decide what to do with directory nodes. Subversion can add
223 223 # empty directories.
224 224
225 225 if change.item_kind == svn.core.svn_node_dir:
226 226 continue
227 227 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
228 228 added.append(path)
229 229 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
230 230 svn.repos.CHANGE_ACTION_REPLACE]:
231 231 changed.append(path)
232 232 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
233 233 removed.append(path)
234 234 else:
235 235 raise NotImplementedError(
236 236 "Action %s not supported on path %s" % (
237 237 change.action, path))
238 238
239 239 changes = {
240 240 'added': added,
241 241 'changed': changed,
242 242 'removed': removed,
243 243 }
244 244 return changes
245 245
246 246 def node_history(self, wire, path, revision, limit):
247 247 cross_copies = False
248 248 repo = self._factory.repo(wire)
249 249 fsobj = svn.repos.fs(repo)
250 250 rev_root = svn.fs.revision_root(fsobj, revision)
251 251
252 252 history_revisions = []
253 253 history = svn.fs.node_history(rev_root, path)
254 254 history = svn.fs.history_prev(history, cross_copies)
255 255 while history:
256 256 __, node_revision = svn.fs.history_location(history)
257 257 history_revisions.append(node_revision)
258 258 if limit and len(history_revisions) >= limit:
259 259 break
260 260 history = svn.fs.history_prev(history, cross_copies)
261 261 return history_revisions
262 262
263 263 def node_properties(self, wire, path, revision):
264 264 repo = self._factory.repo(wire)
265 265 fsobj = svn.repos.fs(repo)
266 266 rev_root = svn.fs.revision_root(fsobj, revision)
267 267 return svn.fs.node_proplist(rev_root, path)
268 268
269 269 def file_annotate(self, wire, path, revision):
270 270 abs_path = 'file://' + urllib.pathname2url(
271 271 vcspath.join(wire['path'], path))
272 272 file_uri = svn.core.svn_path_canonicalize(abs_path)
273 273
274 274 start_rev = svn_opt_revision_value_t(0)
275 275 peg_rev = svn_opt_revision_value_t(revision)
276 276 end_rev = peg_rev
277 277
278 278 annotations = []
279 279
280 280 def receiver(line_no, revision, author, date, line, pool):
281 281 annotations.append((line_no, revision, line))
282 282
283 283 # TODO: Cannot use blame5, missing typemap function in the swig code
284 284 try:
285 285 svn.client.blame2(
286 286 file_uri, peg_rev, start_rev, end_rev,
287 287 receiver, svn.client.create_context())
288 288 except svn.core.SubversionException as exc:
289 289 log.exception("Error during blame operation.")
290 290 raise Exception(
291 291 "Blame not supported or file does not exist at path %s. "
292 292 "Error %s." % (path, exc))
293 293
294 294 return annotations
295 295
296 296 def get_node_type(self, wire, path, rev=None):
297 297 repo = self._factory.repo(wire)
298 298 fs_ptr = svn.repos.fs(repo)
299 299 if rev is None:
300 300 rev = svn.fs.youngest_rev(fs_ptr)
301 301 root = svn.fs.revision_root(fs_ptr, rev)
302 302 node = svn.fs.check_path(root, path)
303 303 return NODE_TYPE_MAPPING.get(node, None)
304 304
305 305 def get_nodes(self, wire, path, revision=None):
306 306 repo = self._factory.repo(wire)
307 307 fsobj = svn.repos.fs(repo)
308 308 if revision is None:
309 309 revision = svn.fs.youngest_rev(fsobj)
310 310 root = svn.fs.revision_root(fsobj, revision)
311 311 entries = svn.fs.dir_entries(root, path)
312 312 result = []
313 313 for entry_path, entry_info in entries.iteritems():
314 314 result.append(
315 315 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
316 316 return result
317 317
318 318 def get_file_content(self, wire, path, rev=None):
319 319 repo = self._factory.repo(wire)
320 320 fsobj = svn.repos.fs(repo)
321 321 if rev is None:
322 322 rev = svn.fs.youngest_revision(fsobj)
323 323 root = svn.fs.revision_root(fsobj, rev)
324 324 content = svn.core.Stream(svn.fs.file_contents(root, path))
325 325 return content.read()
326 326
327 327 def get_file_size(self, wire, path, revision=None):
328 328 repo = self._factory.repo(wire)
329 329 fsobj = svn.repos.fs(repo)
330 330 if revision is None:
331 331 revision = svn.fs.youngest_revision(fsobj)
332 332 root = svn.fs.revision_root(fsobj, revision)
333 333 size = svn.fs.file_length(root, path)
334 334 return size
335 335
336 336 def create_repository(self, wire, compatible_version=None):
337 337 log.info('Creating Subversion repository in path "%s"', wire['path'])
338 338 self._factory.repo(wire, create=True,
339 339 compatible_version=compatible_version)
340 340
341 341 def import_remote_repository(self, wire, src_url):
342 342 repo_path = wire['path']
343 343 if not self.is_path_valid_repository(wire, repo_path):
344 344 raise Exception(
345 345 "Path %s is not a valid Subversion repository." % repo_path)
346 346 # TODO: johbo: URL checks ?
347 347 rdump = subprocess.Popen(
348 348 ['svnrdump', 'dump', '--non-interactive', src_url],
349 349 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
350 350 load = subprocess.Popen(
351 351 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
352 352
353 353 # TODO: johbo: This can be a very long operation, might be better
354 354 # to track some kind of status and provide an api to check if the
355 355 # import is done.
356 356 rdump.wait()
357 357 load.wait()
358 358
359 359 if rdump.returncode != 0:
360 360 errors = rdump.stderr.read()
361 361 log.error('svnrdump dump failed: statuscode %s: message: %s',
362 362 rdump.returncode, errors)
363 363 reason = 'UNKNOWN'
364 364 if 'svnrdump: E230001:' in errors:
365 365 reason = 'INVALID_CERTIFICATE'
366 366 raise Exception(
367 367 'Failed to dump the remote repository from %s.' % src_url,
368 368 reason)
369 369 if load.returncode != 0:
370 370 raise Exception(
371 371 'Failed to load the dump of remote repository from %s.' %
372 372 (src_url, ))
373 373
374 374 def commit(self, wire, message, author, timestamp, updated, removed):
375 375 assert isinstance(message, str)
376 376 assert isinstance(author, str)
377 377
378 378 repo = self._factory.repo(wire)
379 379 fsobj = svn.repos.fs(repo)
380 380
381 381 rev = svn.fs.youngest_rev(fsobj)
382 382 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
383 383 txn_root = svn.fs.txn_root(txn)
384 384
385 385 for node in updated:
386 386 TxnNodeProcessor(node, txn_root).update()
387 387 for node in removed:
388 388 TxnNodeProcessor(node, txn_root).remove()
389 389
390 390 commit_id = svn.repos.fs_commit_txn(repo, txn)
391 391
392 392 if timestamp:
393 393 apr_time = apr_time_t(timestamp)
394 394 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
395 395 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
396 396
397 397 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
398 398 return commit_id
399 399
400 400 def diff(self, wire, rev1, rev2, path1=None, path2=None,
401 401 ignore_whitespace=False, context=3):
402 402
403 403 wire.update(cache=False)
404 404 repo = self._factory.repo(wire)
405 405 diff_creator = SvnDiffer(
406 406 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
407 407 try:
408 408 return diff_creator.generate_diff()
409 409 except svn.core.SubversionException as e:
410 410 log.exception(
411 411 "Error during diff operation operation. "
412 412 "Path might not exist %s, %s" % (path1, path2))
413 413 return ""
414 414
415 415 @reraise_safe_exceptions
416 416 def is_large_file(self, wire, path):
417 417 return False
418 418
419 419
420 420 class SvnDiffer(object):
421 421 """
422 422 Utility to create diffs based on difflib and the Subversion api
423 423 """
424 424
425 425 binary_content = False
426 426
427 427 def __init__(
428 428 self, repo, src_rev, src_path, tgt_rev, tgt_path,
429 429 ignore_whitespace, context):
430 430 self.repo = repo
431 431 self.ignore_whitespace = ignore_whitespace
432 432 self.context = context
433 433
434 434 fsobj = svn.repos.fs(repo)
435 435
436 436 self.tgt_rev = tgt_rev
437 437 self.tgt_path = tgt_path or ''
438 438 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
439 439 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
440 440
441 441 self.src_rev = src_rev
442 442 self.src_path = src_path or self.tgt_path
443 443 self.src_root = svn.fs.revision_root(fsobj, src_rev)
444 444 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
445 445
446 446 self._validate()
447 447
448 448 def _validate(self):
449 449 if (self.tgt_kind != svn.core.svn_node_none and
450 450 self.src_kind != svn.core.svn_node_none and
451 451 self.src_kind != self.tgt_kind):
452 452 # TODO: johbo: proper error handling
453 453 raise Exception(
454 454 "Source and target are not compatible for diff generation. "
455 455 "Source type: %s, target type: %s" %
456 456 (self.src_kind, self.tgt_kind))
457 457
458 458 def generate_diff(self):
459 459 buf = StringIO.StringIO()
460 460 if self.tgt_kind == svn.core.svn_node_dir:
461 461 self._generate_dir_diff(buf)
462 462 else:
463 463 self._generate_file_diff(buf)
464 464 return buf.getvalue()
465 465
466 466 def _generate_dir_diff(self, buf):
467 467 editor = DiffChangeEditor()
468 468 editor_ptr, editor_baton = svn.delta.make_editor(editor)
469 469 svn.repos.dir_delta2(
470 470 self.src_root,
471 471 self.src_path,
472 472 '', # src_entry
473 473 self.tgt_root,
474 474 self.tgt_path,
475 475 editor_ptr, editor_baton,
476 476 authorization_callback_allow_all,
477 477 False, # text_deltas
478 478 svn.core.svn_depth_infinity, # depth
479 479 False, # entry_props
480 480 False, # ignore_ancestry
481 481 )
482 482
483 483 for path, __, change in sorted(editor.changes):
484 484 self._generate_node_diff(
485 485 buf, change, path, self.tgt_path, path, self.src_path)
486 486
487 487 def _generate_file_diff(self, buf):
488 488 change = None
489 489 if self.src_kind == svn.core.svn_node_none:
490 490 change = "add"
491 491 elif self.tgt_kind == svn.core.svn_node_none:
492 492 change = "delete"
493 493 tgt_base, tgt_path = vcspath.split(self.tgt_path)
494 494 src_base, src_path = vcspath.split(self.src_path)
495 495 self._generate_node_diff(
496 496 buf, change, tgt_path, tgt_base, src_path, src_base)
497 497
498 498 def _generate_node_diff(
499 499 self, buf, change, tgt_path, tgt_base, src_path, src_base):
500 500
501 501 if self.src_rev == self.tgt_rev and tgt_base == src_base:
 502 502 # keep behaviour consistent with git/hg: return an empty diff if
 503 503 # we compare the same revisions
504 504 return
505 505
506 506 tgt_full_path = vcspath.join(tgt_base, tgt_path)
507 507 src_full_path = vcspath.join(src_base, src_path)
508 508
509 509 self.binary_content = False
510 510 mime_type = self._get_mime_type(tgt_full_path)
511 511
512 512 if mime_type and not mime_type.startswith('text'):
513 513 self.binary_content = True
514 514 buf.write("=" * 67 + '\n')
515 515 buf.write("Cannot display: file marked as a binary type.\n")
516 516 buf.write("svn:mime-type = %s\n" % mime_type)
517 517 buf.write("Index: %s\n" % (tgt_path, ))
518 518 buf.write("=" * 67 + '\n')
519 519 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
520 520 'tgt_path': tgt_path})
521 521
522 522 if change == 'add':
523 523 # TODO: johbo: SVN is missing a zero here compared to git
524 524 buf.write("new file mode 10644\n")
525 525
 526 526 # TODO(marcink): introduce binary detection of svn patches
527 527 # if self.binary_content:
528 528 # buf.write('GIT binary patch\n')
529 529
530 530 buf.write("--- /dev/null\t(revision 0)\n")
531 531 src_lines = []
532 532 else:
533 533 if change == 'delete':
534 534 buf.write("deleted file mode 10644\n")
535 535
 536 536 # TODO(marcink): introduce binary detection of svn patches
537 537 # if self.binary_content:
538 538 # buf.write('GIT binary patch\n')
539 539
540 540 buf.write("--- a/%s\t(revision %s)\n" % (
541 541 src_path, self.src_rev))
542 542 src_lines = self._svn_readlines(self.src_root, src_full_path)
543 543
544 544 if change == 'delete':
545 545 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
546 546 tgt_lines = []
547 547 else:
548 548 buf.write("+++ b/%s\t(revision %s)\n" % (
549 549 tgt_path, self.tgt_rev))
550 550 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
551 551
552 552 if not self.binary_content:
553 553 udiff = svn_diff.unified_diff(
554 554 src_lines, tgt_lines, context=self.context,
555 555 ignore_blank_lines=self.ignore_whitespace,
556 556 ignore_case=False,
557 557 ignore_space_changes=self.ignore_whitespace)
558 558 buf.writelines(udiff)
559 559
560 560 def _get_mime_type(self, path):
561 561 try:
562 562 mime_type = svn.fs.node_prop(
563 563 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
564 564 except svn.core.SubversionException:
565 565 mime_type = svn.fs.node_prop(
566 566 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
567 567 return mime_type
568 568
569 569 def _svn_readlines(self, fs_root, node_path):
570 570 if self.binary_content:
571 571 return []
572 572 node_kind = svn.fs.check_path(fs_root, node_path)
573 573 if node_kind not in (
574 574 svn.core.svn_node_file, svn.core.svn_node_symlink):
575 575 return []
576 576 content = svn.core.Stream(
577 577 svn.fs.file_contents(fs_root, node_path)).read()
578 578 return content.splitlines(True)
579 579
580 580
581 581 class DiffChangeEditor(svn.delta.Editor):
582 582 """
583 583 Records changes between two given revisions
584 584 """
585 585
586 586 def __init__(self):
587 587 self.changes = []
588 588
589 589 def delete_entry(self, path, revision, parent_baton, pool=None):
590 590 self.changes.append((path, None, 'delete'))
591 591
592 592 def add_file(
593 593 self, path, parent_baton, copyfrom_path, copyfrom_revision,
594 594 file_pool=None):
595 595 self.changes.append((path, 'file', 'add'))
596 596
597 597 def open_file(self, path, parent_baton, base_revision, file_pool=None):
598 598 self.changes.append((path, 'file', 'change'))
599 599
600 600
601 601 def authorization_callback_allow_all(root, path, pool):
602 602 return True
603 603
604 604
605 605 class TxnNodeProcessor(object):
606 606 """
607 607 Utility to process the change of one node within a transaction root.
608 608
609 609 It encapsulates the knowledge of how to add, update or remove
610 610 a node for a given transaction root. The purpose is to support the method
611 611 `SvnRemote.commit`.
612 612 """
613 613
614 614 def __init__(self, node, txn_root):
615 615 assert isinstance(node['path'], str)
616 616
617 617 self.node = node
618 618 self.txn_root = txn_root
619 619
620 620 def update(self):
621 621 self._ensure_parent_dirs()
622 622 self._add_file_if_node_does_not_exist()
623 623 self._update_file_content()
624 624 self._update_file_properties()
625 625
626 626 def remove(self):
627 627 svn.fs.delete(self.txn_root, self.node['path'])
628 628 # TODO: Clean up directory if empty
629 629
630 630 def _ensure_parent_dirs(self):
631 631 curdir = vcspath.dirname(self.node['path'])
632 632 dirs_to_create = []
633 633 while not self._svn_path_exists(curdir):
634 634 dirs_to_create.append(curdir)
635 635 curdir = vcspath.dirname(curdir)
636 636
637 637 for curdir in reversed(dirs_to_create):
638 638 log.debug('Creating missing directory "%s"', curdir)
639 639 svn.fs.make_dir(self.txn_root, curdir)
640 640
641 641 def _svn_path_exists(self, path):
642 642 path_status = svn.fs.check_path(self.txn_root, path)
643 643 return path_status != svn.core.svn_node_none
644 644
645 645 def _add_file_if_node_does_not_exist(self):
646 646 kind = svn.fs.check_path(self.txn_root, self.node['path'])
647 647 if kind == svn.core.svn_node_none:
648 648 svn.fs.make_file(self.txn_root, self.node['path'])
649 649
650 650 def _update_file_content(self):
651 651 assert isinstance(self.node['content'], str)
652 652 handler, baton = svn.fs.apply_textdelta(
653 653 self.txn_root, self.node['path'], None, None)
654 654 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
655 655
656 656 def _update_file_properties(self):
657 657 properties = self.node.get('properties', {})
658 658 for key, value in properties.iteritems():
659 659 svn.fs.change_node_prop(
660 660 self.txn_root, self.node['path'], key, value)
661 661
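The node dictionaries consumed above are created by the caller of `SvnRemote.commit`; the following sketch is inferred from the accessors in this class (the keys shown are assumptions based on `update()` and the `_update_file_*` helpers, not a documented schema):

    node = {
        'path': 'docs/readme.txt',             # must be a str (asserted above)
        'content': 'new file content\n',       # must be a str
        'properties': {'svn:eol-style': 'native'},  # optional, defaults to {}
    }
    # within SvnRemote.commit():
    #   TxnNodeProcessor(node, txn_root).update()   # add or change the node
    #   TxnNodeProcessor(node, txn_root).remove()   # delete it instead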
662 662
663 663 def apr_time_t(timestamp):
664 664 """
665 665 Convert a Python timestamp into APR timestamp type apr_time_t
666 666 """
667 667 return timestamp * 1E6
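A quick worked example of the conversion (APR expresses time in microseconds since the epoch):

    apr_time_t(1514764800)  # 2018-01-01T00:00:00 UTC -> 1514764800000000.0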
668 668
669 669
670 670 def svn_opt_revision_value_t(num):
671 671 """
672 672 Put `num` into a `svn_opt_revision_value_t` structure.
673 673 """
674 674 value = svn.core.svn_opt_revision_value_t()
675 675 value.number = num
676 676 revision = svn.core.svn_opt_revision_t()
677 677 revision.kind = svn.core.svn_opt_revision_number
678 678 revision.value = value
679 679 return revision
@@ -1,57 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import socket
19 19
20 20 import pytest
21 21
22 22
23 23 def pytest_addoption(parser):
24 24 parser.addoption(
25 25 '--repeat', type=int, default=100,
26 26 help="Number of repetitions in performance tests.")
27 27
28 28
29 29 @pytest.fixture(scope='session')
30 30 def repeat(request):
31 31 """
32 32 The number of repetitions is based on this fixture.
33 33
34 34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
35 35 tests are not too slow in our default test suite.
36 36 """
37 37 return request.config.getoption('--repeat')
38 38
39 39
40 40 @pytest.fixture(scope='session')
41 41 def vcsserver_port(request):
42 42 port = get_available_port()
43 43 print 'Using vcsserver port %s' % (port, )
44 44 return port
45 45
46 46
47 47 def get_available_port():
48 48 family = socket.AF_INET
49 49 socktype = socket.SOCK_STREAM
50 50 host = '127.0.0.1'
51 51
52 52 mysocket = socket.socket(family, socktype)
53 53 mysocket.bind((host, 0))
54 54 port = mysocket.getsockname()[1]
55 55 mysocket.close()
56 56 del mysocket
57 57 return port
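A hedged example of how a performance test might consume these fixtures; the test name and loop body are illustrative only, not taken from the suite:

    def test_lookup_performance(repeat):
        # a slower call divides the repetition budget, as the fixture docstring suggests
        for _ in xrange(repeat / 10):
            pass  # exercise the code path under measurement here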
@@ -1,71 +1,71 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import tempfile
21 21
22 22 import configobj
23 23
24 24
25 25 class ContextINI(object):
26 26 """
 27 27 Allows creating a new test.ini file as a copy of an existing one with edited
 28 28 data. If the existing file is not present, it creates a new one. Example usage::
 29 29
 30 30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
 31 31 print 'vcsserver --config=%s' % new_test_ini_path
32 32 """
33 33
34 34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 35 destroy=True):
36 36 self.ini_file_path = ini_file_path
37 37 self.ini_params = ini_params
38 38 self.new_path = None
39 39 self.new_path_prefix = new_file_prefix or 'test'
40 40 self.destroy = destroy
41 41
42 42 def __enter__(self):
43 43 _, pref = tempfile.mkstemp()
44 44 loc = tempfile.gettempdir()
45 45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 46 pref, self.new_path_prefix, self.ini_file_path))
47 47
48 48 # copy ini file and modify according to the params, if we re-use a file
49 49 if os.path.isfile(self.ini_file_path):
50 50 shutil.copy(self.ini_file_path, self.new_path)
51 51 else:
52 52 # create new dump file for configObj to write to.
53 53 with open(self.new_path, 'wb'):
54 54 pass
55 55
56 56 config = configobj.ConfigObj(
57 57 self.new_path, file_error=True, write_empty_values=True)
58 58
59 59 for data in self.ini_params:
60 60 section, ini_params = data.items()[0]
61 61 key, val = ini_params.items()[0]
62 62 if section not in config:
63 63 config[section] = {}
64 64 config[section][key] = val
65 65
66 66 config.write()
67 67 return self.new_path
68 68
69 69 def __exit__(self, exc_type, exc_val, exc_tb):
70 70 if self.destroy:
71 71 os.remove(self.new_path)
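A hedged usage sketch of the context manager above; the section name and value are illustrative, not taken from a real vcsserver ini file:

    with ContextINI('test.ini', [{'server:main': {'port': '9900'}}]) as new_ini_path:
        print 'vcsserver --config=%s' % new_ini_path
    # the temporary copy is removed on __exit__ unless destroy=False was passed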
@@ -1,162 +1,162 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 81 self.remote_git.fetch(
82 82 wire=None, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
102 102 def test_remove_ref(self):
103 103 ref_to_remove = 'refs/tags/v0.1.9'
104 104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 105 self.remote_git.remove_ref(None, ref_to_remove)
106 106 assert ref_to_remove not in self.mock_repo.refs
107 107
108 108
109 109 class TestReraiseSafeExceptions(object):
110 110 def test_method_decorated_with_reraise_safe_exceptions(self):
111 111 factory = Mock()
112 112 git_remote = git.GitRemote(factory)
113 113
114 114 def fake_function():
115 115 return None
116 116
117 117 decorator = git.reraise_safe_exceptions(fake_function)
118 118
119 119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 120 for method_name, method in methods:
121 121 if not method_name.startswith('_'):
122 122 assert method.im_func.__code__ == decorator.__code__
123 123
124 124 @pytest.mark.parametrize('side_effect, expected_type', [
125 125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 129 (dulwich.errors.HangupException(), 'error'),
130 130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 131 ])
132 132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 133 @git.reraise_safe_exceptions
134 134 def fake_method():
135 135 raise side_effect
136 136
137 137 with pytest.raises(Exception) as exc_info:
138 138 fake_method()
139 139 assert type(exc_info.value) == Exception
140 140 assert exc_info.value._vcs_kind == expected_type
141 141
142 142
143 143 class TestDulwichRepoWrapper(object):
144 144 def test_calls_close_on_delete(self):
145 145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 146 with isdir_patcher:
147 147 repo = git.Repo('/tmp/abcde')
148 148 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 149 del repo
150 150 close_mock.assert_called_once_with()
151 151
152 152
153 153 class TestGitFactory(object):
154 154 def test_create_repo_returns_dulwich_wrapper(self):
155 155 factory = git.GitFactory(repo_cache=Mock())
156 156 wire = {
157 157 'path': '/tmp/abcde'
158 158 }
159 159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 160 with isdir_patcher:
161 161 result = factory._create_repo(wire, True)
162 162 assert isinstance(result, git.Repo)
@@ -1,127 +1,127 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19 import sys
20 20 import traceback
21 21
22 22 import pytest
23 23 from mercurial.error import LookupError
24 24 from mock import Mock, MagicMock, patch
25 25
26 26 from vcsserver import exceptions, hg, hgcompat
27 27
28 28
29 29 class TestHGLookup(object):
30 30 def setup(self):
31 31 self.mock_repo = MagicMock()
32 32 self.mock_repo.__getitem__.side_effect = LookupError(
33 33 'revision_or_commit_id', 'index', 'message')
34 34 factory = Mock()
35 35 factory.repo = Mock(return_value=self.mock_repo)
36 36 self.remote_hg = hg.HgRemote(factory)
37 37
38 38 def test_fail_lookup_hg(self):
39 39 with pytest.raises(Exception) as exc_info:
40 40 self.remote_hg.lookup(
41 41 wire=None, revision='revision_or_commit_id', both=True)
42 42
43 43 assert exc_info.value._vcs_kind == 'lookup'
44 44 assert 'revision_or_commit_id' in exc_info.value.args
45 45
46 46
47 47 class TestDiff(object):
48 48 def test_raising_safe_exception_when_lookup_failed(self):
49 49 repo = Mock()
50 50 factory = Mock()
51 51 factory.repo = Mock(return_value=repo)
52 52 hg_remote = hg.HgRemote(factory)
53 53 with patch('mercurial.patch.diff') as diff_mock:
54 54 diff_mock.side_effect = LookupError(
55 55 'deadbeef', 'index', 'message')
56 56 with pytest.raises(Exception) as exc_info:
57 57 hg_remote.diff(
58 58 wire=None, rev1='deadbeef', rev2='deadbee1',
59 59 file_filter=None, opt_git=True, opt_ignorews=True,
60 60 context=3)
61 61 assert type(exc_info.value) == Exception
62 62 assert exc_info.value._vcs_kind == 'lookup'
63 63
64 64
65 65 class TestReraiseSafeExceptions(object):
66 66 def test_method_decorated_with_reraise_safe_exceptions(self):
67 67 factory = Mock()
68 68 hg_remote = hg.HgRemote(factory)
69 69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
70 70 decorator = hg.reraise_safe_exceptions(None)
71 71 for method_name, method in methods:
72 72 if not method_name.startswith('_'):
73 73 assert method.im_func.__code__ == decorator.__code__
74 74
75 75 @pytest.mark.parametrize('side_effect, expected_type', [
76 76 (hgcompat.Abort(), 'abort'),
77 77 (hgcompat.InterventionRequired(), 'abort'),
78 78 (hgcompat.RepoLookupError(), 'lookup'),
79 79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
80 80 (hgcompat.RepoError(), 'error'),
81 81 (hgcompat.RequirementError(), 'requirement'),
82 82 ])
83 83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
84 84 @hg.reraise_safe_exceptions
85 85 def fake_method():
86 86 raise side_effect
87 87
88 88 with pytest.raises(Exception) as exc_info:
89 89 fake_method()
90 90 assert type(exc_info.value) == Exception
91 91 assert exc_info.value._vcs_kind == expected_type
92 92
93 93 def test_keeps_original_traceback(self):
94 94 @hg.reraise_safe_exceptions
95 95 def fake_method():
96 96 try:
97 97 raise hgcompat.Abort()
98 98 except:
99 99 self.original_traceback = traceback.format_tb(
100 100 sys.exc_info()[2])
101 101 raise
102 102
103 103 try:
104 104 fake_method()
105 105 except Exception:
106 106 new_traceback = traceback.format_tb(sys.exc_info()[2])
107 107
108 108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
109 109 assert new_traceback_tail == self.original_traceback
110 110
111 111 def test_maps_unknow_exceptions_to_unhandled(self):
112 112 @hg.reraise_safe_exceptions
113 113 def stub_method():
114 114 raise ValueError('stub')
115 115
116 116 with pytest.raises(Exception) as exc_info:
117 117 stub_method()
118 118 assert exc_info.value._vcs_kind == 'unhandled'
119 119
120 120 def test_does_not_map_known_exceptions(self):
121 121 @hg.reraise_safe_exceptions
122 122 def stub_method():
123 123 raise exceptions.LookupException('stub')
124 124
125 125 with pytest.raises(Exception) as exc_info:
126 126 stub_method()
127 127 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,125 +1,130 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 19 import pytest
20 20
21 21 from vcsserver import hgcompat, hgpatches
22 22
23 23
24 24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25 25
26 26
27 27 def test_patch_largefiles_capabilities_applies_patch(
28 28 patched_capabilities):
29 29 lfproto = hgcompat.largefiles.proto
30 30 hgpatches.patch_largefiles_capabilities()
31 31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
32 32
33 33
34 34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
36 36 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 37 hgcompat.largefiles.proto, stub_extensions)
38 38
39 39 caps = dynamic_capabilities(stub_repo, stub_proto)
40 40
41 41 stub_extensions.assert_called_once_with(stub_ui)
42 42 assert LARGEFILES_CAPABILITY not in caps
43 43
44 44
45 45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
46 46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
48 48 hgcompat.largefiles.proto, stub_extensions)
49 49
 50 50 # This happens when the extension is loaded for the first time; it is
 51 51 # important to ensure that the updated function is correctly picked up.
52 52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
53 53 return_value='REPLACED')
54 54
55 55 caps = dynamic_capabilities(stub_repo, stub_proto)
56 56 assert 'REPLACED' == caps
57 57
58 58
59 59 def test_dynamic_capabilities_ignores_updated_capabilities(
60 60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
61 61 stub_extensions.return_value = [('largefiles', mock.Mock())]
62 62 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
63 63 hgcompat.largefiles.proto, stub_extensions)
64 64
 65 65 # This happens when the extension is loaded for the first time; it is
 66 66 # important to ensure that the updated function is correctly picked up.
67 67 hgcompat.largefiles.proto.capabilities = mock.Mock(
68 68 side_effect=Exception('Must not be called'))
69 69
70 70 dynamic_capabilities(stub_repo, stub_proto)
71 71
72 72
73 73 def test_dynamic_capabilities_uses_largefiles_if_enabled(
74 74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
75 75 stub_extensions.return_value = [('largefiles', mock.Mock())]
76 76
77 77 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
78 78 hgcompat.largefiles.proto, stub_extensions)
79 79
80 80 caps = dynamic_capabilities(stub_repo, stub_proto)
81 81
82 82 stub_extensions.assert_called_once_with(stub_ui)
83 83 assert LARGEFILES_CAPABILITY in caps
84 84
85 85
86 def test_hgsubversion_import():
87 from hgsubversion import svnrepo
88 assert svnrepo
89
90
86 91 @pytest.fixture
87 92 def patched_capabilities(request):
88 93 """
89 94 Patch in `capabilitiesorig` and restore both capability functions.
90 95 """
91 96 lfproto = hgcompat.largefiles.proto
92 97 orig_capabilities = lfproto.capabilities
93 98 orig_capabilitiesorig = lfproto.capabilitiesorig
94 99
95 100 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
96 101
97 102 @request.addfinalizer
98 103 def restore():
99 104 lfproto.capabilities = orig_capabilities
100 105 lfproto.capabilitiesorig = orig_capabilitiesorig
101 106
102 107
103 108 @pytest.fixture
104 109 def stub_repo(stub_ui):
105 110 repo = mock.Mock()
106 111 repo.ui = stub_ui
107 112 return repo
108 113
109 114
110 115 @pytest.fixture
111 116 def stub_proto(stub_ui):
112 117 proto = mock.Mock()
113 118 proto.ui = stub_ui
114 119 return proto
115 120
116 121
117 122 @pytest.fixture
118 123 def stub_ui():
119 124 return hgcompat.ui.ui()
120 125
121 126
122 127 @pytest.fixture
123 128 def stub_extensions():
124 129 extensions = mock.Mock(return_value=tuple())
125 130 return extensions
@@ -1,241 +1,241 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import contextlib
19 19 import io
20 20 import threading
21 21 from BaseHTTPServer import BaseHTTPRequestHandler
22 22 from SocketServer import TCPServer
23 23
24 24 import mercurial.ui
25 25 import mock
26 26 import pytest
27 27 import simplejson as json
28 28
29 29 from vcsserver import hooks
30 30
31 31
32 32 def get_hg_ui(extras=None):
33 33 """Create a Config object with a valid RC_SCM_DATA entry."""
34 34 extras = extras or {}
35 35 required_extras = {
36 36 'username': '',
37 37 'repository': '',
38 38 'locked_by': '',
39 39 'scm': '',
40 40 'make_lock': '',
41 41 'action': '',
42 42 'ip': '',
43 43 'hooks_uri': 'fake_hooks_uri',
44 44 }
45 45 required_extras.update(extras)
46 46 hg_ui = mercurial.ui.ui()
47 47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48 48
49 49 return hg_ui
50 50
51 51
52 52 def test_git_pre_receive_is_disabled():
53 53 extras = {'hooks': ['pull']}
54 54 response = hooks.git_pre_receive(None, None,
55 55 {'RC_SCM_DATA': json.dumps(extras)})
56 56
57 57 assert response == 0
58 58
59 59
60 60 def test_git_post_receive_is_disabled():
61 61 extras = {'hooks': ['pull']}
62 62 response = hooks.git_post_receive(None, '',
63 63 {'RC_SCM_DATA': json.dumps(extras)})
64 64
65 65 assert response == 0
66 66
67 67
68 68 def test_git_post_receive_calls_repo_size():
69 69 extras = {'hooks': ['push', 'repo_size']}
70 70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 71 hooks.git_post_receive(
72 72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
73 73 extras.update({'commit_ids': [],
74 74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
75 75 expected_calls = [
76 76 mock.call('repo_size', extras, mock.ANY),
77 77 mock.call('post_push', extras, mock.ANY),
78 78 ]
79 79 assert call_hook_mock.call_args_list == expected_calls
80 80
81 81
82 82 def test_git_post_receive_does_not_call_disabled_repo_size():
83 83 extras = {'hooks': ['push']}
84 84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 85 hooks.git_post_receive(
86 86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
87 87 extras.update({'commit_ids': [],
88 88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
89 89 expected_calls = [
90 90 mock.call('post_push', extras, mock.ANY)
91 91 ]
92 92 assert call_hook_mock.call_args_list == expected_calls
93 93
94 94
95 95 def test_repo_size_exception_does_not_affect_git_post_receive():
96 96 extras = {'hooks': ['push', 'repo_size']}
97 97 status = 0
98 98
99 99 def side_effect(name, *args, **kwargs):
100 100 if name == 'repo_size':
101 101 raise Exception('Fake exception')
102 102 else:
103 103 return status
104 104
105 105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
106 106 call_hook_mock.side_effect = side_effect
107 107 result = hooks.git_post_receive(
108 108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
109 109 assert result == status
110 110
111 111
112 112 def test_git_pre_pull_is_disabled():
113 113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
114 114
115 115
116 116 def test_git_post_pull_is_disabled():
117 117 assert (
118 118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
119 119
120 120
121 121 class TestGetHooksClient(object):
122 122
123 123 def test_returns_http_client_when_protocol_matches(self):
124 124 hooks_uri = 'localhost:8000'
125 125 result = hooks._get_hooks_client({
126 126 'hooks_uri': hooks_uri,
127 127 'hooks_protocol': 'http'
128 128 })
129 129 assert isinstance(result, hooks.HooksHttpClient)
130 130 assert result.hooks_uri == hooks_uri
131 131
132 132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
133 133 fake_module = mock.Mock()
134 134 import_patcher = mock.patch.object(
135 135 hooks.importlib, 'import_module', return_value=fake_module)
136 136 fake_module_name = 'fake.module'
137 137 with import_patcher as import_mock:
138 138 result = hooks._get_hooks_client(
139 139 {'hooks_module': fake_module_name})
140 140
141 141 import_mock.assert_called_once_with(fake_module_name)
142 142 assert isinstance(result, hooks.HooksDummyClient)
143 143 assert result._hooks_module == fake_module
144 144
145 145
146 146 class TestHooksHttpClient(object):
147 147 def test_init_sets_hooks_uri(self):
148 148 uri = 'localhost:3000'
149 149 client = hooks.HooksHttpClient(uri)
150 150 assert client.hooks_uri == uri
151 151
152 152 def test_serialize_returns_json_string(self):
153 153 client = hooks.HooksHttpClient('localhost:3000')
154 154 hook_name = 'test'
155 155 extras = {
156 156 'first': 1,
157 157 'second': 'two'
158 158 }
159 159 result = client._serialize(hook_name, extras)
160 160 expected_result = json.dumps({
161 161 'method': hook_name,
162 162 'extras': extras
163 163 })
164 164 assert result == expected_result
165 165
166 166 def test_call_queries_http_server(self, http_mirror):
167 167 client = hooks.HooksHttpClient(http_mirror.uri)
168 168 hook_name = 'test'
169 169 extras = {
170 170 'first': 1,
171 171 'second': 'two'
172 172 }
173 173 result = client(hook_name, extras)
174 174 expected_result = {
175 175 'method': hook_name,
176 176 'extras': extras
177 177 }
178 178 assert result == expected_result
179 179
180 180
181 181 class TestHooksDummyClient(object):
182 182 def test_init_imports_hooks_module(self):
183 183 hooks_module_name = 'rhodecode.fake.module'
184 184 hooks_module = mock.MagicMock()
185 185
186 186 import_patcher = mock.patch.object(
187 187 hooks.importlib, 'import_module', return_value=hooks_module)
188 188 with import_patcher as import_mock:
189 189 client = hooks.HooksDummyClient(hooks_module_name)
190 190 import_mock.assert_called_once_with(hooks_module_name)
191 191 assert client._hooks_module == hooks_module
192 192
193 193 def test_call_returns_hook_result(self):
194 194 hooks_module_name = 'rhodecode.fake.module'
195 195 hooks_module = mock.MagicMock()
196 196 import_patcher = mock.patch.object(
197 197 hooks.importlib, 'import_module', return_value=hooks_module)
198 198 with import_patcher:
199 199 client = hooks.HooksDummyClient(hooks_module_name)
200 200
201 201 result = client('post_push', {})
202 202 hooks_module.Hooks.assert_called_once_with()
203 203 assert result == hooks_module.Hooks().__enter__().post_push()
204 204
205 205
206 206 @pytest.fixture
207 207 def http_mirror(request):
208 208 server = MirrorHttpServer()
209 209 request.addfinalizer(server.stop)
210 210 return server
211 211
212 212
213 213 class MirrorHttpHandler(BaseHTTPRequestHandler):
214 214 def do_POST(self):
215 215 length = int(self.headers['Content-Length'])
216 216 body = self.rfile.read(length).decode('utf-8')
217 217 self.send_response(200)
218 218 self.end_headers()
219 219 self.wfile.write(body)
220 220
221 221
222 222 class MirrorHttpServer(object):
223 223 ip_address = '127.0.0.1'
224 224 port = 0
225 225
226 226 def __init__(self):
227 227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
228 228 _, self.port = self._daemon.server_address
229 229 self._thread = threading.Thread(target=self._daemon.serve_forever)
230 230 self._thread.daemon = True
231 231 self._thread.start()
232 232
233 233 def stop(self):
234 234 self._daemon.shutdown()
235 235 self._thread.join()
236 236 self._daemon = None
237 237 self._thread = None
238 238
239 239 @property
240 240 def uri(self):
241 241 return '{}:{}'.format(self.ip_address, self.port)
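A hedged sketch of using the mirror server directly, outside the `http_mirror` fixture (stdlib only; the request body is illustrative):

    import urllib2

    server = MirrorHttpServer()
    try:
        body = '{"method": "post_push", "extras": {}}'
        echoed = urllib2.urlopen('http://%s' % server.uri, data=body).read()
        assert echoed == body  # the handler simply mirrors the POST body
    finally:
        server.stop()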
@@ -1,57 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 19 import pytest
20 20
21 21 from vcsserver import http_main
22 22 from vcsserver.base import obfuscate_qs
23 23
24 24
25 25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 28 http_main.main([])
29 29 patch_largefiles_capabilities.assert_called_once_with()
30 30
31 31
32 32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 34 @mock.patch(
35 35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 36 mock.Mock(side_effect=Exception("Must not be called")))
37 37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 38 http_main.main([])
39 39
40 40
41 41 @pytest.mark.parametrize('given, expected', [
42 42 ('bad', 'bad'),
43 43 ('query&foo=bar', 'query&foo=bar'),
44 44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 45 ('a;b;c;query&foo=bar&auth_token=secret',
46 46 'a&b&c&query&foo=bar&auth_token=*****'),
47 47 ('', ''),
48 48 (None, None),
49 49 ('foo=bar', 'foo=bar'),
50 50 ('auth_token=secret', 'auth_token=*****'),
51 51 ('auth_token=secret&api_key=secret2',
52 52 'auth_token=*****&api_key=*****'),
53 53 ('auth_token=secret&api_key=secret2&param=value',
54 54 'auth_token=*****&api_key=*****&param=value'),
55 55 ])
56 56 def test_obfuscate_qs(given, expected):
57 57 assert expected == obfuscate_qs(given)
@@ -1,249 +1,249 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19
20 20 import dulwich.protocol
21 21 import mock
22 22 import pytest
23 23 import webob
24 24 import webtest
25 25
26 26 from vcsserver import hooks, pygrack
27 27
28 28 # pylint: disable=redefined-outer-name,protected-access
29 29
30 30
31 31 @pytest.fixture()
32 32 def pygrack_instance(tmpdir):
33 33 """
34 34 Creates a pygrack app instance.
35 35
 36 36 Right now, it does not do much with the passed directory.
37 37 It just contains the required folders to pass the signature test.
38 38 """
39 39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 40 tmpdir.mkdir(dir_name)
41 41
42 42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43 43
44 44
45 45 @pytest.fixture()
46 46 def pygrack_app(pygrack_instance):
47 47 """
48 48 Creates a pygrack app wrapped in webtest.TestApp.
49 49 """
50 50 return webtest.TestApp(pygrack_instance)
51 51
52 52
53 53 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 55 expect_errors=True)
56 56
57 57 assert response.status_int == 403
58 58
59 59
60 60 def test_invalid_endpoint_returns_403(pygrack_app):
61 61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62 62
63 63 assert response.status_int == 403
64 64
65 65
66 66 @pytest.mark.parametrize('sideband', [
67 67 'side-band-64k',
68 68 'side-band',
69 69 'side-band no-progress',
70 70 ])
71 71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 72 request = ''.join([
73 73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 74 'multi_ack %s ofs-delta\n' % sideband,
75 75 '0000',
76 76 '0009done\n',
77 77 ])
78 78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 79 return_value=hooks.HookResponse(1, 'foo')):
80 80 response = pygrack_app.post(
81 81 '/git-upload-pack', params=request,
82 82 content_type='application/x-git-upload-pack')
83 83
84 84 data = io.BytesIO(response.body)
85 85 proto = dulwich.protocol.Protocol(data.read, None)
86 86 packets = list(proto.read_pkt_seq())
87 87
88 88 expected_packets = [
89 89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
90 90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
91 91 ]
92 92 assert packets == expected_packets
93 93
94 94
95 95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 96 request = ''.join([
97 97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 98 'multi_ack ofs-delta\n'
99 99 '0000',
100 100 '0009done\n',
101 101 ])
102 102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 103 return_value=hooks.HookResponse(1, 'foo')):
104 104 response = pygrack_app.post(
105 105 '/git-upload-pack', params=request,
106 106 content_type='application/x-git-upload-pack')
107 107
108 108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109 109
110 110
111 111 def test_pull_has_hook_messages(pygrack_app):
112 112 request = ''.join([
113 113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 114 'multi_ack side-band-64k ofs-delta\n'
115 115 '0000',
116 116 '0009done\n',
117 117 ])
118 118 with mock.patch('vcsserver.hooks.git_pre_pull',
119 119 return_value=hooks.HookResponse(0, 'foo')):
120 120 with mock.patch('vcsserver.hooks.git_post_pull',
121 121 return_value=hooks.HookResponse(1, 'bar')):
122 122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 123 return_value=['0008NAK\n0009subp\n0000']):
124 124 response = pygrack_app.post(
125 125 '/git-upload-pack', params=request,
126 126 content_type='application/x-git-upload-pack')
127 127
128 128 data = io.BytesIO(response.body)
129 129 proto = dulwich.protocol.Protocol(data.read, None)
130 130 packets = list(proto.read_pkt_seq())
131 131
132 132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
133 133
134 134
135 135 def test_get_want_capabilities(pygrack_instance):
136 136 data = io.BytesIO(
137 137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
139 139
140 140 request = webob.Request({
141 141 'wsgi.input': data,
142 142 'REQUEST_METHOD': 'POST',
143 143 'webob.is_body_seekable': True
144 144 })
145 145
146 146 capabilities = pygrack_instance._get_want_capabilities(request)
147 147
148 148 assert capabilities == frozenset(
149 149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
150 150 assert data.tell() == 0
151 151
152 152
153 153 @pytest.mark.parametrize('data,capabilities,expected', [
154 154 ('foo', [], []),
155 155 ('', ['side-band-64k'], []),
156 156 ('', ['side-band'], []),
157 157 ('foo', ['side-band-64k'], ['0008\x02foo']),
158 158 ('foo', ['side-band'], ['0008\x02foo']),
159 159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
160 160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
161 161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
162 162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
163 163 ], ids=[
164 164 'foo-empty',
165 165 'empty-64k', 'empty',
166 166 'foo-64k', 'foo',
167 167 'f-1000-64k', 'f-1000',
168 168 'f-65520-64k', 'f-65520'])
169 169 def test_get_messages(pygrack_instance, data, capabilities, expected):
170 170 messages = pygrack_instance._get_messages(data, capabilities)
171 171
172 172 assert messages == expected
173 173
174 174
175 175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 176 # Unexpected response
177 177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
178 178 # No sideband
179 179 ('no-sideband', [], 'foo', 'bar'),
180 180 # No messages
181 181 ('no-messages', ['side-band-64k'], '', ''),
182 182 ])
183 183 def test_inject_messages_to_response_nothing_to_do(
184 184 pygrack_instance, response, capabilities, pre_pull_messages,
185 185 post_pull_messages):
186 186 new_response = pygrack_instance._inject_messages_to_response(
187 187 response, capabilities, pre_pull_messages, post_pull_messages)
188 188
189 189 assert new_response == response
190 190
191 191
192 192 @pytest.mark.parametrize('capabilities', [
193 193 ['side-band'],
194 194 ['side-band-64k'],
195 195 ])
196 196 def test_inject_messages_to_response_single_element(pygrack_instance,
197 197 capabilities):
198 198 response = ['0008NAK\n0009subp\n0000']
199 199 new_response = pygrack_instance._inject_messages_to_response(
200 200 response, capabilities, 'foo', 'bar')
201 201
202 202 expected_response = [
203 203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
204 204
205 205 assert new_response == expected_response
206 206
207 207
208 208 @pytest.mark.parametrize('capabilities', [
209 209 ['side-band'],
210 210 ['side-band-64k'],
211 211 ])
212 212 def test_inject_messages_to_response_multi_element(pygrack_instance,
213 213 capabilities):
214 214 response = [
215 215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 216 new_response = pygrack_instance._inject_messages_to_response(
217 217 response, capabilities, 'foo', 'bar')
218 218
219 219 expected_response = [
220 220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
221 221 '000asubp4\n', '0008\x02bar', '0000'
222 222 ]
223 223
224 224 assert new_response == expected_response
225 225
226 226
227 227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
228 228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
229 229
230 230 assert response == [pygrack.GitRepository.EMPTY_PACK]
231 231
232 232
233 233 @pytest.mark.parametrize('capabilities', [
234 234 ['side-band'],
235 235 ['side-band-64k'],
236 236 ['side-band-64k', 'no-progress'],
237 237 ])
238 238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 239 response = pygrack_instance._build_failed_pre_pull_response(
240 240 capabilities, 'foo')
241 241
242 242 expected_response = [
243 243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
244 244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
245 245 pygrack.GitRepository.EMPTY_PACK),
246 246 '0000',
247 247 ]
248 248
249 249 assert response == expected_response
@@ -1,86 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19
20 20 import mercurial.hg
21 21 import mercurial.ui
22 22 import mercurial.error
23 23 import mock
24 24 import pytest
25 25 import webtest
26 26
27 27 from vcsserver import scm_app
28 28
29 29
30 30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 32 app = webtest.TestApp(scm_app.HgWeb(repo))
33 33
34 34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35 35
36 36 assert response.status_int == 400
37 37
38 38
39 39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 41 config = (
42 42 ('paths', 'default', ''),
43 43 )
44 44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 45 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 46 with pytest.raises(Exception):
47 47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48 48
49 49
50 50 def test_git_returns_not_found(tmpdir):
51 51 app = webtest.TestApp(
52 52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53 53
54 54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 55 expect_errors=True)
56 56
57 57 assert response.status_int == 404
58 58
59 59
60 60 def test_git(tmpdir):
61 61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 62 tmpdir.mkdir(dir_name)
63 63
64 64 app = webtest.TestApp(
65 65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66 66
67 67 # We set service to git-upload-packs to trigger a 403
68 68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 69 expect_errors=True)
70 70
71 71 assert response.status_int == 403
72 72
73 73
74 74 def test_git_fallbacks_to_git_folder(tmpdir):
75 75 tmpdir.mkdir('.git')
76 76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 77 tmpdir.mkdir(os.path.join('.git', dir_name))
78 78
79 79 app = webtest.TestApp(
80 80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81 81
82 82 # We set service to git-upload-packs to trigger a 403
83 83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 84 expect_errors=True)
85 85
86 86 assert response.status_int == 403
@@ -1,39 +1,39 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19
20 20 import mock
21 21 import pytest
22 22
23 23 from vcsserver.server import VcsServer
24 24
25 25
26 26 def test_provides_the_pid(server):
27 27 pid = server.get_pid()
28 28 assert pid == os.getpid()
29 29
30 30
31 31 def test_allows_to_trigger_the_garbage_collector(server):
32 32 with mock.patch('gc.collect') as collect:
33 33 server.run_gc()
34 34 assert collect.called
35 35
36 36
37 37 @pytest.fixture
38 38 def server():
39 39 return VcsServer()
@@ -1,122 +1,122 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import os
20 20 import sys
21 21
22 22 import pytest
23 23
24 24 from vcsserver import subprocessio
25 25
26 26
27 27 @pytest.fixture(scope='module')
28 28 def environ():
29 29 """Delete coverage variables, as they make the tests fail."""
30 30 env = dict(os.environ)
31 31 for key in env.keys():
32 32 if key.startswith('COV_CORE_'):
33 33 del env[key]
34 34
35 35 return env
36 36
37 37
38 38 def _get_python_args(script):
39 39 return [sys.executable, '-c',
40 40 'import sys; import time; import shutil; ' + script]
41 41
42 42
43 43 def test_raise_exception_on_non_zero_return_code(environ):
44 44 args = _get_python_args('sys.exit(1)')
45 45 with pytest.raises(EnvironmentError):
46 46 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
47 47
48 48
49 49 def test_does_not_fail_on_non_zero_return_code(environ):
50 50 args = _get_python_args('sys.exit(1)')
51 51 output = ''.join(subprocessio.SubprocessIOChunker(
52 52 args, shell=False, fail_on_return_code=False, env=environ))
53 53
54 54 assert output == ''
55 55
56 56
57 57 def test_raise_exception_on_stderr(environ):
58 58 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
59 59 with pytest.raises(EnvironmentError) as excinfo:
60 60 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
61 61
62 62 assert 'exited due to an error:\nX' in str(excinfo.value)
63 63
64 64
65 65 def test_does_not_fail_on_stderr(environ):
66 66 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
67 67 output = ''.join(subprocessio.SubprocessIOChunker(
68 68 args, shell=False, fail_on_stderr=False, env=environ))
69 69
70 70 assert output == ''
71 71
72 72
73 73 @pytest.mark.parametrize('size', [1, 10**5])
74 74 def test_output_with_no_input(size, environ):
75 75 print type(environ)
76 76 data = 'X'
77 77 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
78 78 output = ''.join(subprocessio.SubprocessIOChunker(
79 79 args, shell=False, env=environ))
80 80
81 81 assert output == data * size
82 82
83 83
84 84 @pytest.mark.parametrize('size', [1, 10**5])
85 85 def test_output_with_no_input_does_not_fail(size, environ):
86 86 data = 'X'
87 87 args = _get_python_args(
88 88 'sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
89 89 output = ''.join(subprocessio.SubprocessIOChunker(
90 90 args, shell=False, fail_on_return_code=False, env=environ))
91 91
92 92 print len(data * size), len(output)
93 93 assert output == data * size
94 94
95 95
96 96 @pytest.mark.parametrize('size', [1, 10**5])
97 97 def test_output_with_input(size, environ):
98 98 data = 'X' * size
99 99 inputstream = io.BytesIO(data)
100 100 # This acts like the cat command.
101 101 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
102 102 output = ''.join(subprocessio.SubprocessIOChunker(
103 103 args, shell=False, inputstream=inputstream, env=environ))
104 104
105 105 print len(data), len(output)
106 106 assert output == data
107 107
108 108
109 109 @pytest.mark.parametrize('size', [1, 10**5])
110 110 def test_output_with_input_skipping_iterator(size, environ):
111 111 data = 'X' * size
112 112 inputstream = io.BytesIO(data)
113 113 # This acts like the cat command.
114 114 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
115 115
116 116 # Note: assigning the chunker makes sure that it is not deleted too early
117 117 chunker = subprocessio.SubprocessIOChunker(
118 118 args, shell=False, inputstream=inputstream, env=environ)
119 119 output = ''.join(chunker.output)
120 120
121 121 print len(data), len(output)
122 122 assert output == data
@@ -1,67 +1,67 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import mock
20 20 import pytest
21 21 import sys
22 22
23 23
24 24 class MockPopen(object):
25 25 def __init__(self, stderr):
26 26 self.stdout = io.BytesIO('')
27 27 self.stderr = io.BytesIO(stderr)
28 28 self.returncode = 1
29 29
30 30 def wait(self):
31 31 pass
32 32
33 33
34 34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 37 ])
38 38
39 39
40 40 @pytest.mark.parametrize('stderr,expected_reason', [
41 41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 42 ('svnrdump: E123456', 'UNKNOWN'),
43 43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 44 @pytest.mark.xfail(sys.platform == "cygwin",
45 45 reason="SVN not packaged for Cygwin")
46 46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 47 from vcsserver import svn
48 48
49 49 remote = svn.SvnRemote(None)
50 50 remote.is_path_valid_repository = lambda wire, path: True
51 51
52 52 with mock.patch('subprocess.Popen',
53 53 return_value=MockPopen(stderr)):
54 54 with pytest.raises(Exception) as excinfo:
55 55 remote.import_remote_repository({'path': 'path'}, 'url')
56 56
57 57 expected_error_args = (
58 58 'Failed to dump the remote repository from url.',
59 59 expected_reason)
60 60
61 61 assert excinfo.value.args == expected_error_args
62 62
63 63
64 64 def test_svn_libraries_can_be_imported():
65 65 import svn
66 66 import svn.client
67 67 assert svn.client is not None
@@ -1,96 +1,96 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import wsgiref.simple_server
19 19 import wsgiref.validate
20 20
21 21 from vcsserver import wsgi_app_caller
22 22
23 23
24 24 # pylint: disable=protected-access,too-many-public-methods
25 25
26 26
27 27 @wsgiref.validate.validator
28 28 def demo_app(environ, start_response):
29 29 """WSGI app used for testing."""
30 30 data = [
31 31 'Hello World!\n',
32 32 'input_data=%s\n' % environ['wsgi.input'].read(),
33 33 ]
34 34 for key, value in sorted(environ.items()):
35 35 data.append('%s=%s\n' % (key, value))
36 36
37 37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 38 write('Old school write method\n')
39 39 write('***********************\n')
40 40 return data
41 41
42 42
43 43 BASE_ENVIRON = {
44 44 'REQUEST_METHOD': 'GET',
45 45 'SERVER_NAME': 'localhost',
46 46 'SERVER_PORT': '80',
47 47 'SCRIPT_NAME': '',
48 48 'PATH_INFO': '/',
49 49 'QUERY_STRING': '',
50 50 'foo.var': 'bla',
51 51 }
52 52
53 53
54 54 def test_complete_environ():
55 55 environ = dict(BASE_ENVIRON)
56 56 data = "data"
57 57 wsgi_app_caller._complete_environ(environ, data)
58 58 wsgiref.validate.check_environ(environ)
59 59
60 60 assert data == environ['wsgi.input'].read()
61 61
62 62
63 63 def test_start_response():
64 64 start_response = wsgi_app_caller._StartResponse()
65 65 status = '200 OK'
66 66 headers = [('Content-Type', 'text/plain')]
67 67 start_response(status, headers)
68 68
69 69 assert status == start_response.status
70 70 assert headers == start_response.headers
71 71
72 72
73 73 def test_start_response_with_error():
74 74 start_response = wsgi_app_caller._StartResponse()
75 75 status = '500 Internal Server Error'
76 76 headers = [('Content-Type', 'text/plain')]
77 77 start_response(status, headers, (None, None, None))
78 78
79 79 assert status == start_response.status
80 80 assert headers == start_response.headers
81 81
82 82
83 83 def test_wsgi_app_caller():
84 84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 85 environ = dict(BASE_ENVIRON)
86 86 input_data = 'some text'
87 87 responses, status, headers = caller.handle(environ, input_data)
88 88 response = ''.join(responses)
89 89
90 90 assert status == '200 OK'
91 91 assert headers == [('Content-Type', 'text/plain')]
92 92 assert response.startswith(
93 93 'Old school write method\n***********************\n')
94 94 assert 'Hello World!\n' in response
95 95 assert 'foo.var=bla\n' in response
96 96 assert 'input_data=%s\n' % input_data in response
@@ -1,60 +1,60 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19
20 20 import time
21 21 import logging
22 22
23 23
24 24 from vcsserver.utils import safe_str
25 25
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 def get_access_path(request):
31 31 environ = request.environ
32 32 return environ.get('PATH_INFO')
33 33
34 34
35 35 class RequestWrapperTween(object):
36 36 def __init__(self, handler, registry):
37 37 self.handler = handler
38 38 self.registry = registry
39 39
40 40 # one-time configuration code goes here
41 41
42 42 def __call__(self, request):
43 43 start = time.time()
44 44 try:
45 45 response = self.handler(request)
46 46 finally:
47 47 end = time.time()
48 48
49 49 log.info('IP: %s Request to path: `%s` time: %.3fs' % (
50 50 '127.0.0.1',
51 51 safe_str(get_access_path(request)), end - start)
52 52 )
53 53
54 54 return response
55 55
56 56
57 57 def includeme(config):
58 58 config.add_tween(
59 59 'vcsserver.tweens.RequestWrapperTween',
60 60 )
@@ -1,72 +1,72 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19 def safe_int(val, default=None):
20 20 """
20 20     Returns int() of val; if val is not convertible to int, returns default
21 21     instead.
23 23
24 24 :param val:
25 25 :param default:
26 26 """
27 27
28 28 try:
29 29 val = int(val)
30 30 except (ValueError, TypeError):
31 31 val = default
32 32
33 33 return val
34 34
35 35
36 36 def safe_str(unicode_, to_encoding=['utf8']):
37 37 """
38 38     safe str function. Does a few tricks to turn unicode_ into a string.
39 39
40 40 In case of UnicodeEncodeError, we try to return it with encoding detected
41 41     by the chardet library; if that fails, fall back to a string with errors replaced.
42 42
43 43 :param unicode_: unicode to encode
44 44 :rtype: str
45 45 :returns: str object
46 46 """
47 47
48 48 # if it's not basestr cast to str
49 49 if not isinstance(unicode_, basestring):
50 50 return str(unicode_)
51 51
52 52 if isinstance(unicode_, str):
53 53 return unicode_
54 54
55 55 if not isinstance(to_encoding, (list, tuple)):
56 56 to_encoding = [to_encoding]
57 57
58 58 for enc in to_encoding:
59 59 try:
60 60 return unicode_.encode(enc)
61 61 except UnicodeEncodeError:
62 62 pass
63 63
64 64 try:
65 65 import chardet
66 66 encoding = chardet.detect(unicode_)['encoding']
67 67 if encoding is None:
68 68 raise UnicodeEncodeError()
69 69
70 70 return unicode_.encode(encoding)
71 71 except (ImportError, UnicodeEncodeError):
72 72 return unicode_.encode(to_encoding[0], 'replace')
@@ -1,116 +1,116 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """Extract the responses of a WSGI app."""
19 19
20 20 __all__ = ('WSGIAppCaller',)
21 21
22 22 import io
23 23 import logging
24 24 import os
25 25
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29 DEV_NULL = open(os.devnull)
30 30
31 31
32 32 def _complete_environ(environ, input_data):
33 33 """Update the missing wsgi.* variables of a WSGI environment.
34 34
35 35 :param environ: WSGI environment to update
36 36 :type environ: dict
37 37 :param input_data: data to be read by the app
38 38 :type input_data: str
39 39 """
40 40 environ.update({
41 41 'wsgi.version': (1, 0),
42 42 'wsgi.url_scheme': 'http',
43 43 'wsgi.multithread': True,
44 44 'wsgi.multiprocess': True,
45 45 'wsgi.run_once': False,
46 46 'wsgi.input': io.BytesIO(input_data),
47 47 'wsgi.errors': DEV_NULL,
48 48 })
49 49
50 50
51 51 # pylint: disable=too-few-public-methods
52 52 class _StartResponse(object):
53 53 """Save the arguments of a start_response call."""
54 54
55 55 __slots__ = ['status', 'headers', 'content']
56 56
57 57 def __init__(self):
58 58 self.status = None
59 59 self.headers = None
60 60 self.content = []
61 61
62 62 def __call__(self, status, headers, exc_info=None):
63 63 # TODO(skreft): do something meaningful with the exc_info
64 64 exc_info = None # avoid dangling circular reference
65 65 self.status = status
66 66 self.headers = headers
67 67
68 68 return self.write
69 69
70 70 def write(self, content):
71 71 """Write method returning when calling this object.
72 72
73 73 All the data written is then available in content.
74 74 """
75 75 self.content.append(content)
76 76
77 77
78 78 class WSGIAppCaller(object):
79 79 """Calls a WSGI app."""
80 80
81 81 def __init__(self, app):
82 82 """
83 83 :param app: WSGI app to call
84 84 """
85 85 self.app = app
86 86
87 87 def handle(self, environ, input_data):
88 88 """Process a request with the WSGI app.
89 89
90 90 The returned data of the app is fully consumed into a list.
91 91
92 92 :param environ: WSGI environment to update
93 93 :type environ: dict
94 94 :param input_data: data to be read by the app
95 95 :type input_data: str
96 96
97 97 :returns: a tuple with the contents, status and headers
98 98 :rtype: (list<str>, str, list<(str, str)>)
99 99 """
100 100 _complete_environ(environ, input_data)
101 101 start_response = _StartResponse()
102 102 log.debug("Calling wrapped WSGI application")
103 103 responses = self.app(environ, start_response)
104 104 responses_list = list(responses)
105 105 existing_responses = start_response.content
106 106 if existing_responses:
107 107 log.debug(
108 108 "Adding returned response to response written via write()")
109 109 existing_responses.extend(responses_list)
110 110 responses_list = existing_responses
111 111 if hasattr(responses, 'close'):
112 112 log.debug("Closing iterator from WSGI application")
113 113 responses.close()
114 114
115 115 log.debug("Handling of WSGI request done, returning response")
116 116 return responses_list, start_response.status, start_response.headers