release: Merge default into stable for release preparation
marcink -
r354:c835ad40 merge stable
@@ -1,6 +1,6 @@
 [bumpversion]
-current_version = 4.10.6
+current_version = 4.11.0
 message = release: Bump version {current_version} to {new_version}
 
 [bumpversion:file:vcsserver/VERSION]
 
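This hunk has the shape of a standard bumpversion configuration (a current_version plus a [bumpversion:file:...] section per tracked file). Assuming that is what it is, the 4.10.6 to 4.11.0 bump above is the kind of edit the bumpversion CLI produces rather than a manual change; a hypothetical invocation would look like:

    # sketch only: assumes the bumpversion tool and that this hunk is its config file
    bumpversion --current-version 4.10.6 --new-version 4.11.0 minor

which rewrites current_version here and the vcsserver/VERSION file named in the file section, and uses the configured message template when it is asked to commit.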
@@ -1,38 +1,37 @@
 syntax: glob
 *.orig
 *.pyc
 *.swp
 *.sqlite
 *.tox
 *.egg-info
 *.egg
 *.eggs
 *.idea
 .DS_Store*
 
 
 syntax: regexp
 
 #.filename
 ^\.settings$
 ^\.project$
 ^\.pydevproject$
 ^\.coverage$
 ^\.cache.*$
 ^\.rhodecode$
 
-^_dev
-^._dev
+^.dev
 ^build/
 ^coverage\.xml$
 ^data$
 ^dev.ini$
 ^acceptance_tests/dev.*\.ini$
 ^dist/
 ^fabfile.py
 ^htmlcov
 ^junit\.xml$
 ^node_modules/
 ^pylint.log$
 ^build$
 ^result$
@@ -1,16 +1,14 @@
 [DEFAULT]
 done = false
 
 [task:bump_version]
 done = true
 
 [task:fixes_on_stable]
-done = true
 
 [task:pip2nix_generated]
-done = true
 
 [release]
-state = prepared
-version = 4.10.6
+state = in_progress
+version = 4.11.0
 
@@ -1,165 +1,166 @@
 # Nix environment for the community edition
 #
 # This shall be as lean as possible, just producing the rhodecode-vcsserver
 # derivation. For advanced tweaks to pimp up the development environment we use
 # "shell.nix" so that it does not have to clutter this file.
 
 { pkgs ? (import <nixpkgs> {})
 , pythonPackages ? "python27Packages"
 , pythonExternalOverrides ? self: super: {}
 , doCheck ? true
 }:
 
 let pkgs_ = pkgs; in
 
 let
   pkgs = pkgs_.overridePackages (self: super: {
     # bump GIT version
     git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
       name = "git-2.13.5";
       src = pkgs.fetchurl {
         url = "https://www.kernel.org/pub/software/scm/git/git-2.13.5.tar.xz";
         sha256 = "18fi18103n7grshm4ffb0fwsnvbl48sbqy5gqx528vf8maff5j91";
       };
 
       patches = [
         ./pkgs/git_patches/docbook2texi.patch
         ./pkgs/git_patches/symlinks-in-bin.patch
         ./pkgs/git_patches/git-sh-i18n.patch
         ./pkgs/git_patches/ssh-path.patch
       ];
 
     });
 
     # Override subversion derivation to
     #  - activate python bindings
     subversion = let
       subversionWithPython = super.subversion.override {
         httpSupport = true;
         pythonBindings = true;
         python = self.python27Packages.python;
       };
 
     in
 
     pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
       name = "subversion-1.9.7";
       src = pkgs.fetchurl {
         url = "https://www.apache.org/dist/subversion/subversion-1.9.7.tar.gz";
         sha256 = "0g3cs2h008z8ymgkhbk54jp87bjh7y049rn42igj881yi2f20an7";
       };
 
     });
 
   });
 
   inherit (pkgs.lib) fix extends;
   basePythonPackages = with builtins; if isAttrs pythonPackages
     then pythonPackages
     else getAttr pythonPackages pkgs;
 
   elem = builtins.elem;
   basename = path: with pkgs.lib; last (splitString "/" path);
   startsWith = prefix: full: let
     actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
   in actualPrefix == prefix;
 
   src-filter = path: type: with pkgs.lib;
     let
       ext = last (splitString "." path);
     in
       !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
         "node_modules" "build" "data" "tmp"] &&
       !elem ext ["egg-info" "pyc"] &&
       !startsWith "result" path;
 
   rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
 
   pythonGeneratedPackages = self: basePythonPackages.override (a: {
     inherit self;
   }) // (scopedImport {
     self = self;
     super = basePythonPackages;
     inherit pkgs;
     inherit (pkgs) fetchurl fetchgit;
   } ./pkgs/python-packages.nix);
 
   pythonOverrides = import ./pkgs/python-packages-overrides.nix {
     inherit basePythonPackages pkgs;
   };
 
   version = builtins.readFile ./vcsserver/VERSION;
 
   pythonLocalOverrides = self: super: {
     rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
       inherit doCheck version;
 
       name = "rhodecode-vcsserver-${version}";
       releaseName = "RhodeCodeVCSServer-${version}";
       src = rhodecode-vcsserver-src;
       dontStrip = true; # prevent strip, we don't need it.
 
       propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
         pkgs.git
         pkgs.subversion
       ]);
 
       # TODO: johbo: Make a nicer way to expose the parts. Maybe
       # pkgs/default.nix?
       passthru = {
         pythonPackages = self;
       };
 
       # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
       preCheck = ''
         export PATH="$out/bin:$PATH"
       '';
 
       # put custom attrs here
       checkPhase = ''
         runHook preCheck
         PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
         runHook postCheck
       '';
 
       postInstall = ''
         echo "Writing meta information for rccontrol to nix-support/rccontrol"
         mkdir -p $out/nix-support/rccontrol
         cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
         echo "DONE: Meta information for rccontrol written"
 
         # python based programs need to be wrapped
         ln -s ${self.pyramid}/bin/* $out/bin/
         ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
 
         # Symlink version control utilities
         #
         # We ensure that always the correct version is available as a symlink.
         # So that users calling them via the profile path will always use the
         # correct version.
+        ln -s ${self.python}/bin/python $out/bin
         ln -s ${pkgs.git}/bin/git $out/bin
         ln -s ${self.mercurial}/bin/hg $out/bin
         ln -s ${pkgs.subversion}/bin/svn* $out/bin
 
         for file in $out/bin/*;
         do
           wrapProgram $file \
             --set PATH $PATH \
             --set PYTHONPATH $PYTHONPATH \
             --set PYTHONHASHSEED random
         done
 
       '';
 
     });
   };
 
   # Apply all overrides and fix the final package set
   myPythonPackages =
     (fix
     (extends pythonExternalOverrides
     (extends pythonLocalOverrides
     (extends pythonOverrides
     pythonGeneratedPackages))));
 
 in myPythonPackages.rhodecode-vcsserver
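The header comment above describes this expression as the lean default.nix that only produces the rhodecode-vcsserver derivation, with shell.nix layered on top for development. As a rough usage sketch (the --arg/--argstr options are standard nix-build flags; the argument names come from the attribute set declared at the top of the expression), the declared arguments can be overridden at build time:

    # sketch only: build the derivation selected by the final
    # "in myPythonPackages.rhodecode-vcsserver" line, skipping the test run
    nix-build default.nix --argstr pythonPackages python27Packages --arg doCheck false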
@@ -1,47 +1,54 @@
 # Overrides for the generated python-packages.nix
 #
 # This function is intended to be used as an extension to the generated file
 # python-packages.nix. The main objective is to add needed dependencies of C
 # libraries and tweak the build instructions where needed.
 
 { pkgs, basePythonPackages }:
 
 let
   sed = "sed -i";
 in
 
 self: super: {
 
   subvertpy = super.subvertpy.override (attrs: {
     # TODO: johbo: Remove the "or" once we drop 16.03 support
     SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
     propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
       pkgs.aprutil
       pkgs.subversion
     ];
     preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
       ${sed} -e "s/'gcc'/'clang'/" setup.py
     '';
   });
 
+  hgsubversion = super.hgsubversion.override (attrs: {
+    propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
+      pkgs.sqlite
+      basePythonPackages.sqlite3
+    ];
+  });
+
   mercurial = super.mercurial.override (attrs: {
     propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
       self.python.modules.curses
     ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
       pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
   });
 
   pyramid = super.pyramid.override (attrs: {
     postFixup = ''
       wrapPythonPrograms
       # TODO: johbo: "wrapPython" adds this magic line which
       # confuses pserve.
       ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
     '';
   });
 
   # Avoid that setuptools is replaced, this leads to trouble
   # with buildPythonPackage.
   setuptools = basePythonPackages.setuptools;
 
 }
@@ -1,877 +1,877 @@
 # Generated by pip2nix 0.4.0
 # See https://github.com/johbo/pip2nix
 
 {
   Beaker = super.buildPythonPackage {
     name = "Beaker-1.9.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [funcsigs];
     src = fetchurl {
       url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
       md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   Jinja2 = super.buildPythonPackage {
-    name = "Jinja2-2.8";
+    name = "Jinja2-2.9.6";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [MarkupSafe];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
-      md5 = "edb51693fe22c53cee5403775c71a99e";
+      url = "https://pypi.python.org/packages/90/61/f820ff0076a2599dd39406dcb858ecb239438c02ce706c8e91131ab9c7f1/Jinja2-2.9.6.tar.gz";
+      md5 = "6411537324b4dba0956aaa8109f3c77b";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   Mako = super.buildPythonPackage {
     name = "Mako-1.0.7";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [MarkupSafe];
     src = fetchurl {
       url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
       md5 = "5836cc997b1b773ef389bf6629c30e65";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   MarkupSafe = super.buildPythonPackage {
-    name = "MarkupSafe-0.23";
+    name = "MarkupSafe-1.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
-      md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
+      url = "https://pypi.python.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
+      md5 = "2fcedc9284d50e577b5192e8e3578355";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   PasteDeploy = super.buildPythonPackage {
     name = "PasteDeploy-1.5.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
       md5 = "352b7205c78c8de4987578d19431af3b";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   WebOb = super.buildPythonPackage {
     name = "WebOb-1.7.4";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
       md5 = "397e46892d7f199b1a07eb20a2d3d9bd";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   WebTest = super.buildPythonPackage {
-    name = "WebTest-2.0.27";
+    name = "WebTest-2.0.29";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
-      md5 = "54e6515ac71c51b6fc90179483c749ad";
+      url = "https://pypi.python.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
+      md5 = "30b4cf0d340b9a5335fac4389e6f84fc";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   backports.shutil-get-terminal-size = super.buildPythonPackage {
     name = "backports.shutil-get-terminal-size-1.0.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
       md5 = "03267762480bd86b50580dc19dff3c66";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   beautifulsoup4 = super.buildPythonPackage {
     name = "beautifulsoup4-4.6.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
       md5 = "c17714d0f91a23b708a592cb3c697728";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   configobj = super.buildPythonPackage {
     name = "configobj-5.0.6";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [six];
     src = fetchurl {
       url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
       md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   cov-core = super.buildPythonPackage {
     name = "cov-core-1.15.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [coverage];
     src = fetchurl {
       url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
       md5 = "f519d4cb4c4e52856afb14af52919fe6";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   coverage = super.buildPythonPackage {
     name = "coverage-3.7.1";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
       md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   decorator = super.buildPythonPackage {
-    name = "decorator-4.0.11";
+    name = "decorator-4.1.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
-      md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
+      url = "https://pypi.python.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
+      md5 = "a0f7f4fe00ae2dde93494d90c192cf8c";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
     };
   };
   dulwich = super.buildPythonPackage {
     name = "dulwich-0.13.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
       md5 = "6dede0626657c2bd08f48ca1221eea91";
     };
     meta = {
       license = [ pkgs.lib.licenses.gpl2Plus ];
     };
   };
   enum34 = super.buildPythonPackage {
     name = "enum34-1.1.6";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
       md5 = "5f13a0841a61f7fc295c514490d120d0";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   funcsigs = super.buildPythonPackage {
     name = "funcsigs-1.0.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
       md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
     };
     meta = {
       license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
     };
   };
   gevent = super.buildPythonPackage {
     name = "gevent-1.2.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [greenlet];
     src = fetchurl {
       url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
       md5 = "7f0baf355384fe5ff2ecf66853422554";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   gprof2dot = super.buildPythonPackage {
-    name = "gprof2dot-2016.10.13";
+    name = "gprof2dot-2017.9.19";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
-      md5 = "0125401f15fd2afe1df686a76c64a4fd";
+      url = "https://pypi.python.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
+      md5 = "cda2d552bb0d0b9f16e6824a9aabd225";
     };
     meta = {
-      license = [ { fullName = "LGPL"; } ];
+      license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
     };
   };
   greenlet = super.buildPythonPackage {
     name = "greenlet-0.4.12";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
       md5 = "e8637647d58a26c4a1f51ca393e53c00";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   gunicorn = super.buildPythonPackage {
     name = "gunicorn-19.7.1";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
       md5 = "174d3c3cd670a5be0404d84c484e590c";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   hg-evolve = super.buildPythonPackage {
-    name = "hg-evolve-6.6.0";
+    name = "hg-evolve-7.0.1";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/c5/04/3557c97eaa320b5a6769edade64a299cd2710f5f3b818f64991ab6c8c08f/hg-evolve-6.6.0.tar.gz";
-      md5 = "06b9a9c8e8137bbf0c4fbf940c009725";
+      url = "https://pypi.python.org/packages/92/5c/4c216be1a08f326a12076b645f4892a2b0865810db1f4a0c9648f1f4c113/hg-evolve-7.0.1.tar.gz";
+      md5 = "2dfa926846ea873a8406bababb06b277";
     };
     meta = {
       license = [ { fullName = "GPLv2+"; } ];
     };
   };
   hgsubversion = super.buildPythonPackage {
-    name = "hgsubversion-1.8.7";
+    name = "hgsubversion-1.9";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [mercurial subvertpy];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/1c/b8/ff4d2e0ec486f9765b410f09728c02a010e7485d68d6154968074498a403/hgsubversion-1.8.7.tar.gz";
-      md5 = "289f1c36c13bd6a3435a9be390a77bdc";
+      url = "https://pypi.python.org/packages/db/26/7293a6c6b85e2a74ab452e9ba7f00b04ff0e440e6cd4f84131ac5d5e6b22/hgsubversion-1.9.tar.gz";
+      md5 = "0c6f93ef12cc2e7fe67286f16bcc7211";
     };
     meta = {
       license = [ pkgs.lib.licenses.gpl1 ];
     };
   };
   hupper = super.buildPythonPackage {
     name = "hupper-1.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
       md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   infrae.cache = super.buildPythonPackage {
     name = "infrae.cache-1.0.1";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [Beaker repoze.lru];
     src = fetchurl {
       url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
       md5 = "b09076a766747e6ed2a755cc62088e32";
     };
     meta = {
       license = [ pkgs.lib.licenses.zpt21 ];
     };
   };
   ipdb = super.buildPythonPackage {
     name = "ipdb-0.10.3";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [setuptools ipython];
     src = fetchurl {
       url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
       md5 = "def1f6ac075d54bdee07e6501263d4fa";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   ipython = super.buildPythonPackage {
     name = "ipython-5.1.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
     src = fetchurl {
       url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
       md5 = "47c8122420f65b58784cb4b9b4af35e3";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   ipython-genutils = super.buildPythonPackage {
     name = "ipython-genutils-0.2.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
       md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   mercurial = super.buildPythonPackage {
-    name = "mercurial-4.2.3";
+    name = "mercurial-4.4.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
-      url = "https://www.mercurial-scm.org/release/mercurial-4.2.3.tar.gz";
-      md5 = "a24a8fab7c2ad2c65e945b1b35d94e3b";
+      url = "https://pypi.python.org/packages/d0/83/92a5fa662ba277128db305e39e7ea5a638f2f1cbbc6dc5fbf4c14aefae22/mercurial-4.4.2.tar.gz";
+      md5 = "95769125cf7e9dbc341a983253acefcd";
     };
     meta = {
       license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
     };
   };
   mock = super.buildPythonPackage {
     name = "mock-1.0.1";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
       md5 = "869f08d003c289a97c1a6610faf5e913";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   msgpack-python = super.buildPythonPackage {
     name = "msgpack-python-0.4.8";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
       md5 = "dcd854fb41ee7584ebbf35e049e6be98";
     };
     meta = {
       license = [ pkgs.lib.licenses.asl20 ];
     };
   };
   pathlib2 = super.buildPythonPackage {
     name = "pathlib2-2.3.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [six scandir];
     src = fetchurl {
       url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
       md5 = "89c90409d11fd5947966b6a30a47d18c";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   pexpect = super.buildPythonPackage {
-    name = "pexpect-4.2.1";
+    name = "pexpect-4.3.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [ptyprocess];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
-      md5 = "3694410001a99dff83f0b500a1ca1c95";
+      url = "https://pypi.python.org/packages/f8/44/5466c30e49762bb92e442bbdf4472d6904608d211258eb3198a11f0309a4/pexpect-4.3.0.tar.gz";
+      md5 = "047a486dcd26134b74f2e67046bb61a0";
     };
     meta = {
       license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
     };
   };
   pickleshare = super.buildPythonPackage {
     name = "pickleshare-0.7.4";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [pathlib2];
     src = fetchurl {
       url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
       md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   plaster = super.buildPythonPackage {
-    name = "plaster-0.5";
+    name = "plaster-1.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [setuptools];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
-      md5 = "c59345a67a860cfcaa1bd6a81451399d";
+      url = "https://pypi.python.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
+      md5 = "80e6beb4760c16fea31754babcc0576e";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   plaster-pastedeploy = super.buildPythonPackage {
-    name = "plaster-pastedeploy-0.4.1";
+    name = "plaster-pastedeploy-0.4.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [PasteDeploy plaster];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
-      md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
+      url = "https://pypi.python.org/packages/2c/62/0daf9c0be958e785023e583e51baac15863699e956bfb3d448898d80edd8/plaster_pastedeploy-0.4.2.tar.gz";
+      md5 = "58fd7852002909378e818c9d5b71e90a";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   prompt-toolkit = super.buildPythonPackage {
     name = "prompt-toolkit-1.0.15";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [six wcwidth];
     src = fetchurl {
       url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
       md5 = "8fe70295006dbc8afedd43e5eba99032";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   ptyprocess = super.buildPythonPackage {
     name = "ptyprocess-0.5.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
       md5 = "d3b8febae1b8c53b054bd818d0bb8665";
     };
     meta = {
       license = [ ];
     };
   };
   py = super.buildPythonPackage {
-    name = "py-1.4.34";
+    name = "py-1.5.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
-      md5 = "d9c3d8f734b0819ff48e355d77bf1730";
+      url = "https://pypi.python.org/packages/90/e3/e075127d39d35f09a500ebb4a90afd10f9ef0a1d28a6d09abeec0e444fdd/py-1.5.2.tar.gz";
+      md5 = "279ca69c632069e1b71e11b14641ca28";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   pygments = super.buildPythonPackage {
     name = "pygments-2.2.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
       url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
       md5 = "13037baca42f16917cbd5ad2fab50844";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   pyramid = super.buildPythonPackage {
     name = "pyramid-1.9.1";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
     src = fetchurl {
       url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
       md5 = "0163e19c58c2d12976a3b6fdb57e052d";
     };
     meta = {
       license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
     };
   };
   pyramid-jinja2 = super.buildPythonPackage {
-    name = "pyramid-jinja2-2.5";
+    name = "pyramid-jinja2-2.7";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
-      md5 = "07cb6547204ac5e6f0b22a954ccee928";
+      url = "https://pypi.python.org/packages/d8/80/d60a7233823de22ce77bd864a8a83736a1fe8b49884b08303a2e68b2c853/pyramid_jinja2-2.7.tar.gz";
+      md5 = "c2f8b2cd7b73a6f1d9a311fcfaf4fb92";
     };
     meta = {
       license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
     };
   };
   pyramid-mako = super.buildPythonPackage {
     name = "pyramid-mako-1.0.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [pyramid Mako];
     src = fetchurl {
       url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
       md5 = "ee25343a97eb76bd90abdc2a774eb48a";
     };
     meta = {
       license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
     };
   };
   pytest = super.buildPythonPackage {
-    name = "pytest-3.1.2";
+    name = "pytest-3.2.5";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [py setuptools];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
-      md5 = "c4d179f89043cc925e1c169d03128e02";
+      url = "https://pypi.python.org/packages/1f/f8/8cd74c16952163ce0db0bd95fdd8810cbf093c08be00e6e665ebf0dc3138/pytest-3.2.5.tar.gz";
+      md5 = "6dbe9bb093883f75394a689a1426ac6f";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   pytest-catchlog = super.buildPythonPackage {
     name = "pytest-catchlog-1.2.2";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [py pytest];
     src = fetchurl {
       url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
       md5 = "09d890c54c7456c818102b7ff8c182c8";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   pytest-cov = super.buildPythonPackage {
     name = "pytest-cov-2.5.1";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [pytest coverage];
     src = fetchurl {
       url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
       md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
     };
   };
   pytest-profiling = super.buildPythonPackage {
-    name = "pytest-profiling-1.2.6";
+    name = "pytest-profiling-1.2.11";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [six pytest gprof2dot];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
-      md5 = "50eb4c66c3762a2f1a49669bedc0b894";
+      url = "https://pypi.python.org/packages/c0/4a/b4aa786e93c07a86f1f87c581a36bf355a9e06a9da7e00dbd05047626bd2/pytest-profiling-1.2.11.tar.gz";
+      md5 = "9ef6b60248731be5d44477980408e8f7";
    };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   pytest-runner = super.buildPythonPackage {
-    name = "pytest-runner-2.11.1";
+    name = "pytest-runner-3.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
-      md5 = "bdb73eb18eca2727944a2dcf963c5a81";
+      url = "https://pypi.python.org/packages/65/b4/ae89338cd2d81e2cc54bd6db2e962bfe948f612303610d68ab24539ac2d1/pytest-runner-3.0.tar.gz";
+      md5 = "8f8363a52bbabc4cedd5e239beb2ba11";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];
     };
   };
   pytest-sugar = super.buildPythonPackage {
-    name = "pytest-sugar-0.8.0";
+    name = "pytest-sugar-0.9.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [pytest termcolor];
     src = fetchurl {
-      url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
-      md5 = "8cafbdad648068e0e44b8fc5f9faae42";
+      url = "https://pypi.python.org/packages/49/d8/c5ff6cca3ce2ebd8b73eec89779bf6b4a7737456a70e8ea4d44c1ff90f71/pytest-sugar-0.9.0.tar.gz";
+      md5 = "89fbff17277fa6a95a560a04b68cb9f9";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];
     };
   };
   pytest-timeout = super.buildPythonPackage {
     name = "pytest-timeout-1.2.0";
     buildInputs = with self; [];
     doCheck = false;
     propagatedBuildInputs = with self; [pytest];
     src = fetchurl {
       url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
636 md5 = "83607d91aa163562c7ee835da57d061d";
636 md5 = "83607d91aa163562c7ee835da57d061d";
637 };
637 };
638 meta = {
638 meta = {
639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
640 };
640 };
641 };
641 };
642 repoze.lru = super.buildPythonPackage {
642 repoze.lru = super.buildPythonPackage {
643 name = "repoze.lru-0.6";
643 name = "repoze.lru-0.7";
644 buildInputs = with self; [];
644 buildInputs = with self; [];
645 doCheck = false;
645 doCheck = false;
646 propagatedBuildInputs = with self; [];
646 propagatedBuildInputs = with self; [];
647 src = fetchurl {
647 src = fetchurl {
648 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
648 url = "https://pypi.python.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
649 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
649 md5 = "c08cc030387e0b1fc53c5c7d964b35e2";
650 };
650 };
651 meta = {
651 meta = {
652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
653 };
653 };
654 };
654 };
655 rhodecode-vcsserver = super.buildPythonPackage {
655 rhodecode-vcsserver = super.buildPythonPackage {
656 name = "rhodecode-vcsserver-4.10.6";
656 name = "rhodecode-vcsserver-4.11.0";
657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
658 doCheck = true;
658 doCheck = true;
659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
660 src = ./.;
660 src = ./.;
661 meta = {
661 meta = {
662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
663 };
663 };
664 };
664 };
665 scandir = super.buildPythonPackage {
665 scandir = super.buildPythonPackage {
666 name = "scandir-1.5";
666 name = "scandir-1.6";
667 buildInputs = with self; [];
667 buildInputs = with self; [];
668 doCheck = false;
668 doCheck = false;
669 propagatedBuildInputs = with self; [];
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
670 src = fetchurl {
671 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
671 url = "https://pypi.python.org/packages/77/3f/916f524f50ee65e3f465a280d2851bd63685250fddb3020c212b3977664d/scandir-1.6.tar.gz";
672 md5 = "a2713043de681bba6b084be42e7a8a44";
672 md5 = "0180ddb97c96cbb2d4f25d2ae11c64ac";
673 };
673 };
674 meta = {
674 meta = {
675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 };
676 };
677 };
677 };
678 setuptools = super.buildPythonPackage {
678 setuptools = super.buildPythonPackage {
679 name = "setuptools-30.1.0";
679 name = "setuptools-30.1.0";
680 buildInputs = with self; [];
680 buildInputs = with self; [];
681 doCheck = false;
681 doCheck = false;
682 propagatedBuildInputs = with self; [];
682 propagatedBuildInputs = with self; [];
683 src = fetchurl {
683 src = fetchurl {
684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
686 };
686 };
687 meta = {
687 meta = {
688 license = [ pkgs.lib.licenses.mit ];
688 license = [ pkgs.lib.licenses.mit ];
689 };
689 };
690 };
690 };
691 simplegeneric = super.buildPythonPackage {
691 simplegeneric = super.buildPythonPackage {
692 name = "simplegeneric-0.8.1";
692 name = "simplegeneric-0.8.1";
693 buildInputs = with self; [];
693 buildInputs = with self; [];
694 doCheck = false;
694 doCheck = false;
695 propagatedBuildInputs = with self; [];
695 propagatedBuildInputs = with self; [];
696 src = fetchurl {
696 src = fetchurl {
697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
698 md5 = "f9c1fab00fd981be588fc32759f474e3";
698 md5 = "f9c1fab00fd981be588fc32759f474e3";
699 };
699 };
700 meta = {
700 meta = {
701 license = [ pkgs.lib.licenses.zpt21 ];
701 license = [ pkgs.lib.licenses.zpt21 ];
702 };
702 };
703 };
703 };
704 simplejson = super.buildPythonPackage {
704 simplejson = super.buildPythonPackage {
705 name = "simplejson-3.11.1";
705 name = "simplejson-3.11.1";
706 buildInputs = with self; [];
706 buildInputs = with self; [];
707 doCheck = false;
707 doCheck = false;
708 propagatedBuildInputs = with self; [];
708 propagatedBuildInputs = with self; [];
709 src = fetchurl {
709 src = fetchurl {
710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
712 };
712 };
713 meta = {
713 meta = {
714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
715 };
715 };
716 };
716 };
717 six = super.buildPythonPackage {
717 six = super.buildPythonPackage {
718 name = "six-1.9.0";
718 name = "six-1.11.0";
719 buildInputs = with self; [];
719 buildInputs = with self; [];
720 doCheck = false;
720 doCheck = false;
721 propagatedBuildInputs = with self; [];
721 propagatedBuildInputs = with self; [];
722 src = fetchurl {
722 src = fetchurl {
723 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
723 url = "https://pypi.python.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
724 md5 = "476881ef4012262dfc8adc645ee786c4";
724 md5 = "d12789f9baf7e9fb2524c0c64f1773f8";
725 };
725 };
726 meta = {
726 meta = {
727 license = [ pkgs.lib.licenses.mit ];
727 license = [ pkgs.lib.licenses.mit ];
728 };
728 };
729 };
729 };
730 subprocess32 = super.buildPythonPackage {
730 subprocess32 = super.buildPythonPackage {
731 name = "subprocess32-3.2.7";
731 name = "subprocess32-3.2.7";
732 buildInputs = with self; [];
732 buildInputs = with self; [];
733 doCheck = false;
733 doCheck = false;
734 propagatedBuildInputs = with self; [];
734 propagatedBuildInputs = with self; [];
735 src = fetchurl {
735 src = fetchurl {
736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
737 md5 = "824c801e479d3e916879aae3e9c15e16";
737 md5 = "824c801e479d3e916879aae3e9c15e16";
738 };
738 };
739 meta = {
739 meta = {
740 license = [ pkgs.lib.licenses.psfl ];
740 license = [ pkgs.lib.licenses.psfl ];
741 };
741 };
742 };
742 };
743 subvertpy = super.buildPythonPackage {
743 subvertpy = super.buildPythonPackage {
744 name = "subvertpy-0.9.3";
744 name = "subvertpy-0.10.1";
745 buildInputs = with self; [];
745 buildInputs = with self; [];
746 doCheck = false;
746 doCheck = false;
747 propagatedBuildInputs = with self; [];
747 propagatedBuildInputs = with self; [];
748 src = fetchurl {
748 src = fetchurl {
749 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
749 url = "https://pypi.python.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
750 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
750 md5 = "a70e03579902d480f5e9f8c570f6536b";
751 };
751 };
752 meta = {
752 meta = {
753 license = [ pkgs.lib.licenses.lgpl21Plus ];
753 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
754 };
754 };
755 };
755 };
756 termcolor = super.buildPythonPackage {
756 termcolor = super.buildPythonPackage {
757 name = "termcolor-1.1.0";
757 name = "termcolor-1.1.0";
758 buildInputs = with self; [];
758 buildInputs = with self; [];
759 doCheck = false;
759 doCheck = false;
760 propagatedBuildInputs = with self; [];
760 propagatedBuildInputs = with self; [];
761 src = fetchurl {
761 src = fetchurl {
762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
763 md5 = "043e89644f8909d462fbbfa511c768df";
763 md5 = "043e89644f8909d462fbbfa511c768df";
764 };
764 };
765 meta = {
765 meta = {
766 license = [ pkgs.lib.licenses.mit ];
766 license = [ pkgs.lib.licenses.mit ];
767 };
767 };
768 };
768 };
769 traitlets = super.buildPythonPackage {
769 traitlets = super.buildPythonPackage {
770 name = "traitlets-4.3.2";
770 name = "traitlets-4.3.2";
771 buildInputs = with self; [];
771 buildInputs = with self; [];
772 doCheck = false;
772 doCheck = false;
773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
774 src = fetchurl {
774 src = fetchurl {
775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
777 };
777 };
778 meta = {
778 meta = {
779 license = [ pkgs.lib.licenses.bsdOriginal ];
779 license = [ pkgs.lib.licenses.bsdOriginal ];
780 };
780 };
781 };
781 };
782 translationstring = super.buildPythonPackage {
782 translationstring = super.buildPythonPackage {
783 name = "translationstring-1.3";
783 name = "translationstring-1.3";
784 buildInputs = with self; [];
784 buildInputs = with self; [];
785 doCheck = false;
785 doCheck = false;
786 propagatedBuildInputs = with self; [];
786 propagatedBuildInputs = with self; [];
787 src = fetchurl {
787 src = fetchurl {
788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
790 };
790 };
791 meta = {
791 meta = {
792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
793 };
793 };
794 };
794 };
795 venusian = super.buildPythonPackage {
795 venusian = super.buildPythonPackage {
796 name = "venusian-1.1.0";
796 name = "venusian-1.1.0";
797 buildInputs = with self; [];
797 buildInputs = with self; [];
798 doCheck = false;
798 doCheck = false;
799 propagatedBuildInputs = with self; [];
799 propagatedBuildInputs = with self; [];
800 src = fetchurl {
800 src = fetchurl {
801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
803 };
803 };
804 meta = {
804 meta = {
805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
806 };
806 };
807 };
807 };
808 waitress = super.buildPythonPackage {
808 waitress = super.buildPythonPackage {
809 name = "waitress-1.0.2";
809 name = "waitress-1.1.0";
810 buildInputs = with self; [];
810 buildInputs = with self; [];
811 doCheck = false;
811 doCheck = false;
812 propagatedBuildInputs = with self; [];
812 propagatedBuildInputs = with self; [];
813 src = fetchurl {
813 src = fetchurl {
814 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
814 url = "https://pypi.python.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
815 md5 = "b968f39e95d609f6194c6e50425d4bb7";
815 md5 = "0f1eb7fdfdbf2e6d18decbda1733045c";
816 };
816 };
817 meta = {
817 meta = {
818 license = [ pkgs.lib.licenses.zpt21 ];
818 license = [ pkgs.lib.licenses.zpt21 ];
819 };
819 };
820 };
820 };
821 wcwidth = super.buildPythonPackage {
821 wcwidth = super.buildPythonPackage {
822 name = "wcwidth-0.1.7";
822 name = "wcwidth-0.1.7";
823 buildInputs = with self; [];
823 buildInputs = with self; [];
824 doCheck = false;
824 doCheck = false;
825 propagatedBuildInputs = with self; [];
825 propagatedBuildInputs = with self; [];
826 src = fetchurl {
826 src = fetchurl {
827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
829 };
829 };
830 meta = {
830 meta = {
831 license = [ pkgs.lib.licenses.mit ];
831 license = [ pkgs.lib.licenses.mit ];
832 };
832 };
833 };
833 };
834 wheel = super.buildPythonPackage {
834 wheel = super.buildPythonPackage {
835 name = "wheel-0.29.0";
835 name = "wheel-0.29.0";
836 buildInputs = with self; [];
836 buildInputs = with self; [];
837 doCheck = false;
837 doCheck = false;
838 propagatedBuildInputs = with self; [];
838 propagatedBuildInputs = with self; [];
839 src = fetchurl {
839 src = fetchurl {
840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
841 md5 = "555a67e4507cedee23a0deb9651e452f";
841 md5 = "555a67e4507cedee23a0deb9651e452f";
842 };
842 };
843 meta = {
843 meta = {
844 license = [ pkgs.lib.licenses.mit ];
844 license = [ pkgs.lib.licenses.mit ];
845 };
845 };
846 };
846 };
847 zope.deprecation = super.buildPythonPackage {
847 zope.deprecation = super.buildPythonPackage {
848 name = "zope.deprecation-4.1.2";
848 name = "zope.deprecation-4.1.2";
849 buildInputs = with self; [];
849 buildInputs = with self; [];
850 doCheck = false;
850 doCheck = false;
851 propagatedBuildInputs = with self; [setuptools];
851 propagatedBuildInputs = with self; [setuptools];
852 src = fetchurl {
852 src = fetchurl {
853 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
853 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
854 md5 = "e9a663ded58f4f9f7881beb56cae2782";
854 md5 = "e9a663ded58f4f9f7881beb56cae2782";
855 };
855 };
856 meta = {
856 meta = {
857 license = [ pkgs.lib.licenses.zpt21 ];
857 license = [ pkgs.lib.licenses.zpt21 ];
858 };
858 };
859 };
859 };
860 zope.interface = super.buildPythonPackage {
860 zope.interface = super.buildPythonPackage {
861 name = "zope.interface-4.1.3";
861 name = "zope.interface-4.1.3";
862 buildInputs = with self; [];
862 buildInputs = with self; [];
863 doCheck = false;
863 doCheck = false;
864 propagatedBuildInputs = with self; [setuptools];
864 propagatedBuildInputs = with self; [setuptools];
865 src = fetchurl {
865 src = fetchurl {
866 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
866 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
867 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
867 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
868 };
868 };
869 meta = {
869 meta = {
870 license = [ pkgs.lib.licenses.zpt21 ];
870 license = [ pkgs.lib.licenses.zpt21 ];
871 };
871 };
872 };
872 };
873
873
874 ### Test requirements
874 ### Test requirements
875
875
876
876
877 }
877 }
@@ -1,41 +1,40 b''
1 ## core
1 ## core
2 setuptools==30.1.0
2 setuptools==30.1.0
3
3
4 Beaker==1.9.0
4 Beaker==1.9.0
5 configobj==5.0.6
5 configobj==5.0.6
6 decorator==4.0.11
6 decorator==4.1.2
7 dulwich==0.13.0
7 dulwich==0.13.0
8 hgsubversion==1.8.7
8 hgsubversion==1.9.0
9 hg-evolve==6.6.0
9 hg-evolve==7.0.1
10 infrae.cache==1.0.1
10 infrae.cache==1.0.1
11 mercurial==4.2.3
11 mercurial==4.4.2
12 msgpack-python==0.4.8
12 msgpack-python==0.4.8
13 pyramid-jinja2==2.5
13 pyramid-jinja2==2.7
14 pyramid==1.9.1
14 pyramid==1.9.1
15 pyramid-mako==1.0.2
15 pyramid-mako==1.0.2
16 repoze.lru==0.6
16 repoze.lru==0.7
17 simplejson==3.11.1
17 simplejson==3.11.1
18 subprocess32==3.2.7
18 subprocess32==3.2.7
19
19
20 # Custom subvertpy that is not available on pypi.
20 subvertpy==0.10.1
21 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
22
21
23 six==1.9.0
22 six==1.11.0
24 translationstring==1.3
23 translationstring==1.3
25 WebOb==1.7.4
24 WebOb==1.7.4
26 wheel==0.29.0
25 wheel==0.29.0
27 zope.deprecation==4.1.2
26 zope.deprecation==4.1.2
28 zope.interface==4.1.3
27 zope.interface==4.1.3
29
28
30 ## http servers
29 ## http servers
31 gevent==1.2.2
30 gevent==1.2.2
32 greenlet==0.4.12
31 greenlet==0.4.12
33 gunicorn==19.7.1
32 gunicorn==19.7.1
34 waitress==1.0.2
33 waitress==1.1.0
35
34
36 ## debug
35 ## debug
37 ipdb==0.10.3
36 ipdb==0.10.3
38 ipython==5.1.0
37 ipython==5.1.0
39
38
40 ## test related requirements
39 ## test related requirements
41 -r requirements_test.txt
40 -r requirements_test.txt
@@ -1,15 +1,15 b''
1 # test related requirements
1 # test related requirements
2 pytest==3.1.2
2 pytest==3.2.5
3 py==1.4.34
3 py==1.5.2
4 pytest-cov==2.5.1
4 pytest-cov==2.5.1
5 pytest-sugar==0.8.0
5 pytest-sugar==0.9.0
6 pytest-runner==2.11.1
6 pytest-runner==3.0.0
7 pytest-catchlog==1.2.2
7 pytest-catchlog==1.2.2
8 pytest-profiling==1.2.6
8 pytest-profiling==1.2.11
9 gprof2dot==2016.10.13
9 gprof2dot==2017.9.19
10 pytest-timeout==1.2.0
10 pytest-timeout==1.2.0
11
11
12 mock==1.0.1
12 mock==1.0.1
13 WebTest==2.0.27
13 WebTest==2.0.29
14 cov-core==1.15.0
14 cov-core==1.15.0
15 coverage==3.7.1
15 coverage==3.7.1
@@ -1,1 +1,1 b''
1 4.10.6 No newline at end of file
1 4.11.0 No newline at end of file
@@ -1,21 +1,21 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import pkgutil
18 import pkgutil
19
19
20
20
21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
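For reference, a minimal sketch of what this version lookup does at runtime (it assumes the package and its VERSION file are installed together, so the value tracks the bumpversion-managed file):

    import pkgutil

    # The VERSION file ships next to vcsserver/__init__.py; get_data() reads it
    # from the installed package, so a bumpversion update flows straight into
    # __version__ without touching the code.
    version = pkgutil.get_data('vcsserver', 'VERSION').strip()
    print(version)  # e.g. 4.11.0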
@@ -1,98 +1,98 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import sys
18 import sys
19 import traceback
19 import traceback
20 import logging
20 import logging
21 import urlparse
21 import urlparse
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 class RepoFactory(object):
26 class RepoFactory(object):
27 """
27 """
28 Utility to create repository instances
28 Utility to create repository instances
29
29
30 It provides internal caching of the `repo` object based on
30 It provides internal caching of the `repo` object based on
31 the :term:`call context`.
31 the :term:`call context`.
32 """
32 """
33
33
34 def __init__(self, repo_cache):
34 def __init__(self, repo_cache):
35 self._cache = repo_cache
35 self._cache = repo_cache
36
36
37 def _create_config(self, path, config):
37 def _create_config(self, path, config):
38 config = {}
38 config = {}
39 return config
39 return config
40
40
41 def _create_repo(self, wire, create):
41 def _create_repo(self, wire, create):
42 raise NotImplementedError()
42 raise NotImplementedError()
43
43
44 def repo(self, wire, create=False):
44 def repo(self, wire, create=False):
45 """
45 """
46 Get a repository instance for the given path.
46 Get a repository instance for the given path.
47
47
48 Internally uses the low-level Beaker API, since the decorators introduce
48 Internally uses the low-level Beaker API, since the decorators introduce
49 significant overhead.
49 significant overhead.
50 """
50 """
51 def create_new_repo():
51 def create_new_repo():
52 return self._create_repo(wire, create)
52 return self._create_repo(wire, create)
53
53
54 return self._repo(wire, create_new_repo)
54 return self._repo(wire, create_new_repo)
55
55
56 def _repo(self, wire, createfunc):
56 def _repo(self, wire, createfunc):
57 context = wire.get('context', None)
57 context = wire.get('context', None)
58 cache = wire.get('cache', True)
58 cache = wire.get('cache', True)
59
59
60 if context and cache:
60 if context and cache:
61 cache_key = (context, wire['path'])
61 cache_key = (context, wire['path'])
62 log.debug(
62 log.debug(
63 'FETCH %s@%s repo object from cache. Context: %s',
63 'FETCH %s@%s repo object from cache. Context: %s',
64 self.__class__.__name__, wire['path'], context)
64 self.__class__.__name__, wire['path'], context)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
65 return self._cache.get(key=cache_key, createfunc=createfunc)
66 else:
66 else:
67 log.debug(
67 log.debug(
68 'INIT %s@%s repo object based on wire %s. Context: %s',
68 'INIT %s@%s repo object based on wire %s. Context: %s',
69 self.__class__.__name__, wire['path'], wire, context)
69 self.__class__.__name__, wire['path'], wire, context)
70 return createfunc()
70 return createfunc()
71
71
72
72
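The repo()/_repo() helpers above key the cache on the call context and repository path. A minimal sketch of that keying scheme, with a plain dict standing in for the injected Beaker cache (names here are illustrative only):

    _repo_cache = {}  # stand-in for the Beaker cache passed in as `repo_cache`

    def cached_repo(wire, createfunc):
        context = wire.get('context')
        if context and wire.get('cache', True):
            key = (context, wire['path'])
            if key not in _repo_cache:
                _repo_cache[key] = createfunc()  # built once per (context, path)
            return _repo_cache[key]
        return createfunc()  # no context or caching disabled: always rebuild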
73 def obfuscate_qs(query_string):
73 def obfuscate_qs(query_string):
74 if query_string is None:
74 if query_string is None:
75 return None
75 return None
76
76
77 parsed = []
77 parsed = []
78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
78 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
79 if k in ['auth_token', 'api_key']:
79 if k in ['auth_token', 'api_key']:
80 v = "*****"
80 v = "*****"
81 parsed.append((k, v))
81 parsed.append((k, v))
82
82
83 return '&'.join('{}{}'.format(
83 return '&'.join('{}{}'.format(
84 k, '={}'.format(v) if v else '') for k, v in parsed)
84 k, '={}'.format(v) if v else '') for k, v in parsed)
85
85
86
86
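A quick usage sketch of obfuscate_qs() above: sensitive parameters are masked and blank values keep their bare key.

    print(obfuscate_qs('auth_token=secret&foo=bar&empty='))
    # -> auth_token=*****&foo=bar&empty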
87 def raise_from_original(new_type):
87 def raise_from_original(new_type):
88 """
88 """
89 Raise a new exception type with original args and traceback.
89 Raise a new exception type with original args and traceback.
90 """
90 """
91 exc_type, exc_value, exc_traceback = sys.exc_info()
91 exc_type, exc_value, exc_traceback = sys.exc_info()
92
92
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
93 traceback.format_exception(exc_type, exc_value, exc_traceback)
94
94
95 try:
95 try:
96 raise new_type(*exc_value.args), None, exc_traceback
96 raise new_type(*exc_value.args), None, exc_traceback
97 finally:
97 finally:
98 del exc_traceback
98 del exc_traceback
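A short usage sketch of raise_from_original() (Python 2 semantics, matching the three-argument raise above); WrappedError is a hypothetical target type:

    class WrappedError(Exception):
        pass

    try:
        try:
            int('not-a-number')               # original low-level failure
        except ValueError:
            raise_from_original(WrappedError)
    except WrappedError as exc:
        print(exc.args)  # the original ValueError args are preserved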
@@ -1,70 +1,70 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 import functools
27 import functools
28 from pyramid.httpexceptions import HTTPLocked
28 from pyramid.httpexceptions import HTTPLocked
29
29
30
30
31 def _make_exception(kind, *args):
31 def _make_exception(kind, *args):
32 """
32 """
33 Prepares a base `Exception` instance to be sent over the wire.
33 Prepares a base `Exception` instance to be sent over the wire.
34
34
35 To give our caller a hint what this is about, it will attach an attribute
35 To give our caller a hint what this is about, it will attach an attribute
36 `_vcs_kind` to the exception.
36 `_vcs_kind` to the exception.
37 """
37 """
38 exc = Exception(*args)
38 exc = Exception(*args)
39 exc._vcs_kind = kind
39 exc._vcs_kind = kind
40 return exc
40 return exc
41
41
42
42
43 AbortException = functools.partial(_make_exception, 'abort')
43 AbortException = functools.partial(_make_exception, 'abort')
44
44
45 ArchiveException = functools.partial(_make_exception, 'archive')
45 ArchiveException = functools.partial(_make_exception, 'archive')
46
46
47 LookupException = functools.partial(_make_exception, 'lookup')
47 LookupException = functools.partial(_make_exception, 'lookup')
48
48
49 VcsException = functools.partial(_make_exception, 'error')
49 VcsException = functools.partial(_make_exception, 'error')
50
50
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
52
52
53 RequirementException = functools.partial(_make_exception, 'requirement')
53 RequirementException = functools.partial(_make_exception, 'requirement')
54
54
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
56
56
57 URLError = functools.partial(_make_exception, 'url_error')
57 URLError = functools.partial(_make_exception, 'url_error')
58
58
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
60
60
61
61
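A sketch of how a caller can dispatch on these wrapped exceptions: only a plain Exception and its `_vcs_kind` marker travel over the wire, so the client never needs to import server-side classes.

    try:
        raise LookupException('unknown commit id')   # built by the partial above
    except Exception as exc:
        kind = getattr(exc, '_vcs_kind', None)
        if kind == 'lookup':
            print('missing object: %s' % (exc.args,))
        else:
            raise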
62 class HTTPRepoLocked(HTTPLocked):
62 class HTTPRepoLocked(HTTPLocked):
63 """
63 """
64 Subclass of HTTPLocked response that allows setting the title and status
64 Subclass of HTTPLocked response that allows setting the title and status
65 code via constructor arguments.
65 code via constructor arguments.
66 """
66 """
67 def __init__(self, title, status_code=None, **kwargs):
67 def __init__(self, title, status_code=None, **kwargs):
68 self.code = status_code or HTTPLocked.code
68 self.code = status_code or HTTPLocked.code
69 self.title = title
69 self.title = title
70 super(HTTPRepoLocked, self).__init__(**kwargs)
70 super(HTTPRepoLocked, self).__init__(**kwargs)
@@ -1,645 +1,658 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import urllib
24 import urllib
24 import urllib2
25 import urllib2
25 from functools import wraps
26 from functools import wraps
26
27
27 from dulwich import index, objects
28 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
30 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
32 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
33 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
35 from dulwich.server import update_server_info
35
36
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
38 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.hgcompat import (
40 from vcsserver.hgcompat import (
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 from vcsserver.git_lfs.lib import LFSOidStore
42 from vcsserver.git_lfs.lib import LFSOidStore
42
43
43 DIR_STAT = stat.S_IFDIR
44 DIR_STAT = stat.S_IFDIR
44 FILE_MODE = stat.S_IFMT
45 FILE_MODE = stat.S_IFMT
45 GIT_LINK = objects.S_IFGITLINK
46 GIT_LINK = objects.S_IFGITLINK
46
47
47 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
48
49
49
50
50 def reraise_safe_exceptions(func):
51 def reraise_safe_exceptions(func):
51 """Converts Dulwich exceptions to something neutral."""
52 """Converts Dulwich exceptions to something neutral."""
52 @wraps(func)
53 @wraps(func)
53 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
54 try:
55 try:
55 return func(*args, **kwargs)
56 return func(*args, **kwargs)
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 ObjectMissing) as e:
58 ObjectMissing) as e:
58 raise exceptions.LookupException(e.message)
59 raise exceptions.LookupException(e.message)
59 except (HangupException, UnexpectedCommandError) as e:
60 except (HangupException, UnexpectedCommandError) as e:
60 raise exceptions.VcsException(e.message)
61 raise exceptions.VcsException(e.message)
61 except Exception as e:
62 except Exception as e:
62 # NOTE(marcink): because of how dulwich handles some exceptions
63 # NOTE(marcink): because of how dulwich handles some exceptions
63 # (KeyError on empty repos), we cannot track this and catch all
64 # (KeyError on empty repos), we cannot track this and catch all
64 # exceptions; it may be an exception from other handlers
65 # exceptions; it may be an exception from other handlers
65 #if not hasattr(e, '_vcs_kind'):
66 #if not hasattr(e, '_vcs_kind'):
66 #log.exception("Unhandled exception in git remote call")
67 #log.exception("Unhandled exception in git remote call")
67 #raise_from_original(exceptions.UnhandledException)
68 #raise_from_original(exceptions.UnhandledException)
68 raise
69 raise
69 return wrapper
70 return wrapper
70
71
71
72
72 class Repo(DulwichRepo):
73 class Repo(DulwichRepo):
73 """
74 """
74 A wrapper for dulwich Repo class.
75 A wrapper for dulwich Repo class.
75
76
76 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
77 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
77 "Too many open files" error. We need to close all opened file descriptors
78 "Too many open files" error. We need to close all opened file descriptors
78 once the repo object is destroyed.
79 once the repo object is destroyed.
79
80
80 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 to 0.12.0 +
82 to 0.12.0 +
82 """
83 """
83 def __del__(self):
84 def __del__(self):
84 if hasattr(self, 'object_store'):
85 if hasattr(self, 'object_store'):
85 self.close()
86 self.close()
86
87
87
88
88 class GitFactory(RepoFactory):
89 class GitFactory(RepoFactory):
89
90
90 def _create_repo(self, wire, create):
91 def _create_repo(self, wire, create):
91 repo_path = str_to_dulwich(wire['path'])
92 repo_path = str_to_dulwich(wire['path'])
92 return Repo(repo_path)
93 return Repo(repo_path)
93
94
94
95
95 class GitRemote(object):
96 class GitRemote(object):
96
97
97 def __init__(self, factory):
98 def __init__(self, factory):
98 self._factory = factory
99 self._factory = factory
99
100
100 self._bulk_methods = {
101 self._bulk_methods = {
101 "author": self.commit_attribute,
102 "author": self.commit_attribute,
102 "date": self.get_object_attrs,
103 "date": self.get_object_attrs,
103 "message": self.commit_attribute,
104 "message": self.commit_attribute,
104 "parents": self.commit_attribute,
105 "parents": self.commit_attribute,
105 "_commit": self.revision,
106 "_commit": self.revision,
106 }
107 }
107
108
108 def _wire_to_config(self, wire):
109 def _wire_to_config(self, wire):
109 if 'config' in wire:
110 if 'config' in wire:
110 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 return {}
112 return {}
112
113
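An illustrative `wire` payload for _wire_to_config() above (section/key/value names are hypothetical): config entries arrive as (section, key, value) triples and are flattened into 'section_key' lookups.

    wire = {'config': [('vcs_git_lfs', 'store_location', '/var/lfs-store')]}
    # _wire_to_config(wire) == {'vcs_git_lfs_store_location': '/var/lfs-store'}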
113 def _assign_ref(self, wire, ref, commit_id):
114 def _assign_ref(self, wire, ref, commit_id):
114 repo = self._factory.repo(wire)
115 repo = self._factory.repo(wire)
115 repo[ref] = commit_id
116 repo[ref] = commit_id
116
117
117 @reraise_safe_exceptions
118 @reraise_safe_exceptions
118 def add_object(self, wire, content):
119 def add_object(self, wire, content):
119 repo = self._factory.repo(wire)
120 repo = self._factory.repo(wire)
120 blob = objects.Blob()
121 blob = objects.Blob()
121 blob.set_raw_string(content)
122 blob.set_raw_string(content)
122 repo.object_store.add_object(blob)
123 repo.object_store.add_object(blob)
123 return blob.id
124 return blob.id
124
125
125 @reraise_safe_exceptions
126 @reraise_safe_exceptions
126 def assert_correct_path(self, wire):
127 def assert_correct_path(self, wire):
128 path = wire.get('path')
127 try:
129 try:
128 self._factory.repo(wire)
130 self._factory.repo(wire)
129 except NotGitRepository as e:
131 except NotGitRepository as e:
130 # Exception can contain unicode which we convert
132 tb = traceback.format_exc()
131 raise exceptions.AbortException(repr(e))
133 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
134 return False
135
136 return True
132
137
133 @reraise_safe_exceptions
138 @reraise_safe_exceptions
134 def bare(self, wire):
139 def bare(self, wire):
135 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
136 return repo.bare
141 return repo.bare
137
142
138 @reraise_safe_exceptions
143 @reraise_safe_exceptions
139 def blob_as_pretty_string(self, wire, sha):
144 def blob_as_pretty_string(self, wire, sha):
140 repo = self._factory.repo(wire)
145 repo = self._factory.repo(wire)
141 return repo[sha].as_pretty_string()
146 return repo[sha].as_pretty_string()
142
147
143 @reraise_safe_exceptions
148 @reraise_safe_exceptions
144 def blob_raw_length(self, wire, sha):
149 def blob_raw_length(self, wire, sha):
145 repo = self._factory.repo(wire)
150 repo = self._factory.repo(wire)
146 blob = repo[sha]
151 blob = repo[sha]
147 return blob.raw_length()
152 return blob.raw_length()
148
153
149 def _parse_lfs_pointer(self, raw_content):
154 def _parse_lfs_pointer(self, raw_content):
150
155
151 spec_string = 'version https://git-lfs.github.com/spec'
156 spec_string = 'version https://git-lfs.github.com/spec'
152 if raw_content and raw_content.startswith(spec_string):
157 if raw_content and raw_content.startswith(spec_string):
153 pattern = re.compile(r"""
158 pattern = re.compile(r"""
154 (?:\n)?
159 (?:\n)?
155 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
160 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
161 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 ^size[ ](?P<oid_size>[0-9]+)\n
162 ^size[ ](?P<oid_size>[0-9]+)\n
158 (?:\n)?
163 (?:\n)?
159 """, re.VERBOSE | re.MULTILINE)
164 """, re.VERBOSE | re.MULTILINE)
160 match = pattern.match(raw_content)
165 match = pattern.match(raw_content)
161 if match:
166 if match:
162 return match.groupdict()
167 return match.groupdict()
163
168
164 return {}
169 return {}
165
170
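An example pointer blob that _parse_lfs_pointer() above recognises (the oid is a made-up digest); ordinary blobs fall through and return an empty dict.

    oid = 'a' * 64  # hypothetical sha256 hex digest
    pointer = ('version https://git-lfs.github.com/spec/v1\n'
               'oid sha256:' + oid + '\n'
               'size 12345\n')
    # parsing yields {'spec_ver': 'v1', 'oid_hash': oid, 'oid_size': '12345'}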
166 @reraise_safe_exceptions
171 @reraise_safe_exceptions
167 def is_large_file(self, wire, sha):
172 def is_large_file(self, wire, sha):
168 repo = self._factory.repo(wire)
173 repo = self._factory.repo(wire)
169 blob = repo[sha]
174 blob = repo[sha]
170 return self._parse_lfs_pointer(blob.as_raw_string())
175 return self._parse_lfs_pointer(blob.as_raw_string())
171
176
172 @reraise_safe_exceptions
177 @reraise_safe_exceptions
173 def in_largefiles_store(self, wire, oid):
178 def in_largefiles_store(self, wire, oid):
174 repo = self._factory.repo(wire)
179 repo = self._factory.repo(wire)
175 conf = self._wire_to_config(wire)
180 conf = self._wire_to_config(wire)
176
181
177 store_location = conf.get('vcs_git_lfs_store_location')
182 store_location = conf.get('vcs_git_lfs_store_location')
178 if store_location:
183 if store_location:
179 repo_name = repo.path
184 repo_name = repo.path
180 store = LFSOidStore(
185 store = LFSOidStore(
181 oid=oid, repo=repo_name, store_location=store_location)
186 oid=oid, repo=repo_name, store_location=store_location)
182 return store.has_oid()
187 return store.has_oid()
183
188
184 return False
189 return False
185
190
186 @reraise_safe_exceptions
191 @reraise_safe_exceptions
187 def store_path(self, wire, oid):
192 def store_path(self, wire, oid):
188 repo = self._factory.repo(wire)
193 repo = self._factory.repo(wire)
189 conf = self._wire_to_config(wire)
194 conf = self._wire_to_config(wire)
190
195
191 store_location = conf.get('vcs_git_lfs_store_location')
196 store_location = conf.get('vcs_git_lfs_store_location')
192 if store_location:
197 if store_location:
193 repo_name = repo.path
198 repo_name = repo.path
194 store = LFSOidStore(
199 store = LFSOidStore(
195 oid=oid, repo=repo_name, store_location=store_location)
200 oid=oid, repo=repo_name, store_location=store_location)
196 return store.oid_path
201 return store.oid_path
197 raise ValueError('Unable to fetch oid with path {}'.format(oid))
202 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198
203
199 @reraise_safe_exceptions
204 @reraise_safe_exceptions
200 def bulk_request(self, wire, rev, pre_load):
205 def bulk_request(self, wire, rev, pre_load):
201 result = {}
206 result = {}
202 for attr in pre_load:
207 for attr in pre_load:
203 try:
208 try:
204 method = self._bulk_methods[attr]
209 method = self._bulk_methods[attr]
205 args = [wire, rev]
210 args = [wire, rev]
206 if attr == "date":
211 if attr == "date":
207 args.extend(["commit_time", "commit_timezone"])
212 args.extend(["commit_time", "commit_timezone"])
208 elif attr in ["author", "message", "parents"]:
213 elif attr in ["author", "message", "parents"]:
209 args.append(attr)
214 args.append(attr)
210 result[attr] = method(*args)
215 result[attr] = method(*args)
211 except KeyError:
216 except KeyError:
212 raise exceptions.VcsException(
217 raise exceptions.VcsException(
213 "Unknown bulk attribute: %s" % attr)
218 "Unknown bulk attribute: %s" % attr)
214 return result
219 return result
215
220
216 def _build_opener(self, url):
221 def _build_opener(self, url):
217 handlers = []
222 handlers = []
218 url_obj = url_parser(url)
223 url_obj = url_parser(url)
219 _, authinfo = url_obj.authinfo()
224 _, authinfo = url_obj.authinfo()
220
225
221 if authinfo:
226 if authinfo:
222 # create a password manager
227 # create a password manager
223 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
228 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
224 passmgr.add_password(*authinfo)
229 passmgr.add_password(*authinfo)
225
230
226 handlers.extend((httpbasicauthhandler(passmgr),
231 handlers.extend((httpbasicauthhandler(passmgr),
227 httpdigestauthhandler(passmgr)))
232 httpdigestauthhandler(passmgr)))
228
233
229 return urllib2.build_opener(*handlers)
234 return urllib2.build_opener(*handlers)
230
235
231 @reraise_safe_exceptions
236 @reraise_safe_exceptions
232 def check_url(self, url, config):
237 def check_url(self, url, config):
233 url_obj = url_parser(url)
238 url_obj = url_parser(url)
234 test_uri, _ = url_obj.authinfo()
239 test_uri, _ = url_obj.authinfo()
235 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
240 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
236 url_obj.query = obfuscate_qs(url_obj.query)
241 url_obj.query = obfuscate_qs(url_obj.query)
237 cleaned_uri = str(url_obj)
242 cleaned_uri = str(url_obj)
238 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
243 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
239
244
240 if not test_uri.endswith('info/refs'):
245 if not test_uri.endswith('info/refs'):
241 test_uri = test_uri.rstrip('/') + '/info/refs'
246 test_uri = test_uri.rstrip('/') + '/info/refs'
242
247
243 o = self._build_opener(url)
248 o = self._build_opener(url)
244 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
249 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
245
250
246 q = {"service": 'git-upload-pack'}
251 q = {"service": 'git-upload-pack'}
247 qs = '?%s' % urllib.urlencode(q)
252 qs = '?%s' % urllib.urlencode(q)
248 cu = "%s%s" % (test_uri, qs)
253 cu = "%s%s" % (test_uri, qs)
249 req = urllib2.Request(cu, None, {})
254 req = urllib2.Request(cu, None, {})
250
255
251 try:
256 try:
252 log.debug("Trying to open URL %s", cleaned_uri)
257 log.debug("Trying to open URL %s", cleaned_uri)
253 resp = o.open(req)
258 resp = o.open(req)
254 if resp.code != 200:
259 if resp.code != 200:
255 raise exceptions.URLError('Return Code is not 200')
260 raise exceptions.URLError('Return Code is not 200')
256 except Exception as e:
261 except Exception as e:
257 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
262 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
258 # means it cannot be cloned
263 # means it cannot be cloned
259 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
264 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
260
265
261 # now detect if it's proper git repo
266 # now detect if it's proper git repo
262 gitdata = resp.read()
267 gitdata = resp.read()
263 if 'service=git-upload-pack' in gitdata:
268 if 'service=git-upload-pack' in gitdata:
264 pass
269 pass
265 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
270 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
266 # old style git can return some other format !
271 # old style git can return some other format !
267 pass
272 pass
268 else:
273 else:
269 raise exceptions.URLError(
274 raise exceptions.URLError(
270 "url [%s] does not look like an git" % (cleaned_uri,))
275 "url [%s] does not look like an git" % (cleaned_uri,))
271
276
272 return True
277 return True
273
278
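A sketch of the smart-HTTP probe that check_url() above performs (URL is illustrative):

    base = 'https://example.com/repos/project'
    probe = base.rstrip('/') + '/info/refs' + '?service=git-upload-pack'
    # A 200 response whose body advertises 'service=git-upload-pack' (or, for
    # very old servers, bare '<sha> refs/...' lines) is accepted as a Git repo.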
274 @reraise_safe_exceptions
279 @reraise_safe_exceptions
275 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
280 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
276 remote_refs = self.fetch(wire, url, apply_refs=False)
281 remote_refs = self.fetch(wire, url, apply_refs=False)
277 repo = self._factory.repo(wire)
282 repo = self._factory.repo(wire)
278 if isinstance(valid_refs, list):
283 if isinstance(valid_refs, list):
279 valid_refs = tuple(valid_refs)
284 valid_refs = tuple(valid_refs)
280
285
281 for k in remote_refs:
286 for k in remote_refs:
282 # only parse heads/tags and skip so called deferred tags
287 # only parse heads/tags and skip so called deferred tags
283 if k.startswith(valid_refs) and not k.endswith(deferred):
288 if k.startswith(valid_refs) and not k.endswith(deferred):
284 repo[k] = remote_refs[k]
289 repo[k] = remote_refs[k]
285
290
286 if update_after_clone:
291 if update_after_clone:
287 # we want to checkout HEAD
292 # we want to checkout HEAD
288 repo["HEAD"] = remote_refs["HEAD"]
293 repo["HEAD"] = remote_refs["HEAD"]
289 index.build_index_from_tree(repo.path, repo.index_path(),
294 index.build_index_from_tree(repo.path, repo.index_path(),
290 repo.object_store, repo["HEAD"].tree)
295 repo.object_store, repo["HEAD"].tree)
291
296
292 # TODO: this is quite complex, check if that can be simplified
297 # TODO: this is quite complex, check if that can be simplified
293 @reraise_safe_exceptions
298 @reraise_safe_exceptions
294 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
299 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
295 repo = self._factory.repo(wire)
300 repo = self._factory.repo(wire)
296 object_store = repo.object_store
301 object_store = repo.object_store
297
302
298 # Create tree and populates it with blobs
303 # Create tree and populates it with blobs
299 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
304 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
300
305
301 for node in updated:
306 for node in updated:
302 # Compute subdirs if needed
307 # Compute subdirs if needed
303 dirpath, nodename = vcspath.split(node['path'])
308 dirpath, nodename = vcspath.split(node['path'])
304 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
309 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
305 parent = commit_tree
310 parent = commit_tree
306 ancestors = [('', parent)]
311 ancestors = [('', parent)]
307
312
308 # Tries to dig for the deepest existing tree
313 # Tries to dig for the deepest existing tree
309 while dirnames:
314 while dirnames:
310 curdir = dirnames.pop(0)
315 curdir = dirnames.pop(0)
311 try:
316 try:
312 dir_id = parent[curdir][1]
317 dir_id = parent[curdir][1]
313 except KeyError:
318 except KeyError:
314 # put curdir back into dirnames and stop
319 # put curdir back into dirnames and stop
315 dirnames.insert(0, curdir)
320 dirnames.insert(0, curdir)
316 break
321 break
317 else:
322 else:
318 # If found, updates parent
323 # If found, updates parent
319 parent = repo[dir_id]
324 parent = repo[dir_id]
320 ancestors.append((curdir, parent))
325 ancestors.append((curdir, parent))
321 # Now parent is the deepest existing tree and we need to create
326 # Now parent is the deepest existing tree and we need to create
322 # subtrees for dirnames (in reverse order)
327 # subtrees for dirnames (in reverse order)
323 # [this only applies to newly added nodes]
328 # [this only applies to newly added nodes]
324 new_trees = []
329 new_trees = []
325
330
326 blob = objects.Blob.from_string(node['content'])
331 blob = objects.Blob.from_string(node['content'])
327
332
328 if dirnames:
333 if dirnames:
329 # If there are trees which should be created we need to build
334 # If there are trees which should be created we need to build
330 # them now (in reverse order)
335 # them now (in reverse order)
331 reversed_dirnames = list(reversed(dirnames))
336 reversed_dirnames = list(reversed(dirnames))
332 curtree = objects.Tree()
337 curtree = objects.Tree()
333 curtree[node['node_path']] = node['mode'], blob.id
338 curtree[node['node_path']] = node['mode'], blob.id
334 new_trees.append(curtree)
339 new_trees.append(curtree)
335 for dirname in reversed_dirnames[:-1]:
340 for dirname in reversed_dirnames[:-1]:
336 newtree = objects.Tree()
341 newtree = objects.Tree()
337 newtree[dirname] = (DIR_STAT, curtree.id)
342 newtree[dirname] = (DIR_STAT, curtree.id)
338 new_trees.append(newtree)
343 new_trees.append(newtree)
339 curtree = newtree
344 curtree = newtree
340 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
345 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
341 else:
346 else:
342 parent.add(
347 parent.add(
343 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
348 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
344
349
345 new_trees.append(parent)
350 new_trees.append(parent)
346 # Update ancestors
351 # Update ancestors
347 reversed_ancestors = reversed(
352 reversed_ancestors = reversed(
348 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
353 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
349 for parent, tree, path in reversed_ancestors:
354 for parent, tree, path in reversed_ancestors:
350 parent[path] = (DIR_STAT, tree.id)
355 parent[path] = (DIR_STAT, tree.id)
351 object_store.add_object(tree)
356 object_store.add_object(tree)
352
357
353 object_store.add_object(blob)
358 object_store.add_object(blob)
354 for tree in new_trees:
359 for tree in new_trees:
355 object_store.add_object(tree)
360 object_store.add_object(tree)
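# Worked example (editor sketch, illustrative paths): for a node with path
# 'docs/api/index.rst', dirnames is ['docs', 'api']. If only 'docs' already
# exists in commit_tree, the lookup loop stops with dirnames == ['api'] and
# ancestors == [('', root), ('docs', <docs tree>)]; a new 'api' tree holding
# the blob is then created and linked back into 'docs', and the
# reversed_ancestors pass rewrites the updated 'docs' entry into the root tree.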
356
361
357 for node_path in removed:
362 for node_path in removed:
358 paths = node_path.split('/')
363 paths = node_path.split('/')
359 tree = commit_tree
364 tree = commit_tree
360 trees = [tree]
365 trees = [tree]
361 # Traverse deep into the forest...
366 # Traverse deep into the forest...
362 for path in paths:
367 for path in paths:
363 try:
368 try:
364 obj = repo[tree[path][1]]
369 obj = repo[tree[path][1]]
365 if isinstance(obj, objects.Tree):
370 if isinstance(obj, objects.Tree):
366 trees.append(obj)
371 trees.append(obj)
367 tree = obj
372 tree = obj
368 except KeyError:
373 except KeyError:
369 break
374 break
370 # Cut down the blob and all rotten trees on the way back...
375 # Cut down the blob and all rotten trees on the way back...
371 for path, tree in reversed(zip(paths, trees)):
376 for path, tree in reversed(zip(paths, trees)):
372 del tree[path]
377 del tree[path]
373 if tree:
378 if tree:
374 # This tree still has elements - don't remove it or any
379 # This tree still has elements - don't remove it or any
375 # of its parents
380 # of its parents
376 break
381 break
377
382
378 object_store.add_object(commit_tree)
383 object_store.add_object(commit_tree)
379
384
380 # Create commit
385 # Create commit
381 commit = objects.Commit()
386 commit = objects.Commit()
382 commit.tree = commit_tree.id
387 commit.tree = commit_tree.id
383 for k, v in commit_data.iteritems():
388 for k, v in commit_data.iteritems():
384 setattr(commit, k, v)
389 setattr(commit, k, v)
385 object_store.add_object(commit)
390 object_store.add_object(commit)
386
391
387 ref = 'refs/heads/%s' % branch
392 ref = 'refs/heads/%s' % branch
388 repo.refs[ref] = commit.id
393 repo.refs[ref] = commit.id
389
394
390 return commit.id
395 return commit.id
391
396
392 @reraise_safe_exceptions
397 @reraise_safe_exceptions
393 def fetch(self, wire, url, apply_refs=True, refs=None):
398 def fetch(self, wire, url, apply_refs=True, refs=None):
394 if url != 'default' and '://' not in url:
399 if url != 'default' and '://' not in url:
395 client = LocalGitClient(url)
400 client = LocalGitClient(url)
396 else:
401 else:
397 url_obj = url_parser(url)
402 url_obj = url_parser(url)
398 o = self._build_opener(url)
403 o = self._build_opener(url)
399 url, _ = url_obj.authinfo()
404 url, _ = url_obj.authinfo()
400 client = HttpGitClient(base_url=url, opener=o)
405 client = HttpGitClient(base_url=url, opener=o)
401 repo = self._factory.repo(wire)
406 repo = self._factory.repo(wire)
402
407
403 determine_wants = repo.object_store.determine_wants_all
408 determine_wants = repo.object_store.determine_wants_all
404 if refs:
409 if refs:
405 def determine_wants_requested(references):
410 def determine_wants_requested(references):
406 return [references[r] for r in references if r in refs]
411 return [references[r] for r in references if r in refs]
407 determine_wants = determine_wants_requested
412 determine_wants = determine_wants_requested
408
413
409 try:
414 try:
410 remote_refs = client.fetch(
415 remote_refs = client.fetch(
411 path=url, target=repo, determine_wants=determine_wants)
416 path=url, target=repo, determine_wants=determine_wants)
412 except NotGitRepository as e:
417 except NotGitRepository as e:
413 log.warning(
418 log.warning(
414 'Trying to fetch from "%s" failed, not a Git repository.', url)
419 'Trying to fetch from "%s" failed, not a Git repository.', url)
415 # Exception can contain unicode which we convert
420 # Exception can contain unicode which we convert
416 raise exceptions.AbortException(repr(e))
421 raise exceptions.AbortException(repr(e))
417
422
418 # mikhail: client.fetch() returns all the remote refs, but fetches only
423 # mikhail: client.fetch() returns all the remote refs, but fetches only
419 # refs filtered by `determine_wants` function. We need to filter result
424 # refs filtered by `determine_wants` function. We need to filter result
420 # as well
425 # as well
421 if refs:
426 if refs:
422 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
427 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
423
428
424 if apply_refs:
429 if apply_refs:
425 # TODO: johbo: Needs proper test coverage with a git repository
430 # TODO: johbo: Needs proper test coverage with a git repository
426 # that contains a tag object, so that we would end up with
431 # that contains a tag object, so that we would end up with
427 # a peeled ref at this point.
432 # a peeled ref at this point.
428 PEELED_REF_MARKER = '^{}'
433 PEELED_REF_MARKER = '^{}'
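# e.g. 'refs/tags/v1.0^{}' (illustrative tag name) is the peeled companion
# of an annotated tag and points at the commit the tag object dereferences
# to, so such entries are skipped below.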
429 for k in remote_refs:
434 for k in remote_refs:
430 if k.endswith(PEELED_REF_MARKER):
435 if k.endswith(PEELED_REF_MARKER):
431 log.info("Skipping peeled reference %s", k)
436 log.info("Skipping peeled reference %s", k)
432 continue
437 continue
433 repo[k] = remote_refs[k]
438 repo[k] = remote_refs[k]
434
439
435 if refs:
440 if refs:
436 # mikhail: explicitly set the head to the last ref.
441 # mikhail: explicitly set the head to the last ref.
437 repo['HEAD'] = remote_refs[refs[-1]]
442 repo['HEAD'] = remote_refs[refs[-1]]
438
443
439 # TODO: mikhail: should we return remote_refs here to be
444 # TODO: mikhail: should we return remote_refs here to be
440 # consistent?
445 # consistent?
441 else:
446 else:
442 return remote_refs
447 return remote_refs
443
448
444 @reraise_safe_exceptions
449 @reraise_safe_exceptions
450 def sync_push(self, wire, url, refs=None):
451 if self.check_url(url, wire):
452 repo = self._factory.repo(wire)
453 self.run_git_command(
454 wire, ['push', url, '--mirror'], fail_on_stderr=False)
455
456
457 @reraise_safe_exceptions
445 def get_remote_refs(self, wire, url):
458 def get_remote_refs(self, wire, url):
446 repo = Repo(url)
459 repo = Repo(url)
447 return repo.get_refs()
460 return repo.get_refs()
448
461
449 @reraise_safe_exceptions
462 @reraise_safe_exceptions
450 def get_description(self, wire):
463 def get_description(self, wire):
451 repo = self._factory.repo(wire)
464 repo = self._factory.repo(wire)
452 return repo.get_description()
465 return repo.get_description()
453
466
454 @reraise_safe_exceptions
467 @reraise_safe_exceptions
455 def get_file_history(self, wire, file_path, commit_id, limit):
468 def get_file_history(self, wire, file_path, commit_id, limit):
456 repo = self._factory.repo(wire)
469 repo = self._factory.repo(wire)
457 include = [commit_id]
470 include = [commit_id]
458 paths = [file_path]
471 paths = [file_path]
459
472
460 walker = repo.get_walker(include, paths=paths, max_entries=limit)
473 walker = repo.get_walker(include, paths=paths, max_entries=limit)
461 return [x.commit.id for x in walker]
474 return [x.commit.id for x in walker]
462
475
463 @reraise_safe_exceptions
476 @reraise_safe_exceptions
464 def get_missing_revs(self, wire, rev1, rev2, path2):
477 def get_missing_revs(self, wire, rev1, rev2, path2):
465 repo = self._factory.repo(wire)
478 repo = self._factory.repo(wire)
466 LocalGitClient(thin_packs=False).fetch(path2, repo)
479 LocalGitClient(thin_packs=False).fetch(path2, repo)
467
480
468 wire_remote = wire.copy()
481 wire_remote = wire.copy()
469 wire_remote['path'] = path2
482 wire_remote['path'] = path2
470 repo_remote = self._factory.repo(wire_remote)
483 repo_remote = self._factory.repo(wire_remote)
471 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
484 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
472
485
473 revs = [
486 revs = [
474 x.commit.id
487 x.commit.id
475 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
488 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
476 return revs
489 return revs
477
490
478 @reraise_safe_exceptions
491 @reraise_safe_exceptions
479 def get_object(self, wire, sha):
492 def get_object(self, wire, sha):
480 repo = self._factory.repo(wire)
493 repo = self._factory.repo(wire)
481 obj = repo.get_object(sha)
494 obj = repo.get_object(sha)
482 commit_id = obj.id
495 commit_id = obj.id
483
496
484 if isinstance(obj, Tag):
497 if isinstance(obj, Tag):
485 commit_id = obj.object[1]
498 commit_id = obj.object[1]
486
499
487 return {
500 return {
488 'id': obj.id,
501 'id': obj.id,
489 'type': obj.type_name,
502 'type': obj.type_name,
490 'commit_id': commit_id
503 'commit_id': commit_id
491 }
504 }
492
505
493 @reraise_safe_exceptions
506 @reraise_safe_exceptions
494 def get_object_attrs(self, wire, sha, *attrs):
507 def get_object_attrs(self, wire, sha, *attrs):
495 repo = self._factory.repo(wire)
508 repo = self._factory.repo(wire)
496 obj = repo.get_object(sha)
509 obj = repo.get_object(sha)
497 return list(getattr(obj, a) for a in attrs)
510 return list(getattr(obj, a) for a in attrs)
498
511
499 @reraise_safe_exceptions
512 @reraise_safe_exceptions
500 def get_refs(self, wire):
513 def get_refs(self, wire):
501 repo = self._factory.repo(wire)
514 repo = self._factory.repo(wire)
502 result = {}
515 result = {}
503 for ref, sha in repo.refs.as_dict().items():
516 for ref, sha in repo.refs.as_dict().items():
504 peeled_sha = repo.get_peeled(ref)
517 peeled_sha = repo.get_peeled(ref)
505 result[ref] = peeled_sha
518 result[ref] = peeled_sha
506 return result
519 return result
507
520
508 @reraise_safe_exceptions
521 @reraise_safe_exceptions
509 def get_refs_path(self, wire):
522 def get_refs_path(self, wire):
510 repo = self._factory.repo(wire)
523 repo = self._factory.repo(wire)
511 return repo.refs.path
524 return repo.refs.path
512
525
513 @reraise_safe_exceptions
526 @reraise_safe_exceptions
514 def head(self, wire):
527 def head(self, wire):
515 repo = self._factory.repo(wire)
528 repo = self._factory.repo(wire)
516 return repo.head()
529 return repo.head()
517
530
518 @reraise_safe_exceptions
531 @reraise_safe_exceptions
519 def init(self, wire):
532 def init(self, wire):
520 repo_path = str_to_dulwich(wire['path'])
533 repo_path = str_to_dulwich(wire['path'])
521 self.repo = Repo.init(repo_path)
534 self.repo = Repo.init(repo_path)
522
535
523 @reraise_safe_exceptions
536 @reraise_safe_exceptions
524 def init_bare(self, wire):
537 def init_bare(self, wire):
525 repo_path = str_to_dulwich(wire['path'])
538 repo_path = str_to_dulwich(wire['path'])
526 self.repo = Repo.init_bare(repo_path)
539 self.repo = Repo.init_bare(repo_path)
527
540
528 @reraise_safe_exceptions
541 @reraise_safe_exceptions
529 def revision(self, wire, rev):
542 def revision(self, wire, rev):
530 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
531 obj = repo[rev]
544 obj = repo[rev]
532 obj_data = {
545 obj_data = {
533 'id': obj.id,
546 'id': obj.id,
534 }
547 }
535 try:
548 try:
536 obj_data['tree'] = obj.tree
549 obj_data['tree'] = obj.tree
537 except AttributeError:
550 except AttributeError:
538 pass
551 pass
539 return obj_data
552 return obj_data
540
553
541 @reraise_safe_exceptions
554 @reraise_safe_exceptions
542 def commit_attribute(self, wire, rev, attr):
555 def commit_attribute(self, wire, rev, attr):
543 repo = self._factory.repo(wire)
556 repo = self._factory.repo(wire)
544 obj = repo[rev]
557 obj = repo[rev]
545 return getattr(obj, attr)
558 return getattr(obj, attr)
546
559
547 @reraise_safe_exceptions
560 @reraise_safe_exceptions
548 def set_refs(self, wire, key, value):
561 def set_refs(self, wire, key, value):
549 repo = self._factory.repo(wire)
562 repo = self._factory.repo(wire)
550 repo.refs[key] = value
563 repo.refs[key] = value
551
564
552 @reraise_safe_exceptions
565 @reraise_safe_exceptions
553 def remove_ref(self, wire, key):
566 def remove_ref(self, wire, key):
554 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
555 del repo.refs[key]
568 del repo.refs[key]
556
569
557 @reraise_safe_exceptions
570 @reraise_safe_exceptions
558 def tree_changes(self, wire, source_id, target_id):
571 def tree_changes(self, wire, source_id, target_id):
559 repo = self._factory.repo(wire)
572 repo = self._factory.repo(wire)
560 source = repo[source_id].tree if source_id else None
573 source = repo[source_id].tree if source_id else None
561 target = repo[target_id].tree
574 target = repo[target_id].tree
562 result = repo.object_store.tree_changes(source, target)
575 result = repo.object_store.tree_changes(source, target)
563 return list(result)
576 return list(result)
564
577
565 @reraise_safe_exceptions
578 @reraise_safe_exceptions
566 def tree_items(self, wire, tree_id):
579 def tree_items(self, wire, tree_id):
567 repo = self._factory.repo(wire)
580 repo = self._factory.repo(wire)
568 tree = repo[tree_id]
581 tree = repo[tree_id]
569
582
570 result = []
583 result = []
571 for item in tree.iteritems():
584 for item in tree.iteritems():
572 item_sha = item.sha
585 item_sha = item.sha
573 item_mode = item.mode
586 item_mode = item.mode
574
587
575 if FILE_MODE(item_mode) == GIT_LINK:
588 if FILE_MODE(item_mode) == GIT_LINK:
576 item_type = "link"
589 item_type = "link"
577 else:
590 else:
578 item_type = repo[item_sha].type_name
591 item_type = repo[item_sha].type_name
579
592
580 result.append((item.path, item_mode, item_sha, item_type))
593 result.append((item.path, item_mode, item_sha, item_type))
581 return result
594 return result
582
595
583 @reraise_safe_exceptions
596 @reraise_safe_exceptions
584 def update_server_info(self, wire):
597 def update_server_info(self, wire):
585 repo = self._factory.repo(wire)
598 repo = self._factory.repo(wire)
586 update_server_info(repo)
599 update_server_info(repo)
587
600
588 @reraise_safe_exceptions
601 @reraise_safe_exceptions
589 def discover_git_version(self):
602 def discover_git_version(self):
590 stdout, _ = self.run_git_command(
603 stdout, _ = self.run_git_command(
591 {}, ['--version'], _bare=True, _safe=True)
604 {}, ['--version'], _bare=True, _safe=True)
592 prefix = 'git version'
605 prefix = 'git version'
593 if stdout.startswith(prefix):
606 if stdout.startswith(prefix):
594 stdout = stdout[len(prefix):]
607 stdout = stdout[len(prefix):]
595 return stdout.strip()
608 return stdout.strip()
596
609
597 @reraise_safe_exceptions
610 @reraise_safe_exceptions
598 def run_git_command(self, wire, cmd, **opts):
611 def run_git_command(self, wire, cmd, **opts):
599 path = wire.get('path', None)
612 path = wire.get('path', None)
600
613
601 if path and os.path.isdir(path):
614 if path and os.path.isdir(path):
602 opts['cwd'] = path
615 opts['cwd'] = path
603
616
604 if '_bare' in opts:
617 if '_bare' in opts:
605 _copts = []
618 _copts = []
606 del opts['_bare']
619 del opts['_bare']
607 else:
620 else:
608 _copts = ['-c', 'core.quotepath=false', ]
621 _copts = ['-c', 'core.quotepath=false', ]
609 safe_call = False
622 safe_call = False
610 if '_safe' in opts:
623 if '_safe' in opts:
611 # no exc on failure
624 # no exc on failure
612 del opts['_safe']
625 del opts['_safe']
613 safe_call = True
626 safe_call = True
614
627
615 gitenv = os.environ.copy()
628 gitenv = os.environ.copy()
616 gitenv.update(opts.pop('extra_env', {}))
629 gitenv.update(opts.pop('extra_env', {}))
617 # need to clear GIT_DIR from the environment
630 # need to clear GIT_DIR from the environment
618 if 'GIT_DIR' in gitenv:
631 if 'GIT_DIR' in gitenv:
619 del gitenv['GIT_DIR']
632 del gitenv['GIT_DIR']
620 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
633 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
621
634
622 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
635 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
623
636
624 try:
637 try:
625 _opts = {'env': gitenv, 'shell': False}
638 _opts = {'env': gitenv, 'shell': False}
626 _opts.update(opts)
639 _opts.update(opts)
627 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
640 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
628
641
629 return ''.join(p), ''.join(p.error)
642 return ''.join(p), ''.join(p.error)
630 except (EnvironmentError, OSError) as err:
643 except (EnvironmentError, OSError) as err:
631 cmd = ' '.join(cmd) # human friendly CMD
644 cmd = ' '.join(cmd) # human friendly CMD
632 tb_err = ("Couldn't run git command (%s).\n"
645 tb_err = ("Couldn't run git command (%s).\n"
633 "Original error was: %s\n" % (cmd, err))
646 "Original error was: %s\n" % (cmd, err))
634 log.exception(tb_err)
647 log.exception(tb_err)
635 if safe_call:
648 if safe_call:
636 return '', err
649 return '', err
637 else:
650 else:
638 raise exceptions.VcsException(tb_err)
651 raise exceptions.VcsException(tb_err)
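# Illustrative usage (editor sketch): discover_git_version() above calls
#   self.run_git_command({}, ['--version'], _bare=True, _safe=True)
# `_bare` skips the default `-c core.quotepath=false` options and `_safe`
# returns the error instead of raising VcsException when the call fails.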
639
652
640
653
641 def str_to_dulwich(value):
654 def str_to_dulwich(value):
642 """
655 """
643 Dulwich 0.10.1a requires `unicode` objects to be passed in.
656 Dulwich 0.10.1a requires `unicode` objects to be passed in.
644 """
657 """
645 return value.decode(settings.WIRE_ENCODING)
658 return value.decode(settings.WIRE_ENCODING)
@@ -1,19 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 from app import create_app
19 from app import create_app
@@ -1,287 +1,287 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import re
18 import re
19 import logging
19 import logging
20 from wsgiref.util import FileWrapper
20 from wsgiref.util import FileWrapper
21
21
22 import simplejson as json
22 import simplejson as json
23 from pyramid.config import Configurator
23 from pyramid.config import Configurator
24 from pyramid.response import Response, FileIter
24 from pyramid.response import Response, FileIter
25 from pyramid.httpexceptions import (
25 from pyramid.httpexceptions import (
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
26 HTTPBadRequest, HTTPNotImplemented, HTTPNotFound, HTTPForbidden,
27 HTTPUnprocessableEntity)
27 HTTPUnprocessableEntity)
28
28
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
29 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
30 from vcsserver.git_lfs.utils import safe_result, get_cython_compat_decorator
31 from vcsserver.utils import safe_int
31 from vcsserver.utils import safe_int
32
32
33 log = logging.getLogger(__name__)
33 log = logging.getLogger(__name__)
34
34
35
35
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs'  # the '+json' suffix is appended where needed
36 GIT_LFS_CONTENT_TYPE = 'application/vnd.git-lfs'  # the '+json' suffix is appended where needed
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
37 GIT_LFS_PROTO_PAT = re.compile(r'^/(.+)/(info/lfs/(.+))')
38
38
39
39
40 def write_response_error(http_exception, text=None):
40 def write_response_error(http_exception, text=None):
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
41 content_type = GIT_LFS_CONTENT_TYPE + '+json'
42 _exception = http_exception(content_type=content_type)
42 _exception = http_exception(content_type=content_type)
43 _exception.content_type = content_type
43 _exception.content_type = content_type
44 if text:
44 if text:
45 _exception.body = json.dumps({'message': text})
45 _exception.body = json.dumps({'message': text})
46 log.debug('LFS: writing response of type %s to client with text:%s',
46 log.debug('LFS: writing response of type %s to client with text:%s',
47 http_exception, text)
47 http_exception, text)
48 return _exception
48 return _exception
49
49
50
50
51 class AuthHeaderRequired(object):
51 class AuthHeaderRequired(object):
52 """
52 """
53 Decorator to check if request has proper auth-header
53 Decorator to check if request has proper auth-header
54 """
54 """
55
55
56 def __call__(self, func):
56 def __call__(self, func):
57 return get_cython_compat_decorator(self.__wrapper, func)
57 return get_cython_compat_decorator(self.__wrapper, func)
58
58
59 def __wrapper(self, func, *fargs, **fkwargs):
59 def __wrapper(self, func, *fargs, **fkwargs):
60 request = fargs[1]
60 request = fargs[1]
61 auth = request.authorization
61 auth = request.authorization
62 if not auth:
62 if not auth:
63 return write_response_error(HTTPForbidden)
63 return write_response_error(HTTPForbidden)
64 return func(*fargs[1:], **fkwargs)
64 return func(*fargs[1:], **fkwargs)
65
65
66
66
67 # views
67 # views
68
68
69 def lfs_objects(request):
69 def lfs_objects(request):
70 # indicate that the v1 API is not supported
70 # indicate that the v1 API is not supported
71 log.warning('LFS: v1 api not supported, reporting it back to client')
71 log.warning('LFS: v1 api not supported, reporting it back to client')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
72 return write_response_error(HTTPNotImplemented, 'LFS: v1 api not supported')
73
73
74
74
75 @AuthHeaderRequired()
75 @AuthHeaderRequired()
76 def lfs_objects_batch(request):
76 def lfs_objects_batch(request):
77 """
77 """
78 The client sends the following information to the Batch endpoint to transfer some objects:
78 The client sends the following information to the Batch endpoint to transfer some objects:
79
79
80 operation - Should be download or upload.
80 operation - Should be download or upload.
81 transfers - An optional Array of String identifiers for transfer
81 transfers - An optional Array of String identifiers for transfer
82 adapters that the client has configured. If omitted, the basic
82 adapters that the client has configured. If omitted, the basic
83 transfer adapter MUST be assumed by the server.
83 transfer adapter MUST be assumed by the server.
84 objects - An Array of objects to transfer (download or upload).
84 objects - An Array of objects to transfer (download or upload).
85 oid - String OID of the LFS object.
85 oid - String OID of the LFS object.
86 size - Integer byte size of the LFS object. Must be at least zero.
86 size - Integer byte size of the LFS object. Must be at least zero.
87 """
87 """
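# Example request body (editor sketch, illustrative oid/size values)
# matching the fields described in the docstring above:
#   {"operation": "download",
#    "transfers": ["basic"],
#    "objects": [{"oid": "31d9...", "size": 12}]}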
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
88 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
89 auth = request.authorization
89 auth = request.authorization
90 repo = request.matchdict.get('repo')
90 repo = request.matchdict.get('repo')
91 data = request.json
91 data = request.json
92 operation = data.get('operation')
92 operation = data.get('operation')
93 if operation not in ('download', 'upload'):
93 if operation not in ('download', 'upload'):
94 log.debug('LFS: unsupported operation:%s', operation)
94 log.debug('LFS: unsupported operation:%s', operation)
95 return write_response_error(
95 return write_response_error(
96 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
96 HTTPBadRequest, 'unsupported operation mode: `%s`' % operation)
97
97
98 if 'objects' not in data:
98 if 'objects' not in data:
99 log.debug('LFS: missing objects data')
99 log.debug('LFS: missing objects data')
100 return write_response_error(
100 return write_response_error(
101 HTTPBadRequest, 'missing objects data')
101 HTTPBadRequest, 'missing objects data')
102
102
103 log.debug('LFS: handling operation of type: %s', operation)
103 log.debug('LFS: handling operation of type: %s', operation)
104
104
105 objects = []
105 objects = []
106 for o in data['objects']:
106 for o in data['objects']:
107 try:
107 try:
108 oid = o['oid']
108 oid = o['oid']
109 obj_size = o['size']
109 obj_size = o['size']
110 except KeyError:
110 except KeyError:
111 log.exception('LFS, failed to extract data')
111 log.exception('LFS, failed to extract data')
112 return write_response_error(
112 return write_response_error(
113 HTTPBadRequest, 'unsupported data in objects')
113 HTTPBadRequest, 'unsupported data in objects')
114
114
115 obj_data = {'oid': oid}
115 obj_data = {'oid': oid}
116
116
117 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid)
117 obj_href = request.route_url('lfs_objects_oid', repo=repo, oid=oid)
118 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo)
118 obj_verify_href = request.route_url('lfs_objects_verify', repo=repo)
119 store = LFSOidStore(
119 store = LFSOidStore(
120 oid, repo, store_location=request.registry.git_lfs_store_path)
120 oid, repo, store_location=request.registry.git_lfs_store_path)
121 handler = OidHandler(
121 handler = OidHandler(
122 store, repo, auth, oid, obj_size, obj_data,
122 store, repo, auth, oid, obj_size, obj_data,
123 obj_href, obj_verify_href)
123 obj_href, obj_verify_href)
124
124
125 # this also verifies the OIDs
125 # this also verifies the OIDs
126 actions, errors = handler.exec_operation(operation)
126 actions, errors = handler.exec_operation(operation)
127 if errors:
127 if errors:
128 log.warning('LFS: got the following errors: %s', errors)
128 log.warning('LFS: got the following errors: %s', errors)
129 obj_data['errors'] = errors
129 obj_data['errors'] = errors
130
130
131 if actions:
131 if actions:
132 obj_data['actions'] = actions
132 obj_data['actions'] = actions
133
133
134 obj_data['size'] = obj_size
134 obj_data['size'] = obj_size
135 obj_data['authenticated'] = True
135 obj_data['authenticated'] = True
136 objects.append(obj_data)
136 objects.append(obj_data)
137
137
138 result = {'objects': objects, 'transfer': 'basic'}
138 result = {'objects': objects, 'transfer': 'basic'}
139 log.debug('LFS Response %s', safe_result(result))
139 log.debug('LFS Response %s', safe_result(result))
140
140
141 return result
141 return result
142
142
143
143
144 def lfs_objects_oid_upload(request):
144 def lfs_objects_oid_upload(request):
145 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
145 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
146 repo = request.matchdict.get('repo')
146 repo = request.matchdict.get('repo')
147 oid = request.matchdict.get('oid')
147 oid = request.matchdict.get('oid')
148 store = LFSOidStore(
148 store = LFSOidStore(
149 oid, repo, store_location=request.registry.git_lfs_store_path)
149 oid, repo, store_location=request.registry.git_lfs_store_path)
150 engine = store.get_engine(mode='wb')
150 engine = store.get_engine(mode='wb')
151 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
151 log.debug('LFS: starting chunked write of LFS oid: %s to storage', oid)
152
152
153 body = request.environ['wsgi.input']
153 body = request.environ['wsgi.input']
154
154
155 with engine as f:
155 with engine as f:
156 blksize = 64 * 1024 # 64kb
156 blksize = 64 * 1024 # 64kb
157 while True:
157 while True:
158 # read in chunks as the stream comes in from Gunicorn;
158 # read in chunks as the stream comes in from Gunicorn;
159 # this relies on Gunicorn-specific handling of wsgi.input and
159 # this relies on Gunicorn-specific handling of wsgi.input and
160 # might work differently under Waitress
160 # might work differently under Waitress
161 chunk = body.read(blksize)
161 chunk = body.read(blksize)
162 if not chunk:
162 if not chunk:
163 break
163 break
164 f.write(chunk)
164 f.write(chunk)
165
165
166 return {'upload': 'ok'}
166 return {'upload': 'ok'}
167
167
168
168
169 def lfs_objects_oid_download(request):
169 def lfs_objects_oid_download(request):
170 repo = request.matchdict.get('repo')
170 repo = request.matchdict.get('repo')
171 oid = request.matchdict.get('oid')
171 oid = request.matchdict.get('oid')
172
172
173 store = LFSOidStore(
173 store = LFSOidStore(
174 oid, repo, store_location=request.registry.git_lfs_store_path)
174 oid, repo, store_location=request.registry.git_lfs_store_path)
175 if not store.has_oid():
175 if not store.has_oid():
176 log.debug('LFS: oid %s does not exist in store', oid)
176 log.debug('LFS: oid %s does not exist in store', oid)
177 return write_response_error(
177 return write_response_error(
178 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
178 HTTPNotFound, 'requested file with oid `%s` not found in store' % oid)
179
179
180 # TODO(marcink): support range header ?
180 # TODO(marcink): support range header ?
181 # Range: bytes=0-, `bytes=(\d+)\-.*`
181 # Range: bytes=0-, `bytes=(\d+)\-.*`
182
182
183 f = open(store.oid_path, 'rb')
183 f = open(store.oid_path, 'rb')
184 response = Response(
184 response = Response(
185 content_type='application/octet-stream', app_iter=FileIter(f))
185 content_type='application/octet-stream', app_iter=FileIter(f))
186 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
186 response.headers.add('X-RC-LFS-Response-Oid', str(oid))
187 return response
187 return response
188
188
189
189
190 def lfs_objects_verify(request):
190 def lfs_objects_verify(request):
191 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
191 request.response.content_type = GIT_LFS_CONTENT_TYPE + '+json'
192 repo = request.matchdict.get('repo')
192 repo = request.matchdict.get('repo')
193
193
194 data = request.json
194 data = request.json
195 oid = data.get('oid')
195 oid = data.get('oid')
196 size = safe_int(data.get('size'))
196 size = safe_int(data.get('size'))
197
197
198 if not (oid and size):
198 if not (oid and size):
199 return write_response_error(
199 return write_response_error(
200 HTTPBadRequest, 'missing oid and size in request data')
200 HTTPBadRequest, 'missing oid and size in request data')
201
201
202 store = LFSOidStore(
202 store = LFSOidStore(
203 oid, repo, store_location=request.registry.git_lfs_store_path)
203 oid, repo, store_location=request.registry.git_lfs_store_path)
204 if not store.has_oid():
204 if not store.has_oid():
205 log.debug('LFS: oid %s does not exist in store', oid)
205 log.debug('LFS: oid %s does not exist in store', oid)
206 return write_response_error(
206 return write_response_error(
207 HTTPNotFound, 'oid `%s` does not exist in store' % oid)
207 HTTPNotFound, 'oid `%s` does not exist in store' % oid)
208
208
209 store_size = store.size_oid()
209 store_size = store.size_oid()
210 if store_size != size:
210 if store_size != size:
211 msg = 'requested file size mismatch: store size %s, requested %s' % (
211 msg = 'requested file size mismatch: store size %s, requested %s' % (
212 store_size, size)
212 store_size, size)
213 return write_response_error(
213 return write_response_error(
214 HTTPUnprocessableEntity, msg)
214 HTTPUnprocessableEntity, msg)
215
215
216 return {'message': {'size': 'ok', 'in_store': 'ok'}}
216 return {'message': {'size': 'ok', 'in_store': 'ok'}}
217
217
218
218
219 def lfs_objects_lock(request):
219 def lfs_objects_lock(request):
220 return write_response_error(
220 return write_response_error(
221 HTTPNotImplemented, 'GIT LFS locking api not supported')
221 HTTPNotImplemented, 'GIT LFS locking api not supported')
222
222
223
223
224 def not_found(request):
224 def not_found(request):
225 return write_response_error(
225 return write_response_error(
226 HTTPNotFound, 'request path not found')
226 HTTPNotFound, 'request path not found')
227
227
228
228
229 def lfs_disabled(request):
229 def lfs_disabled(request):
230 return write_response_error(
230 return write_response_error(
231 HTTPNotImplemented, 'GIT LFS disabled for this repo')
231 HTTPNotImplemented, 'GIT LFS disabled for this repo')
232
232
233
233
234 def git_lfs_app(config):
234 def git_lfs_app(config):
235
235
236 # v1 API deprecation endpoint
236 # v1 API deprecation endpoint
237 config.add_route('lfs_objects',
237 config.add_route('lfs_objects',
238 '/{repo:.*?[^/]}/info/lfs/objects')
238 '/{repo:.*?[^/]}/info/lfs/objects')
239 config.add_view(lfs_objects, route_name='lfs_objects',
239 config.add_view(lfs_objects, route_name='lfs_objects',
240 request_method='POST', renderer='json')
240 request_method='POST', renderer='json')
241
241
242 # locking API
242 # locking API
243 config.add_route('lfs_objects_lock',
243 config.add_route('lfs_objects_lock',
244 '/{repo:.*?[^/]}/info/lfs/locks')
244 '/{repo:.*?[^/]}/info/lfs/locks')
245 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
245 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock',
246 request_method=('POST', 'GET'), renderer='json')
246 request_method=('POST', 'GET'), renderer='json')
247
247
248 config.add_route('lfs_objects_lock_verify',
248 config.add_route('lfs_objects_lock_verify',
249 '/{repo:.*?[^/]}/info/lfs/locks/verify')
249 '/{repo:.*?[^/]}/info/lfs/locks/verify')
250 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
250 config.add_view(lfs_objects_lock, route_name='lfs_objects_lock_verify',
251 request_method=('POST', 'GET'), renderer='json')
251 request_method=('POST', 'GET'), renderer='json')
252
252
253 # batch API
253 # batch API
254 config.add_route('lfs_objects_batch',
254 config.add_route('lfs_objects_batch',
255 '/{repo:.*?[^/]}/info/lfs/objects/batch')
255 '/{repo:.*?[^/]}/info/lfs/objects/batch')
256 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
256 config.add_view(lfs_objects_batch, route_name='lfs_objects_batch',
257 request_method='POST', renderer='json')
257 request_method='POST', renderer='json')
258
258
259 # oid upload/download API
259 # oid upload/download API
260 config.add_route('lfs_objects_oid',
260 config.add_route('lfs_objects_oid',
261 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
261 '/{repo:.*?[^/]}/info/lfs/objects/{oid}')
262 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
262 config.add_view(lfs_objects_oid_upload, route_name='lfs_objects_oid',
263 request_method='PUT', renderer='json')
263 request_method='PUT', renderer='json')
264 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
264 config.add_view(lfs_objects_oid_download, route_name='lfs_objects_oid',
265 request_method='GET', renderer='json')
265 request_method='GET', renderer='json')
266
266
267 # verification API
267 # verification API
268 config.add_route('lfs_objects_verify',
268 config.add_route('lfs_objects_verify',
269 '/{repo:.*?[^/]}/info/lfs/verify')
269 '/{repo:.*?[^/]}/info/lfs/verify')
270 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
270 config.add_view(lfs_objects_verify, route_name='lfs_objects_verify',
271 request_method='POST', renderer='json')
271 request_method='POST', renderer='json')
272
272
273 # not found handler for API
273 # not found handler for API
274 config.add_notfound_view(not_found, renderer='json')
274 config.add_notfound_view(not_found, renderer='json')
275
275
276
276
277 def create_app(git_lfs_enabled, git_lfs_store_path):
277 def create_app(git_lfs_enabled, git_lfs_store_path):
278 config = Configurator()
278 config = Configurator()
279 if git_lfs_enabled:
279 if git_lfs_enabled:
280 config.include(git_lfs_app)
280 config.include(git_lfs_app)
281 config.registry.git_lfs_store_path = git_lfs_store_path
281 config.registry.git_lfs_store_path = git_lfs_store_path
282 else:
282 else:
283 # not found handler for API, reporting disabled LFS support
283 # not found handler for API, reporting disabled LFS support
284 config.add_notfound_view(lfs_disabled, renderer='json')
284 config.add_notfound_view(lfs_disabled, renderer='json')
285
285
286 app = config.make_wsgi_app()
286 app = config.make_wsgi_app()
287 return app
287 return app
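# Illustrative usage (editor sketch; store path and port are assumptions):
# the returned WSGI app can be served standalone for local testing, e.g.
#   app = create_app(git_lfs_enabled=True, git_lfs_store_path='/tmp/lfs-store')
#   from wsgiref.simple_server import make_server
#   make_server('127.0.0.1', 8080, app).serve_forever()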
@@ -1,175 +1,175 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import logging
20 import logging
21 from collections import OrderedDict
21 from collections import OrderedDict
22
22
23 log = logging.getLogger(__name__)
23 log = logging.getLogger(__name__)
24
24
25
25
26 class OidHandler(object):
26 class OidHandler(object):
27
27
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
28 def __init__(self, store, repo_name, auth, oid, obj_size, obj_data, obj_href,
29 obj_verify_href=None):
29 obj_verify_href=None):
30 self.current_store = store
30 self.current_store = store
31 self.repo_name = repo_name
31 self.repo_name = repo_name
32 self.auth = auth
32 self.auth = auth
33 self.oid = oid
33 self.oid = oid
34 self.obj_size = obj_size
34 self.obj_size = obj_size
35 self.obj_data = obj_data
35 self.obj_data = obj_data
36 self.obj_href = obj_href
36 self.obj_href = obj_href
37 self.obj_verify_href = obj_verify_href
37 self.obj_verify_href = obj_verify_href
38
38
39 def get_store(self, mode=None):
39 def get_store(self, mode=None):
40 return self.current_store
40 return self.current_store
41
41
42 def get_auth(self):
42 def get_auth(self):
43 """returns auth header for re-use in upload/download"""
43 """returns auth header for re-use in upload/download"""
44 return " ".join(self.auth)
44 return " ".join(self.auth)
45
45
46 def download(self):
46 def download(self):
47
47
48 store = self.get_store()
48 store = self.get_store()
49 response = None
49 response = None
50 has_errors = None
50 has_errors = None
51
51
52 if not store.has_oid():
52 if not store.has_oid():
53 # error reply back to the client that something is wrong with the download
53 # error reply back to the client that something is wrong with the download
54 err_msg = 'object: {} does not exist in store'.format(store.oid)
54 err_msg = 'object: {} does not exist in store'.format(store.oid)
55 has_errors = OrderedDict(
55 has_errors = OrderedDict(
56 error=OrderedDict(
56 error=OrderedDict(
57 code=404,
57 code=404,
58 message=err_msg
58 message=err_msg
59 )
59 )
60 )
60 )
61
61
62 download_action = OrderedDict(
62 download_action = OrderedDict(
63 href=self.obj_href,
63 href=self.obj_href,
64 header=OrderedDict([("Authorization", self.get_auth())])
64 header=OrderedDict([("Authorization", self.get_auth())])
65 )
65 )
66 if not has_errors:
66 if not has_errors:
67 response = OrderedDict(download=download_action)
67 response = OrderedDict(download=download_action)
68 return response, has_errors
68 return response, has_errors
69
69
70 def upload(self, skip_existing=True):
70 def upload(self, skip_existing=True):
71 """
71 """
72 Write upload action for git-lfs server
72 Write upload action for git-lfs server
73 """
73 """
74
74
75 store = self.get_store()
75 store = self.get_store()
76 response = None
76 response = None
77 has_errors = None
77 has_errors = None
78
78
79 # check whether we already have the OID; if we do, reply with an empty response
79 # check whether we already have the OID; if we do, reply with an empty response
80 if store.has_oid():
80 if store.has_oid():
81 log.debug('LFS: store already has oid %s', store.oid)
81 log.debug('LFS: store already has oid %s', store.oid)
82
82
83 # validate size
83 # validate size
84 store_size = store.size_oid()
84 store_size = store.size_oid()
85 size_match = store_size == self.obj_size
85 size_match = store_size == self.obj_size
86 if not size_match:
86 if not size_match:
87 log.warning(
87 log.warning(
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
88 'LFS: size mismatch for oid:%s, in store:%s expected: %s',
89 self.oid, store_size, self.obj_size)
89 self.oid, store_size, self.obj_size)
90 elif skip_existing:
90 elif skip_existing:
91 log.debug('LFS: skipping further action as oid already exists')
91 log.debug('LFS: skipping further action as oid already exists')
92 return response, has_errors
92 return response, has_errors
93
93
94 chunked = ("Transfer-Encoding", "chunked")
94 chunked = ("Transfer-Encoding", "chunked")
95 upload_action = OrderedDict(
95 upload_action = OrderedDict(
96 href=self.obj_href,
96 href=self.obj_href,
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
97 header=OrderedDict([("Authorization", self.get_auth()), chunked])
98 )
98 )
99 if not has_errors:
99 if not has_errors:
100 response = OrderedDict(upload=upload_action)
100 response = OrderedDict(upload=upload_action)
101 # if specified in handler, return the verification endpoint
101 # if specified in handler, return the verification endpoint
102 if self.obj_verify_href:
102 if self.obj_verify_href:
103 verify_action = OrderedDict(
103 verify_action = OrderedDict(
104 href=self.obj_verify_href,
104 href=self.obj_verify_href,
105 header=OrderedDict([("Authorization", self.get_auth())])
105 header=OrderedDict([("Authorization", self.get_auth())])
106 )
106 )
107 response['verify'] = verify_action
107 response['verify'] = verify_action
108 return response, has_errors
108 return response, has_errors
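# Illustrative shape of the returned upload response (editor sketch; host
# and credentials are assumptions, the keys mirror the code above):
#   {'upload': {'href': 'https://server/repo/info/lfs/objects/<oid>',
#               'header': {'Authorization': 'Basic ...',
#                          'Transfer-Encoding': 'chunked'}},
#    'verify': {'href': 'https://server/repo/info/lfs/verify',
#               'header': {'Authorization': 'Basic ...'}}}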
109
109
110 def exec_operation(self, operation, *args, **kwargs):
110 def exec_operation(self, operation, *args, **kwargs):
111 handler = getattr(self, operation)
111 handler = getattr(self, operation)
112 log.debug('LFS: handling request using %s handler', handler)
112 log.debug('LFS: handling request using %s handler', handler)
113 return handler(*args, **kwargs)
113 return handler(*args, **kwargs)
114
114
115
115
116 class LFSOidStore(object):
116 class LFSOidStore(object):
117
117
118 def __init__(self, oid, repo, store_location=None):
118 def __init__(self, oid, repo, store_location=None):
119 self.oid = oid
119 self.oid = oid
120 self.repo = repo
120 self.repo = repo
121 self.store_path = store_location or self.get_default_store()
121 self.store_path = store_location or self.get_default_store()
122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
122 self.tmp_oid_path = os.path.join(self.store_path, oid + '.tmp')
123 self.oid_path = os.path.join(self.store_path, oid)
123 self.oid_path = os.path.join(self.store_path, oid)
124 self.fd = None
124 self.fd = None
125
125
126 def get_engine(self, mode):
126 def get_engine(self, mode):
127 """
127 """
128 engine = store.get_engine(mode='wb')
128 engine = store.get_engine(mode='wb')
129 with engine as f:
129 with engine as f:
130 f.write('...')
130 f.write('...')
131 """
131 """
132
132
133 class StoreEngine(object):
133 class StoreEngine(object):
134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
134 def __init__(self, mode, store_path, oid_path, tmp_oid_path):
135 self.mode = mode
135 self.mode = mode
136 self.store_path = store_path
136 self.store_path = store_path
137 self.oid_path = oid_path
137 self.oid_path = oid_path
138 self.tmp_oid_path = tmp_oid_path
138 self.tmp_oid_path = tmp_oid_path
139
139
140 def __enter__(self):
140 def __enter__(self):
141 if not os.path.isdir(self.store_path):
141 if not os.path.isdir(self.store_path):
142 os.makedirs(self.store_path)
142 os.makedirs(self.store_path)
143
143
144 # TODO(marcink): maybe write metadata here with size/oid ?
144 # TODO(marcink): maybe write metadata here with size/oid ?
145 fd = open(self.tmp_oid_path, self.mode)
145 fd = open(self.tmp_oid_path, self.mode)
146 self.fd = fd
146 self.fd = fd
147 return fd
147 return fd
148
148
149 def __exit__(self, exc_type, exc_value, traceback):
149 def __exit__(self, exc_type, exc_value, traceback):
150 # close tmp file, and rename to final destination
150 # close tmp file, and rename to final destination
151 self.fd.close()
151 self.fd.close()
152 shutil.move(self.tmp_oid_path, self.oid_path)
152 shutil.move(self.tmp_oid_path, self.oid_path)
153
153
154 return StoreEngine(
154 return StoreEngine(
155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
155 mode, self.store_path, self.oid_path, self.tmp_oid_path)
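# Illustrative usage (editor sketch; oid, repo name and store path are
# assumptions), mirroring the docstring above:
#   store = LFSOidStore('deadbeef' * 5, 'repo', store_location='/tmp/lfs-store')
#   with store.get_engine(mode='wb') as f:
#       f.write('content')
# The data is written to '<oid>.tmp' first and moved to '<oid>' when the
# context manager exits.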
156
156
157 def get_default_store(self):
157 def get_default_store(self):
158 """
158 """
159 Default store, consistent with defaults of Mercurial large files store
159 Default store, consistent with defaults of Mercurial large files store
160 which is /home/username/.cache/largefiles
160 which is /home/username/.cache/largefiles
161 """
161 """
162 user_home = os.path.expanduser("~")
162 user_home = os.path.expanduser("~")
163 return os.path.join(user_home, '.cache', 'lfs-store')
163 return os.path.join(user_home, '.cache', 'lfs-store')
164
164
165 def has_oid(self):
165 def has_oid(self):
166 return os.path.exists(os.path.join(self.store_path, self.oid))
166 return os.path.exists(os.path.join(self.store_path, self.oid))
167
167
168 def size_oid(self):
168 def size_oid(self):
169 size = -1
169 size = -1
170
170
171 if self.has_oid():
171 if self.has_oid():
172 oid = os.path.join(self.store_path, self.oid)
172 oid = os.path.join(self.store_path, self.oid)
173 size = os.stat(oid).st_size
173 size = os.stat(oid).st_size
174
174
175 return size
175 return size
@@ -1,16 +1,16 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
@@ -1,239 +1,239 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from webtest.app import TestApp as WebObTestApp
20 from webtest.app import TestApp as WebObTestApp
21 import simplejson as json
21 import simplejson as json
22
22
23 from vcsserver.git_lfs.app import create_app
23 from vcsserver.git_lfs.app import create_app
24
24
25
25
26 @pytest.fixture(scope='function')
26 @pytest.fixture(scope='function')
27 def git_lfs_app(tmpdir):
27 def git_lfs_app(tmpdir):
28 custom_app = WebObTestApp(create_app(
28 custom_app = WebObTestApp(create_app(
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir)))
29 git_lfs_enabled=True, git_lfs_store_path=str(tmpdir)))
30 custom_app._store = str(tmpdir)
30 custom_app._store = str(tmpdir)
31 return custom_app
31 return custom_app
32
32
33
33
34 @pytest.fixture()
34 @pytest.fixture()
35 def http_auth():
35 def http_auth():
36 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
36 return {'HTTP_AUTHORIZATION': "Basic XXXXX"}
37
37
38
38
39 class TestLFSApplication(object):
39 class TestLFSApplication(object):
40
40
41 def test_app_wrong_path(self, git_lfs_app):
41 def test_app_wrong_path(self, git_lfs_app):
42 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
42 git_lfs_app.get('/repo/info/lfs/xxx', status=404)
43
43
44 def test_app_deprecated_endpoint(self, git_lfs_app):
44 def test_app_deprecated_endpoint(self, git_lfs_app):
45 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
45 response = git_lfs_app.post('/repo/info/lfs/objects', status=501)
46 assert response.status_code == 501
46 assert response.status_code == 501
47 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
47 assert json.loads(response.text) == {u'message': u'LFS: v1 api not supported'}
48
48
49 def test_app_lock_verify_api_not_available(self, git_lfs_app):
49 def test_app_lock_verify_api_not_available(self, git_lfs_app):
50 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
50 response = git_lfs_app.post('/repo/info/lfs/locks/verify', status=501)
51 assert response.status_code == 501
51 assert response.status_code == 501
52 assert json.loads(response.text) == {
52 assert json.loads(response.text) == {
53 u'message': u'GIT LFS locking api not supported'}
53 u'message': u'GIT LFS locking api not supported'}
54
54
55 def test_app_lock_api_not_available(self, git_lfs_app):
55 def test_app_lock_api_not_available(self, git_lfs_app):
56 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
56 response = git_lfs_app.post('/repo/info/lfs/locks', status=501)
57 assert response.status_code == 501
57 assert response.status_code == 501
58 assert json.loads(response.text) == {
58 assert json.loads(response.text) == {
59 u'message': u'GIT LFS locking api not supported'}
59 u'message': u'GIT LFS locking api not supported'}
60
60
61 def test_app_batch_api_missing_auth(self, git_lfs_app,):
61 def test_app_batch_api_missing_auth(self, git_lfs_app,):
62 git_lfs_app.post_json(
62 git_lfs_app.post_json(
63 '/repo/info/lfs/objects/batch', params={}, status=403)
63 '/repo/info/lfs/objects/batch', params={}, status=403)
64
64
65 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
65 def test_app_batch_api_unsupported_operation(self, git_lfs_app, http_auth):
66 response = git_lfs_app.post_json(
66 response = git_lfs_app.post_json(
67 '/repo/info/lfs/objects/batch', params={}, status=400,
67 '/repo/info/lfs/objects/batch', params={}, status=400,
68 extra_environ=http_auth)
68 extra_environ=http_auth)
69 assert json.loads(response.text) == {
69 assert json.loads(response.text) == {
70 u'message': u'unsupported operation mode: `None`'}
70 u'message': u'unsupported operation mode: `None`'}
71
71
72 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
72 def test_app_batch_api_missing_objects(self, git_lfs_app, http_auth):
73 response = git_lfs_app.post_json(
73 response = git_lfs_app.post_json(
74 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
74 '/repo/info/lfs/objects/batch', params={'operation': 'download'},
75 status=400, extra_environ=http_auth)
75 status=400, extra_environ=http_auth)
76 assert json.loads(response.text) == {
76 assert json.loads(response.text) == {
77 u'message': u'missing objects data'}
77 u'message': u'missing objects data'}
78
78
79 def test_app_batch_api_unsupported_data_in_objects(
79 def test_app_batch_api_unsupported_data_in_objects(
80 self, git_lfs_app, http_auth):
80 self, git_lfs_app, http_auth):
81 params = {'operation': 'download',
81 params = {'operation': 'download',
82 'objects': [{}]}
82 'objects': [{}]}
83 response = git_lfs_app.post_json(
83 response = git_lfs_app.post_json(
84 '/repo/info/lfs/objects/batch', params=params, status=400,
84 '/repo/info/lfs/objects/batch', params=params, status=400,
85 extra_environ=http_auth)
85 extra_environ=http_auth)
86 assert json.loads(response.text) == {
86 assert json.loads(response.text) == {
87 u'message': u'unsupported data in objects'}
87 u'message': u'unsupported data in objects'}
88
88
89 def test_app_batch_api_download_missing_object(
89 def test_app_batch_api_download_missing_object(
90 self, git_lfs_app, http_auth):
90 self, git_lfs_app, http_auth):
91 params = {'operation': 'download',
91 params = {'operation': 'download',
92 'objects': [{'oid': '123', 'size': '1024'}]}
92 'objects': [{'oid': '123', 'size': '1024'}]}
93 response = git_lfs_app.post_json(
93 response = git_lfs_app.post_json(
94 '/repo/info/lfs/objects/batch', params=params,
94 '/repo/info/lfs/objects/batch', params=params,
95 extra_environ=http_auth)
95 extra_environ=http_auth)
96
96
97 expected_objects = [
97 expected_objects = [
98 {u'authenticated': True,
98 {u'authenticated': True,
99 u'errors': {u'error': {
99 u'errors': {u'error': {
100 u'code': 404,
100 u'code': 404,
101 u'message': u'object: 123 does not exist in store'}},
101 u'message': u'object: 123 does not exist in store'}},
102 u'oid': u'123',
102 u'oid': u'123',
103 u'size': u'1024'}
103 u'size': u'1024'}
104 ]
104 ]
105 assert json.loads(response.text) == {
105 assert json.loads(response.text) == {
106 'objects': expected_objects, 'transfer': 'basic'}
106 'objects': expected_objects, 'transfer': 'basic'}
107
107
108 def test_app_batch_api_download(self, git_lfs_app, http_auth):
108 def test_app_batch_api_download(self, git_lfs_app, http_auth):
109 oid = '456'
109 oid = '456'
110 oid_path = os.path.join(git_lfs_app._store, oid)
110 oid_path = os.path.join(git_lfs_app._store, oid)
111 if not os.path.isdir(os.path.dirname(oid_path)):
111 if not os.path.isdir(os.path.dirname(oid_path)):
112 os.makedirs(os.path.dirname(oid_path))
112 os.makedirs(os.path.dirname(oid_path))
113 with open(oid_path, 'wb') as f:
113 with open(oid_path, 'wb') as f:
114 f.write('OID_CONTENT')
114 f.write('OID_CONTENT')
115
115
116 params = {'operation': 'download',
116 params = {'operation': 'download',
117 'objects': [{'oid': oid, 'size': '1024'}]}
117 'objects': [{'oid': oid, 'size': '1024'}]}
118 response = git_lfs_app.post_json(
118 response = git_lfs_app.post_json(
119 '/repo/info/lfs/objects/batch', params=params,
119 '/repo/info/lfs/objects/batch', params=params,
120 extra_environ=http_auth)
120 extra_environ=http_auth)
121
121
122 expected_objects = [
122 expected_objects = [
123 {u'authenticated': True,
123 {u'authenticated': True,
124 u'actions': {
124 u'actions': {
125 u'download': {
125 u'download': {
126 u'header': {u'Authorization': u'Basic XXXXX'},
126 u'header': {u'Authorization': u'Basic XXXXX'},
127 u'href': u'http://localhost/repo/info/lfs/objects/456'},
127 u'href': u'http://localhost/repo/info/lfs/objects/456'},
128 },
128 },
129 u'oid': u'456',
129 u'oid': u'456',
130 u'size': u'1024'}
130 u'size': u'1024'}
131 ]
131 ]
132 assert json.loads(response.text) == {
132 assert json.loads(response.text) == {
133 'objects': expected_objects, 'transfer': 'basic'}
133 'objects': expected_objects, 'transfer': 'basic'}
134
134
135 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
135 def test_app_batch_api_upload(self, git_lfs_app, http_auth):
136 params = {'operation': 'upload',
136 params = {'operation': 'upload',
137 'objects': [{'oid': '123', 'size': '1024'}]}
137 'objects': [{'oid': '123', 'size': '1024'}]}
138 response = git_lfs_app.post_json(
138 response = git_lfs_app.post_json(
139 '/repo/info/lfs/objects/batch', params=params,
139 '/repo/info/lfs/objects/batch', params=params,
140 extra_environ=http_auth)
140 extra_environ=http_auth)
141 expected_objects = [
141 expected_objects = [
142 {u'authenticated': True,
142 {u'authenticated': True,
143 u'actions': {
143 u'actions': {
144 u'upload': {
144 u'upload': {
145 u'header': {u'Authorization': u'Basic XXXXX',
145 u'header': {u'Authorization': u'Basic XXXXX',
146 u'Transfer-Encoding': u'chunked'},
146 u'Transfer-Encoding': u'chunked'},
147 u'href': u'http://localhost/repo/info/lfs/objects/123'},
147 u'href': u'http://localhost/repo/info/lfs/objects/123'},
148 u'verify': {
148 u'verify': {
149 u'header': {u'Authorization': u'Basic XXXXX'},
149 u'header': {u'Authorization': u'Basic XXXXX'},
150 u'href': u'http://localhost/repo/info/lfs/verify'}
150 u'href': u'http://localhost/repo/info/lfs/verify'}
151 },
151 },
152 u'oid': u'123',
152 u'oid': u'123',
153 u'size': u'1024'}
153 u'size': u'1024'}
154 ]
154 ]
155 assert json.loads(response.text) == {
155 assert json.loads(response.text) == {
156 'objects': expected_objects, 'transfer': 'basic'}
156 'objects': expected_objects, 'transfer': 'basic'}
157
157
158 def test_app_verify_api_missing_data(self, git_lfs_app):
158 def test_app_verify_api_missing_data(self, git_lfs_app):
159 params = {'oid': 'missing',}
159 params = {'oid': 'missing',}
160 response = git_lfs_app.post_json(
160 response = git_lfs_app.post_json(
161 '/repo/info/lfs/verify', params=params,
161 '/repo/info/lfs/verify', params=params,
162 status=400)
162 status=400)
163
163
164 assert json.loads(response.text) == {
164 assert json.loads(response.text) == {
165 u'message': u'missing oid and size in request data'}
165 u'message': u'missing oid and size in request data'}
166
166
167 def test_app_verify_api_missing_obj(self, git_lfs_app):
167 def test_app_verify_api_missing_obj(self, git_lfs_app):
168 params = {'oid': 'missing', 'size': '1024'}
168 params = {'oid': 'missing', 'size': '1024'}
169 response = git_lfs_app.post_json(
169 response = git_lfs_app.post_json(
170 '/repo/info/lfs/verify', params=params,
170 '/repo/info/lfs/verify', params=params,
171 status=404)
171 status=404)
172
172
173 assert json.loads(response.text) == {
173 assert json.loads(response.text) == {
174 u'message': u'oid `missing` does not exists in store'}
174 u'message': u'oid `missing` does not exists in store'}
175
175
176 def test_app_verify_api_size_mismatch(self, git_lfs_app):
176 def test_app_verify_api_size_mismatch(self, git_lfs_app):
177 oid = 'existing'
177 oid = 'existing'
178 oid_path = os.path.join(git_lfs_app._store, oid)
178 oid_path = os.path.join(git_lfs_app._store, oid)
179 if not os.path.isdir(os.path.dirname(oid_path)):
179 if not os.path.isdir(os.path.dirname(oid_path)):
180 os.makedirs(os.path.dirname(oid_path))
180 os.makedirs(os.path.dirname(oid_path))
181 with open(oid_path, 'wb') as f:
181 with open(oid_path, 'wb') as f:
182 f.write('OID_CONTENT')
182 f.write('OID_CONTENT')
183
183
184 params = {'oid': oid, 'size': '1024'}
184 params = {'oid': oid, 'size': '1024'}
185 response = git_lfs_app.post_json(
185 response = git_lfs_app.post_json(
186 '/repo/info/lfs/verify', params=params, status=422)
186 '/repo/info/lfs/verify', params=params, status=422)
187
187
188 assert json.loads(response.text) == {
188 assert json.loads(response.text) == {
189 u'message': u'requested file size mismatch '
189 u'message': u'requested file size mismatch '
190 u'store size:11 requested:1024'}
190 u'store size:11 requested:1024'}
191
191
192 def test_app_verify_api(self, git_lfs_app):
192 def test_app_verify_api(self, git_lfs_app):
193 oid = 'existing'
193 oid = 'existing'
194 oid_path = os.path.join(git_lfs_app._store, oid)
194 oid_path = os.path.join(git_lfs_app._store, oid)
195 if not os.path.isdir(os.path.dirname(oid_path)):
195 if not os.path.isdir(os.path.dirname(oid_path)):
196 os.makedirs(os.path.dirname(oid_path))
196 os.makedirs(os.path.dirname(oid_path))
197 with open(oid_path, 'wb') as f:
197 with open(oid_path, 'wb') as f:
198 f.write('OID_CONTENT')
198 f.write('OID_CONTENT')
199
199
200 params = {'oid': oid, 'size': 11}
200 params = {'oid': oid, 'size': 11}
201 response = git_lfs_app.post_json(
201 response = git_lfs_app.post_json(
202 '/repo/info/lfs/verify', params=params)
202 '/repo/info/lfs/verify', params=params)
203
203
204 assert json.loads(response.text) == {
204 assert json.loads(response.text) == {
205 u'message': {u'size': u'ok', u'in_store': u'ok'}}
205 u'message': {u'size': u'ok', u'in_store': u'ok'}}
206
206
207 def test_app_download_api_oid_not_existing(self, git_lfs_app):
207 def test_app_download_api_oid_not_existing(self, git_lfs_app):
208 oid = 'missing'
208 oid = 'missing'
209
209
210 response = git_lfs_app.get(
210 response = git_lfs_app.get(
211 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
211 '/repo/info/lfs/objects/{oid}'.format(oid=oid), status=404)
212
212
213 assert json.loads(response.text) == {
213 assert json.loads(response.text) == {
214 u'message': u'requested file with oid `missing` not found in store'}
214 u'message': u'requested file with oid `missing` not found in store'}
215
215
216 def test_app_download_api(self, git_lfs_app):
216 def test_app_download_api(self, git_lfs_app):
217 oid = 'existing'
217 oid = 'existing'
218 oid_path = os.path.join(git_lfs_app._store, oid)
218 oid_path = os.path.join(git_lfs_app._store, oid)
219 if not os.path.isdir(os.path.dirname(oid_path)):
219 if not os.path.isdir(os.path.dirname(oid_path)):
220 os.makedirs(os.path.dirname(oid_path))
220 os.makedirs(os.path.dirname(oid_path))
221 with open(oid_path, 'wb') as f:
221 with open(oid_path, 'wb') as f:
222 f.write('OID_CONTENT')
222 f.write('OID_CONTENT')
223
223
224 response = git_lfs_app.get(
224 response = git_lfs_app.get(
225 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
225 '/repo/info/lfs/objects/{oid}'.format(oid=oid))
226 assert response
226 assert response
227
227
228 def test_app_upload(self, git_lfs_app):
228 def test_app_upload(self, git_lfs_app):
229 oid = 'uploaded'
229 oid = 'uploaded'
230
230
231 response = git_lfs_app.put(
231 response = git_lfs_app.put(
232 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
232 '/repo/info/lfs/objects/{oid}'.format(oid=oid), params='CONTENT')
233
233
234 assert json.loads(response.text) == {u'upload': u'ok'}
234 assert json.loads(response.text) == {u'upload': u'ok'}
235
235
236 # verify that we actually wrote that OID
236 # verify that we actually wrote that OID
237 oid_path = os.path.join(git_lfs_app._store, oid)
237 oid_path = os.path.join(git_lfs_app._store, oid)
238 assert os.path.isfile(oid_path)
238 assert os.path.isfile(oid_path)
239 assert 'CONTENT' == open(oid_path).read()
239 assert 'CONTENT' == open(oid_path).read()
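
The assertions above pin down the JSON contract of the batch, verify, upload and download endpoints. For poking at the same endpoints by hand, the create_app() factory exercised by the git_lfs_app fixture can be served directly; a minimal sketch, assuming the vcsserver package is importable and a throwaway temp directory is acceptable as the object store (host, port and store path are illustrative):

# Serve the Git LFS WSGI app locally, using the same create_app() keyword
# arguments the fixture above passes in.
import tempfile
from wsgiref.simple_server import make_server

from vcsserver.git_lfs.app import create_app

store_path = tempfile.mkdtemp(prefix='lfs-store-')
app = create_app(git_lfs_enabled=True, git_lfs_store_path=store_path)

httpd = make_server('127.0.0.1', 8099, app)
print('LFS endpoints at http://127.0.0.1:8099, store: %s' % store_path)
httpd.serve_forever()

Requests still need a Basic Authorization header, mirroring the http_auth fixture above.
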
@@ -1,141 +1,141 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import pytest
19 import pytest
20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
20 from vcsserver.git_lfs.lib import OidHandler, LFSOidStore
21
21
22
22
23 @pytest.fixture()
23 @pytest.fixture()
24 def lfs_store(tmpdir):
24 def lfs_store(tmpdir):
25 repo = 'test'
25 repo = 'test'
26 oid = '123456789'
26 oid = '123456789'
27 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
27 store = LFSOidStore(oid=oid, repo=repo, store_location=str(tmpdir))
28 return store
28 return store
29
29
30
30
31 @pytest.fixture()
31 @pytest.fixture()
32 def oid_handler(lfs_store):
32 def oid_handler(lfs_store):
33 store = lfs_store
33 store = lfs_store
34 repo = store.repo
34 repo = store.repo
35 oid = store.oid
35 oid = store.oid
36
36
37 oid_handler = OidHandler(
37 oid_handler = OidHandler(
38 store=store, repo_name=repo, auth=('basic', 'xxxx'),
38 store=store, repo_name=repo, auth=('basic', 'xxxx'),
39 oid=oid,
39 oid=oid,
40 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
40 obj_size='1024', obj_data={}, obj_href='http://localhost/handle_oid',
41 obj_verify_href='http://localhost/verify')
41 obj_verify_href='http://localhost/verify')
42 return oid_handler
42 return oid_handler
43
43
44
44
45 class TestOidHandler(object):
45 class TestOidHandler(object):
46
46
47 @pytest.mark.parametrize('exec_action', [
47 @pytest.mark.parametrize('exec_action', [
48 'download',
48 'download',
49 'upload',
49 'upload',
50 ])
50 ])
51 def test_exec_action(self, exec_action, oid_handler):
51 def test_exec_action(self, exec_action, oid_handler):
52 handler = oid_handler.exec_operation(exec_action)
52 handler = oid_handler.exec_operation(exec_action)
53 assert handler
53 assert handler
54
54
55 def test_exec_action_undefined(self, oid_handler):
55 def test_exec_action_undefined(self, oid_handler):
56 with pytest.raises(AttributeError):
56 with pytest.raises(AttributeError):
57 oid_handler.exec_operation('wrong')
57 oid_handler.exec_operation('wrong')
58
58
59 def test_download_oid_not_existing(self, oid_handler):
59 def test_download_oid_not_existing(self, oid_handler):
60 response, has_errors = oid_handler.exec_operation('download')
60 response, has_errors = oid_handler.exec_operation('download')
61
61
62 assert response is None
62 assert response is None
63 assert has_errors['error'] == {
63 assert has_errors['error'] == {
64 'code': 404,
64 'code': 404,
65 'message': 'object: 123456789 does not exist in store'}
65 'message': 'object: 123456789 does not exist in store'}
66
66
67 def test_download_oid(self, oid_handler):
67 def test_download_oid(self, oid_handler):
68 store = oid_handler.get_store()
68 store = oid_handler.get_store()
69 if not os.path.isdir(os.path.dirname(store.oid_path)):
69 if not os.path.isdir(os.path.dirname(store.oid_path)):
70 os.makedirs(os.path.dirname(store.oid_path))
70 os.makedirs(os.path.dirname(store.oid_path))
71
71
72 with open(store.oid_path, 'wb') as f:
72 with open(store.oid_path, 'wb') as f:
73 f.write('CONTENT')
73 f.write('CONTENT')
74
74
75 response, has_errors = oid_handler.exec_operation('download')
75 response, has_errors = oid_handler.exec_operation('download')
76
76
77 assert has_errors is None
77 assert has_errors is None
78 assert response['download'] == {
78 assert response['download'] == {
79 'header': {'Authorization': 'basic xxxx'},
79 'header': {'Authorization': 'basic xxxx'},
80 'href': 'http://localhost/handle_oid'
80 'href': 'http://localhost/handle_oid'
81 }
81 }
82
82
83 def test_upload_oid_that_exists(self, oid_handler):
83 def test_upload_oid_that_exists(self, oid_handler):
84 store = oid_handler.get_store()
84 store = oid_handler.get_store()
85 if not os.path.isdir(os.path.dirname(store.oid_path)):
85 if not os.path.isdir(os.path.dirname(store.oid_path)):
86 os.makedirs(os.path.dirname(store.oid_path))
86 os.makedirs(os.path.dirname(store.oid_path))
87
87
88 with open(store.oid_path, 'wb') as f:
88 with open(store.oid_path, 'wb') as f:
89 f.write('CONTENT')
89 f.write('CONTENT')
90 oid_handler.obj_size = 7
90 oid_handler.obj_size = 7
91 response, has_errors = oid_handler.exec_operation('upload')
91 response, has_errors = oid_handler.exec_operation('upload')
92 assert has_errors is None
92 assert has_errors is None
93 assert response is None
93 assert response is None
94
94
95 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
95 def test_upload_oid_that_exists_but_has_wrong_size(self, oid_handler):
96 store = oid_handler.get_store()
96 store = oid_handler.get_store()
97 if not os.path.isdir(os.path.dirname(store.oid_path)):
97 if not os.path.isdir(os.path.dirname(store.oid_path)):
98 os.makedirs(os.path.dirname(store.oid_path))
98 os.makedirs(os.path.dirname(store.oid_path))
99
99
100 with open(store.oid_path, 'wb') as f:
100 with open(store.oid_path, 'wb') as f:
101 f.write('CONTENT')
101 f.write('CONTENT')
102
102
103 oid_handler.obj_size = 10240
103 oid_handler.obj_size = 10240
104 response, has_errors = oid_handler.exec_operation('upload')
104 response, has_errors = oid_handler.exec_operation('upload')
105 assert has_errors is None
105 assert has_errors is None
106 assert response['upload'] == {
106 assert response['upload'] == {
107 'header': {'Authorization': 'basic xxxx',
107 'header': {'Authorization': 'basic xxxx',
108 'Transfer-Encoding': 'chunked'},
108 'Transfer-Encoding': 'chunked'},
109 'href': 'http://localhost/handle_oid',
109 'href': 'http://localhost/handle_oid',
110 }
110 }
111
111
112 def test_upload_oid(self, oid_handler):
112 def test_upload_oid(self, oid_handler):
113 response, has_errors = oid_handler.exec_operation('upload')
113 response, has_errors = oid_handler.exec_operation('upload')
114 assert has_errors is None
114 assert has_errors is None
115 assert response['upload'] == {
115 assert response['upload'] == {
116 'header': {'Authorization': 'basic xxxx',
116 'header': {'Authorization': 'basic xxxx',
117 'Transfer-Encoding': 'chunked'},
117 'Transfer-Encoding': 'chunked'},
118 'href': 'http://localhost/handle_oid'
118 'href': 'http://localhost/handle_oid'
119 }
119 }
120
120
121
121
122 class TestLFSStore(object):
122 class TestLFSStore(object):
123 def test_write_oid(self, lfs_store):
123 def test_write_oid(self, lfs_store):
124 oid_location = lfs_store.oid_path
124 oid_location = lfs_store.oid_path
125
125
126 assert not os.path.isfile(oid_location)
126 assert not os.path.isfile(oid_location)
127
127
128 engine = lfs_store.get_engine(mode='wb')
128 engine = lfs_store.get_engine(mode='wb')
129 with engine as f:
129 with engine as f:
130 f.write('CONTENT')
130 f.write('CONTENT')
131
131
132 assert os.path.isfile(oid_location)
132 assert os.path.isfile(oid_location)
133
133
134 def test_detect_has_oid(self, lfs_store):
134 def test_detect_has_oid(self, lfs_store):
135
135
136 assert lfs_store.has_oid() is False
136 assert lfs_store.has_oid() is False
137 engine = lfs_store.get_engine(mode='wb')
137 engine = lfs_store.get_engine(mode='wb')
138 with engine as f:
138 with engine as f:
139 f.write('CONTENT')
139 f.write('CONTENT')
140
140
141 assert lfs_store.has_oid() is True
\ No newline at end of file
141 assert lfs_store.has_oid() is True
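
The store and handler under test can also be wired up by hand outside pytest; a minimal sketch mirroring the lfs_store and oid_handler fixtures above (the OID, repo name and URLs are illustrative, and vcsserver is assumed to be importable):

import tempfile

from vcsserver.git_lfs.lib import OidHandler, LFSOidStore

# Build a store for a single object and write its content through the engine,
# exactly as TestLFSStore.test_write_oid does.
store = LFSOidStore(
    oid='deadbeef', repo='test', store_location=tempfile.mkdtemp())
with store.get_engine(mode='wb') as f:
    f.write('CONTENT')
assert store.has_oid() is True

# Hand the stored object to an OidHandler and ask for the download action.
handler = OidHandler(
    store=store, repo_name=store.repo, auth=('basic', 'xxxx'),
    oid=store.oid, obj_size='7', obj_data={},
    obj_href='http://localhost/handle_oid',
    obj_verify_href='http://localhost/verify')
response, has_errors = handler.exec_operation('download')
assert has_errors is None
print(response['download'])  # href plus the Authorization header
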
@@ -1,50 +1,50 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 import copy
17 import copy
18 from functools import wraps
18 from functools import wraps
19
19
20
20
21 def get_cython_compat_decorator(wrapper, func):
21 def get_cython_compat_decorator(wrapper, func):
22 """
22 """
23 Creates a cython compatible decorator. The previously used
23 Creates a cython compatible decorator. The previously used
24 decorator.decorator() function seems to be incompatible with cython.
24 decorator.decorator() function seems to be incompatible with cython.
25
25
26 :param wrapper: __wrapper method of the decorator class
26 :param wrapper: __wrapper method of the decorator class
27 :param func: decorated function
27 :param func: decorated function
28 """
28 """
29 @wraps(func)
29 @wraps(func)
30 def local_wrapper(*args, **kwds):
30 def local_wrapper(*args, **kwds):
31 return wrapper(func, *args, **kwds)
31 return wrapper(func, *args, **kwds)
32 local_wrapper.__wrapped__ = func
32 local_wrapper.__wrapped__ = func
33 return local_wrapper
33 return local_wrapper
34
34
35
35
36 def safe_result(result):
36 def safe_result(result):
37 """clean result for better representation in logs"""
37 """clean result for better representation in logs"""
38 clean_copy = copy.deepcopy(result)
38 clean_copy = copy.deepcopy(result)
39
39
40 try:
40 try:
41 if 'objects' in clean_copy:
41 if 'objects' in clean_copy:
42 for oid_data in clean_copy['objects']:
42 for oid_data in clean_copy['objects']:
43 if 'actions' in oid_data:
43 if 'actions' in oid_data:
44 for action_name, data in oid_data['actions'].items():
44 for action_name, data in oid_data['actions'].items():
45 if 'header' in data:
45 if 'header' in data:
46 data['header'] = {'Authorization': '*****'}
46 data['header'] = {'Authorization': '*****'}
47 except Exception:
47 except Exception:
48 return result
48 return result
49
49
50 return clean_copy
50 return clean_copy
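
safe_result() exists so batch responses can be logged without leaking credentials: every 'header' found under an object's 'actions' is replaced with a masked Authorization entry, on a deep copy of the input. A short usage sketch (the import path is an assumption based on the neighbouring git_lfs modules):

from vcsserver.git_lfs.utils import safe_result  # assumed module path

result = {
    'transfer': 'basic',
    'objects': [{
        'oid': '123', 'size': '1024', 'authenticated': True,
        'actions': {
            'download': {
                'header': {'Authorization': 'Basic c2VjcmV0'},
                'href': 'http://localhost/repo/info/lfs/objects/123'},
        },
    }],
}

cleaned = safe_result(result)
# Only the copy handed back for logging is masked; the original is untouched.
assert cleaned['objects'][0]['actions']['download']['header'] == {
    'Authorization': '*****'}
assert result['objects'][0]['actions']['download']['header'] == {
    'Authorization': 'Basic c2VjcmV0'}
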
@@ -1,749 +1,758 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 baseui.setconfig('ui', 'paginate', 'never')
56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
58 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
59 baseui.setconfig('worker', 'numcpus', 1)
59
60
60 # If there is no config for the largefiles extension, we explicitly disable
61 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from repositories hgrc file. Recent
62 # it here. This overrides settings from repositories hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # repo.
64 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
65 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
66 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
67 baseui.setconfig('extensions', 'largefiles', '!')
67
68
68 return baseui
69 return baseui
69
70
70
71
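
make_ui_from_config() expects the repository configuration as an iterable of (section, option, value) tuples, the shape RhodeCode ships over the wire. A minimal sketch of a caller, assuming vcsserver.hg is the importable module path of this file:

from vcsserver.hg import make_ui_from_config  # assumed module path

sample_config = [
    ('phases', 'publish', 'False'),
    ('ui', 'username', 'vcsserver'),
    # an explicit 'extensions'/'largefiles' entry keeps the extension from
    # being force-disabled by make_ui_from_config()
    ('extensions', 'largefiles', ''),
]
baseui = make_ui_from_config(sample_config)
# baseui is now quiet, unpaginated and pinned to a single worker thread.
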
71 def reraise_safe_exceptions(func):
72 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
73 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
74 def wrapper(*args, **kwargs):
74 try:
75 try:
75 return func(*args, **kwargs)
76 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
77 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
78 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
79 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
80 raise_from_original(exceptions.LookupException)
80 except RequirementError:
81 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
82 raise_from_original(exceptions.RequirementException)
82 except RepoError:
83 except RepoError:
83 raise_from_original(exceptions.VcsException)
84 raise_from_original(exceptions.VcsException)
84 except LookupError:
85 except LookupError:
85 raise_from_original(exceptions.LookupException)
86 raise_from_original(exceptions.LookupException)
86 except Exception as e:
87 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
88 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
89 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
90 raise_from_original(exceptions.UnhandledException)
90 raise
91 raise
91 return wrapper
92 return wrapper
92
93
93
94
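
Every remote method below is wrapped with @reraise_safe_exceptions so mercurial-specific errors never cross the wire. A small sketch of the translation, using the Abort class re-exported by vcsserver.hgcompat; that the wrapper surfaces it as exceptions.AbortException is an assumption about raise_from_original(), which is imported from vcsserver.base and not shown here:

from vcsserver import exceptions
from vcsserver.hg import reraise_safe_exceptions  # assumed module path
from vcsserver.hgcompat import Abort

@reraise_safe_exceptions
def failing_call():
    raise Abort('simulated mercurial abort')

try:
    failing_call()
except exceptions.AbortException:
    # mercurial's Abort surfaced as the transport-neutral AbortException
    pass
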
94 class MercurialFactory(RepoFactory):
95 class MercurialFactory(RepoFactory):
95
96
96 def _create_config(self, config, hooks=True):
97 def _create_config(self, config, hooks=True):
97 if not hooks:
98 if not hooks:
98 hooks_to_clean = frozenset((
99 hooks_to_clean = frozenset((
99 'changegroup.repo_size', 'preoutgoing.pre_pull',
100 'changegroup.repo_size', 'preoutgoing.pre_pull',
100 'outgoing.pull_logger', 'prechangegroup.pre_push'))
101 'outgoing.pull_logger', 'prechangegroup.pre_push'))
101 new_config = []
102 new_config = []
102 for section, option, value in config:
103 for section, option, value in config:
103 if section == 'hooks' and option in hooks_to_clean:
104 if section == 'hooks' and option in hooks_to_clean:
104 continue
105 continue
105 new_config.append((section, option, value))
106 new_config.append((section, option, value))
106 config = new_config
107 config = new_config
107
108
108 baseui = make_ui_from_config(config)
109 baseui = make_ui_from_config(config)
109 return baseui
110 return baseui
110
111
111 def _create_repo(self, wire, create):
112 def _create_repo(self, wire, create):
112 baseui = self._create_config(wire["config"])
113 baseui = self._create_config(wire["config"])
113 return localrepository(baseui, wire["path"], create)
114 return localrepository(baseui, wire["path"], create)
114
115
115
116
116 class HgRemote(object):
117 class HgRemote(object):
117
118
118 def __init__(self, factory):
119 def __init__(self, factory):
119 self._factory = factory
120 self._factory = factory
120
121
121 self._bulk_methods = {
122 self._bulk_methods = {
122 "affected_files": self.ctx_files,
123 "affected_files": self.ctx_files,
123 "author": self.ctx_user,
124 "author": self.ctx_user,
124 "branch": self.ctx_branch,
125 "branch": self.ctx_branch,
125 "children": self.ctx_children,
126 "children": self.ctx_children,
126 "date": self.ctx_date,
127 "date": self.ctx_date,
127 "message": self.ctx_description,
128 "message": self.ctx_description,
128 "parents": self.ctx_parents,
129 "parents": self.ctx_parents,
129 "status": self.ctx_status,
130 "status": self.ctx_status,
130 "obsolete": self.ctx_obsolete,
131 "obsolete": self.ctx_obsolete,
131 "phase": self.ctx_phase,
132 "phase": self.ctx_phase,
132 "hidden": self.ctx_hidden,
133 "hidden": self.ctx_hidden,
133 "_file_paths": self.ctx_list,
134 "_file_paths": self.ctx_list,
134 }
135 }
135
136
136 @reraise_safe_exceptions
137 @reraise_safe_exceptions
137 def discover_hg_version(self):
138 def discover_hg_version(self):
138 from mercurial import util
139 from mercurial import util
139 return util.version()
140 return util.version()
140
141
141 @reraise_safe_exceptions
142 @reraise_safe_exceptions
142 def archive_repo(self, archive_path, mtime, file_info, kind):
143 def archive_repo(self, archive_path, mtime, file_info, kind):
143 if kind == "tgz":
144 if kind == "tgz":
144 archiver = archival.tarit(archive_path, mtime, "gz")
145 archiver = archival.tarit(archive_path, mtime, "gz")
145 elif kind == "tbz2":
146 elif kind == "tbz2":
146 archiver = archival.tarit(archive_path, mtime, "bz2")
147 archiver = archival.tarit(archive_path, mtime, "bz2")
147 elif kind == 'zip':
148 elif kind == 'zip':
148 archiver = archival.zipit(archive_path, mtime)
149 archiver = archival.zipit(archive_path, mtime)
149 else:
150 else:
150 raise exceptions.ArchiveException(
151 raise exceptions.ArchiveException(
151 'Remote does not support: "%s".' % kind)
152 'Remote does not support: "%s".' % kind)
152
153
153 for f_path, f_mode, f_is_link, f_content in file_info:
154 for f_path, f_mode, f_is_link, f_content in file_info:
154 archiver.addfile(f_path, f_mode, f_is_link, f_content)
155 archiver.addfile(f_path, f_mode, f_is_link, f_content)
155 archiver.done()
156 archiver.done()
156
157
157 @reraise_safe_exceptions
158 @reraise_safe_exceptions
158 def bookmarks(self, wire):
159 def bookmarks(self, wire):
159 repo = self._factory.repo(wire)
160 repo = self._factory.repo(wire)
160 return dict(repo._bookmarks)
161 return dict(repo._bookmarks)
161
162
162 @reraise_safe_exceptions
163 @reraise_safe_exceptions
163 def branches(self, wire, normal, closed):
164 def branches(self, wire, normal, closed):
164 repo = self._factory.repo(wire)
165 repo = self._factory.repo(wire)
165 iter_branches = repo.branchmap().iterbranches()
166 iter_branches = repo.branchmap().iterbranches()
166 bt = {}
167 bt = {}
167 for branch_name, _heads, tip, is_closed in iter_branches:
168 for branch_name, _heads, tip, is_closed in iter_branches:
168 if normal and not is_closed:
169 if normal and not is_closed:
169 bt[branch_name] = tip
170 bt[branch_name] = tip
170 if closed and is_closed:
171 if closed and is_closed:
171 bt[branch_name] = tip
172 bt[branch_name] = tip
172
173
173 return bt
174 return bt
174
175
175 @reraise_safe_exceptions
176 @reraise_safe_exceptions
176 def bulk_request(self, wire, rev, pre_load):
177 def bulk_request(self, wire, rev, pre_load):
177 result = {}
178 result = {}
178 for attr in pre_load:
179 for attr in pre_load:
179 try:
180 try:
180 method = self._bulk_methods[attr]
181 method = self._bulk_methods[attr]
181 result[attr] = method(wire, rev)
182 result[attr] = method(wire, rev)
182 except KeyError:
183 except KeyError:
183 raise exceptions.VcsException(
184 raise exceptions.VcsException(
184 'Unknown bulk attribute: "%s"' % attr)
185 'Unknown bulk attribute: "%s"' % attr)
185 return result
186 return result
186
187
187 @reraise_safe_exceptions
188 @reraise_safe_exceptions
188 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
189 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
189 baseui = self._factory._create_config(wire["config"], hooks=hooks)
190 baseui = self._factory._create_config(wire["config"], hooks=hooks)
190 clone(baseui, source, dest, noupdate=not update_after_clone)
191 clone(baseui, source, dest, noupdate=not update_after_clone)
191
192
192 @reraise_safe_exceptions
193 @reraise_safe_exceptions
193 def commitctx(
194 def commitctx(
194 self, wire, message, parents, commit_time, commit_timezone,
195 self, wire, message, parents, commit_time, commit_timezone,
195 user, files, extra, removed, updated):
196 user, files, extra, removed, updated):
196
197
197 def _filectxfn(_repo, memctx, path):
198 def _filectxfn(_repo, memctx, path):
198 """
199 """
199 Marks given path as added/changed/removed in a given _repo. This is
200 Marks given path as added/changed/removed in a given _repo. This is
200 for internal mercurial commit function.
201 for internal mercurial commit function.
201 """
202 """
202
203
203 # check if this path is removed
204 # check if this path is removed
204 if path in removed:
205 if path in removed:
205 # returning None is a way to mark node for removal
206 # returning None is a way to mark node for removal
206 return None
207 return None
207
208
208 # check if this path is added
209 # check if this path is added
209 for node in updated:
210 for node in updated:
210 if node['path'] == path:
211 if node['path'] == path:
211 return memfilectx(
212 return memfilectx(
212 _repo,
213 _repo,
213 path=node['path'],
214 path=node['path'],
214 data=node['content'],
215 data=node['content'],
215 islink=False,
216 islink=False,
216 isexec=bool(node['mode'] & stat.S_IXUSR),
217 isexec=bool(node['mode'] & stat.S_IXUSR),
217 copied=False,
218 copied=False,
218 memctx=memctx)
219 memctx=memctx)
219
220
220 raise exceptions.AbortException(
221 raise exceptions.AbortException(
221 "Given path haven't been marked as added, "
222 "Given path haven't been marked as added, "
222 "changed or removed (%s)" % path)
223 "changed or removed (%s)" % path)
223
224
224 repo = self._factory.repo(wire)
225 repo = self._factory.repo(wire)
225
226
226 commit_ctx = memctx(
227 commit_ctx = memctx(
227 repo=repo,
228 repo=repo,
228 parents=parents,
229 parents=parents,
229 text=message,
230 text=message,
230 files=files,
231 files=files,
231 filectxfn=_filectxfn,
232 filectxfn=_filectxfn,
232 user=user,
233 user=user,
233 date=(commit_time, commit_timezone),
234 date=(commit_time, commit_timezone),
234 extra=extra)
235 extra=extra)
235
236
236 n = repo.commitctx(commit_ctx)
237 n = repo.commitctx(commit_ctx)
237 new_id = hex(n)
238 new_id = hex(n)
238
239
239 return new_id
240 return new_id
240
241
241 @reraise_safe_exceptions
242 @reraise_safe_exceptions
242 def ctx_branch(self, wire, revision):
243 def ctx_branch(self, wire, revision):
243 repo = self._factory.repo(wire)
244 repo = self._factory.repo(wire)
244 ctx = repo[revision]
245 ctx = repo[revision]
245 return ctx.branch()
246 return ctx.branch()
246
247
247 @reraise_safe_exceptions
248 @reraise_safe_exceptions
248 def ctx_children(self, wire, revision):
249 def ctx_children(self, wire, revision):
249 repo = self._factory.repo(wire)
250 repo = self._factory.repo(wire)
250 ctx = repo[revision]
251 ctx = repo[revision]
251 return [child.rev() for child in ctx.children()]
252 return [child.rev() for child in ctx.children()]
252
253
253 @reraise_safe_exceptions
254 @reraise_safe_exceptions
254 def ctx_date(self, wire, revision):
255 def ctx_date(self, wire, revision):
255 repo = self._factory.repo(wire)
256 repo = self._factory.repo(wire)
256 ctx = repo[revision]
257 ctx = repo[revision]
257 return ctx.date()
258 return ctx.date()
258
259
259 @reraise_safe_exceptions
260 @reraise_safe_exceptions
260 def ctx_description(self, wire, revision):
261 def ctx_description(self, wire, revision):
261 repo = self._factory.repo(wire)
262 repo = self._factory.repo(wire)
262 ctx = repo[revision]
263 ctx = repo[revision]
263 return ctx.description()
264 return ctx.description()
264
265
265 @reraise_safe_exceptions
266 @reraise_safe_exceptions
266 def ctx_diff(
267 def ctx_diff(
267 self, wire, revision, git=True, ignore_whitespace=True, context=3):
268 self, wire, revision, git=True, ignore_whitespace=True, context=3):
268 repo = self._factory.repo(wire)
269 repo = self._factory.repo(wire)
269 ctx = repo[revision]
270 ctx = repo[revision]
270 result = ctx.diff(
271 result = ctx.diff(
271 git=git, ignore_whitespace=ignore_whitespace, context=context)
272 git=git, ignore_whitespace=ignore_whitespace, context=context)
272 return list(result)
273 return list(result)
273
274
274 @reraise_safe_exceptions
275 @reraise_safe_exceptions
275 def ctx_files(self, wire, revision):
276 def ctx_files(self, wire, revision):
276 repo = self._factory.repo(wire)
277 repo = self._factory.repo(wire)
277 ctx = repo[revision]
278 ctx = repo[revision]
278 return ctx.files()
279 return ctx.files()
279
280
280 @reraise_safe_exceptions
281 @reraise_safe_exceptions
281 def ctx_list(self, path, revision):
282 def ctx_list(self, path, revision):
282 repo = self._factory.repo(path)
283 repo = self._factory.repo(path)
283 ctx = repo[revision]
284 ctx = repo[revision]
284 return list(ctx)
285 return list(ctx)
285
286
286 @reraise_safe_exceptions
287 @reraise_safe_exceptions
287 def ctx_parents(self, wire, revision):
288 def ctx_parents(self, wire, revision):
288 repo = self._factory.repo(wire)
289 repo = self._factory.repo(wire)
289 ctx = repo[revision]
290 ctx = repo[revision]
290 return [parent.rev() for parent in ctx.parents()]
291 return [parent.rev() for parent in ctx.parents()]
291
292
292 @reraise_safe_exceptions
293 @reraise_safe_exceptions
293 def ctx_phase(self, wire, revision):
294 def ctx_phase(self, wire, revision):
294 repo = self._factory.repo(wire)
295 repo = self._factory.repo(wire)
295 ctx = repo[revision]
296 ctx = repo[revision]
296 # public=0, draft=1, secret=3
297 # public=0, draft=1, secret=3
297 return ctx.phase()
298 return ctx.phase()
298
299
299 @reraise_safe_exceptions
300 @reraise_safe_exceptions
300 def ctx_obsolete(self, wire, revision):
301 def ctx_obsolete(self, wire, revision):
301 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
302 ctx = repo[revision]
303 ctx = repo[revision]
303 return ctx.obsolete()
304 return ctx.obsolete()
304
305
305 @reraise_safe_exceptions
306 @reraise_safe_exceptions
306 def ctx_hidden(self, wire, revision):
307 def ctx_hidden(self, wire, revision):
307 repo = self._factory.repo(wire)
308 repo = self._factory.repo(wire)
308 ctx = repo[revision]
309 ctx = repo[revision]
309 return ctx.hidden()
310 return ctx.hidden()
310
311
311 @reraise_safe_exceptions
312 @reraise_safe_exceptions
312 def ctx_substate(self, wire, revision):
313 def ctx_substate(self, wire, revision):
313 repo = self._factory.repo(wire)
314 repo = self._factory.repo(wire)
314 ctx = repo[revision]
315 ctx = repo[revision]
315 return ctx.substate
316 return ctx.substate
316
317
317 @reraise_safe_exceptions
318 @reraise_safe_exceptions
318 def ctx_status(self, wire, revision):
319 def ctx_status(self, wire, revision):
319 repo = self._factory.repo(wire)
320 repo = self._factory.repo(wire)
320 ctx = repo[revision]
321 ctx = repo[revision]
321 status = repo[ctx.p1().node()].status(other=ctx.node())
322 status = repo[ctx.p1().node()].status(other=ctx.node())
322 # object of status (odd, custom named tuple in mercurial) is not
323 # object of status (odd, custom named tuple in mercurial) is not
323 # correctly serializable, we make it a list, as the underlying
324 # correctly serializable, we make it a list, as the underlying
324 # API expects this to be a list
325 # API expects this to be a list
325 return list(status)
326 return list(status)
326
327
327 @reraise_safe_exceptions
328 @reraise_safe_exceptions
328 def ctx_user(self, wire, revision):
329 def ctx_user(self, wire, revision):
329 repo = self._factory.repo(wire)
330 repo = self._factory.repo(wire)
330 ctx = repo[revision]
331 ctx = repo[revision]
331 return ctx.user()
332 return ctx.user()
332
333
333 @reraise_safe_exceptions
334 @reraise_safe_exceptions
334 def check_url(self, url, config):
335 def check_url(self, url, config):
335 _proto = None
336 _proto = None
336 if '+' in url[:url.find('://')]:
337 if '+' in url[:url.find('://')]:
337 _proto = url[0:url.find('+')]
338 _proto = url[0:url.find('+')]
338 url = url[url.find('+') + 1:]
339 url = url[url.find('+') + 1:]
339 handlers = []
340 handlers = []
340 url_obj = url_parser(url)
341 url_obj = url_parser(url)
341 test_uri, authinfo = url_obj.authinfo()
342 test_uri, authinfo = url_obj.authinfo()
342 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
343 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
343 url_obj.query = obfuscate_qs(url_obj.query)
344 url_obj.query = obfuscate_qs(url_obj.query)
344
345
345 cleaned_uri = str(url_obj)
346 cleaned_uri = str(url_obj)
346 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
347 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
347
348
348 if authinfo:
349 if authinfo:
349 # create a password manager
350 # create a password manager
350 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
351 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
351 passmgr.add_password(*authinfo)
352 passmgr.add_password(*authinfo)
352
353
353 handlers.extend((httpbasicauthhandler(passmgr),
354 handlers.extend((httpbasicauthhandler(passmgr),
354 httpdigestauthhandler(passmgr)))
355 httpdigestauthhandler(passmgr)))
355
356
356 o = urllib2.build_opener(*handlers)
357 o = urllib2.build_opener(*handlers)
357 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
358 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
358 ('Accept', 'application/mercurial-0.1')]
359 ('Accept', 'application/mercurial-0.1')]
359
360
360 q = {"cmd": 'between'}
361 q = {"cmd": 'between'}
361 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
362 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
362 qs = '?%s' % urllib.urlencode(q)
363 qs = '?%s' % urllib.urlencode(q)
363 cu = "%s%s" % (test_uri, qs)
364 cu = "%s%s" % (test_uri, qs)
364 req = urllib2.Request(cu, None, {})
365 req = urllib2.Request(cu, None, {})
365
366
366 try:
367 try:
367 log.debug("Trying to open URL %s", cleaned_uri)
368 log.debug("Trying to open URL %s", cleaned_uri)
368 resp = o.open(req)
369 resp = o.open(req)
369 if resp.code != 200:
370 if resp.code != 200:
370 raise exceptions.URLError('Return Code is not 200')
371 raise exceptions.URLError('Return Code is not 200')
371 except Exception as e:
372 except Exception as e:
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
373 # means it cannot be cloned
374 # means it cannot be cloned
374 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
375 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
375
376
376 # now check if it's a proper hg repo, but don't do it for svn
377 # now check if it's a proper hg repo, but don't do it for svn
377 try:
378 try:
378 if _proto == 'svn':
379 if _proto == 'svn':
379 pass
380 pass
380 else:
381 else:
381 # check for pure hg repos
382 # check for pure hg repos
382 log.debug(
383 log.debug(
383 "Verifying if URL is a Mercurial repository: %s",
384 "Verifying if URL is a Mercurial repository: %s",
384 cleaned_uri)
385 cleaned_uri)
385 httppeer(make_ui_from_config(config), url).lookup('tip')
386 httppeer(make_ui_from_config(config), url).lookup('tip')
386 except Exception as e:
387 except Exception as e:
387 log.warning("URL is not a valid Mercurial repository: %s",
388 log.warning("URL is not a valid Mercurial repository: %s",
388 cleaned_uri)
389 cleaned_uri)
389 raise exceptions.URLError(
390 raise exceptions.URLError(
390 "url [%s] does not look like an hg repo org_exc: %s"
391 "url [%s] does not look like an hg repo org_exc: %s"
391 % (cleaned_uri, e))
392 % (cleaned_uri, e))
392
393
393 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
394 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
394 return True
395 return True
395
396
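
check_url() never touches the factory, so it can be exercised in isolation to validate a remote before a clone or pull. A sketch with an illustrative URL and an empty wire config, assuming vcsserver.hg is importable:

from vcsserver.hg import HgRemote  # assumed module path

remote = HgRemote(factory=None)  # check_url() does not use the factory
try:
    remote.check_url('https://www.mercurial-scm.org/repo/hg', config=[])
    print('URL answers like a mercurial repository')
except Exception as e:
    # unreachable URLs and non-hg endpoints are reported as vcsserver URL errors
    print('URL not usable as a mercurial remote: %s' % e)
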
396 @reraise_safe_exceptions
397 @reraise_safe_exceptions
397 def diff(
398 def diff(
398 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
399 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
399 context):
400 context):
400 repo = self._factory.repo(wire)
401 repo = self._factory.repo(wire)
401
402
402 if file_filter:
403 if file_filter:
403 match_filter = match(file_filter[0], '', [file_filter[1]])
404 match_filter = match(file_filter[0], '', [file_filter[1]])
404 else:
405 else:
405 match_filter = file_filter
406 match_filter = file_filter
406 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
407 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
407
408
408 try:
409 try:
409 return "".join(patch.diff(
410 return "".join(patch.diff(
410 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
411 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
411 except RepoLookupError:
412 except RepoLookupError:
412 raise exceptions.LookupException()
413 raise exceptions.LookupException()
413
414
414 @reraise_safe_exceptions
415 @reraise_safe_exceptions
415 def file_history(self, wire, revision, path, limit):
416 def file_history(self, wire, revision, path, limit):
416 repo = self._factory.repo(wire)
417 repo = self._factory.repo(wire)
417
418
418 ctx = repo[revision]
419 ctx = repo[revision]
419 fctx = ctx.filectx(path)
420 fctx = ctx.filectx(path)
420
421
421 def history_iter():
422 def history_iter():
422 limit_rev = fctx.rev()
423 limit_rev = fctx.rev()
423 for obj in reversed(list(fctx.filelog())):
424 for obj in reversed(list(fctx.filelog())):
424 obj = fctx.filectx(obj)
425 obj = fctx.filectx(obj)
425 if limit_rev >= obj.rev():
426 if limit_rev >= obj.rev():
426 yield obj
427 yield obj
427
428
428 history = []
429 history = []
429 for cnt, obj in enumerate(history_iter()):
430 for cnt, obj in enumerate(history_iter()):
430 if limit and cnt >= limit:
431 if limit and cnt >= limit:
431 break
432 break
432 history.append(hex(obj.node()))
433 history.append(hex(obj.node()))
433
434
434 return [x for x in history]
435 return [x for x in history]
435
436
436 @reraise_safe_exceptions
437 @reraise_safe_exceptions
437 def file_history_untill(self, wire, revision, path, limit):
438 def file_history_untill(self, wire, revision, path, limit):
438 repo = self._factory.repo(wire)
439 repo = self._factory.repo(wire)
439 ctx = repo[revision]
440 ctx = repo[revision]
440 fctx = ctx.filectx(path)
441 fctx = ctx.filectx(path)
441
442
442 file_log = list(fctx.filelog())
443 file_log = list(fctx.filelog())
443 if limit:
444 if limit:
444 # Limit to the last n items
445 # Limit to the last n items
445 file_log = file_log[-limit:]
446 file_log = file_log[-limit:]
446
447
447 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
448 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
448
449
449 @reraise_safe_exceptions
450 @reraise_safe_exceptions
450 def fctx_annotate(self, wire, revision, path):
451 def fctx_annotate(self, wire, revision, path):
451 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
452 ctx = repo[revision]
453 ctx = repo[revision]
453 fctx = ctx.filectx(path)
454 fctx = ctx.filectx(path)
454
455
455 result = []
456 result = []
456 for i, annotate_data in enumerate(fctx.annotate()):
457 for i, (a_line, content) in enumerate(fctx.annotate()):
457 ln_no = i + 1
458 ln_no = i + 1
458 node_info, content = annotate_data
459 sha = hex(a_line.fctx.node())
459 sha = hex(node_info[0].node())
460 result.append((ln_no, sha, content))
460 result.append((ln_no, sha, content))
461 return result
461 return result
462
462
463 @reraise_safe_exceptions
463 @reraise_safe_exceptions
464 def fctx_data(self, wire, revision, path):
464 def fctx_data(self, wire, revision, path):
465 repo = self._factory.repo(wire)
465 repo = self._factory.repo(wire)
466 ctx = repo[revision]
466 ctx = repo[revision]
467 fctx = ctx.filectx(path)
467 fctx = ctx.filectx(path)
468 return fctx.data()
468 return fctx.data()
469
469
470 @reraise_safe_exceptions
470 @reraise_safe_exceptions
471 def fctx_flags(self, wire, revision, path):
471 def fctx_flags(self, wire, revision, path):
472 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
473 ctx = repo[revision]
473 ctx = repo[revision]
474 fctx = ctx.filectx(path)
474 fctx = ctx.filectx(path)
475 return fctx.flags()
475 return fctx.flags()
476
476
477 @reraise_safe_exceptions
477 @reraise_safe_exceptions
478 def fctx_size(self, wire, revision, path):
478 def fctx_size(self, wire, revision, path):
479 repo = self._factory.repo(wire)
479 repo = self._factory.repo(wire)
480 ctx = repo[revision]
480 ctx = repo[revision]
481 fctx = ctx.filectx(path)
481 fctx = ctx.filectx(path)
482 return fctx.size()
482 return fctx.size()
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def get_all_commit_ids(self, wire, name):
485 def get_all_commit_ids(self, wire, name):
486 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
487 revs = repo.filtered(name).changelog.index
487 revs = repo.filtered(name).changelog.index
488 return map(lambda x: hex(x[7]), revs)[:-1]
488 return map(lambda x: hex(x[7]), revs)[:-1]
489
489
490 @reraise_safe_exceptions
490 @reraise_safe_exceptions
491 def get_config_value(self, wire, section, name, untrusted=False):
491 def get_config_value(self, wire, section, name, untrusted=False):
492 repo = self._factory.repo(wire)
492 repo = self._factory.repo(wire)
493 return repo.ui.config(section, name, untrusted=untrusted)
493 return repo.ui.config(section, name, untrusted=untrusted)
494
494
495 @reraise_safe_exceptions
495 @reraise_safe_exceptions
496 def get_config_bool(self, wire, section, name, untrusted=False):
496 def get_config_bool(self, wire, section, name, untrusted=False):
497 repo = self._factory.repo(wire)
497 repo = self._factory.repo(wire)
498 return repo.ui.configbool(section, name, untrusted=untrusted)
498 return repo.ui.configbool(section, name, untrusted=untrusted)
499
499
500 @reraise_safe_exceptions
500 @reraise_safe_exceptions
501 def get_config_list(self, wire, section, name, untrusted=False):
501 def get_config_list(self, wire, section, name, untrusted=False):
502 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
503 return repo.ui.configlist(section, name, untrusted=untrusted)
503 return repo.ui.configlist(section, name, untrusted=untrusted)
504
504
505 @reraise_safe_exceptions
505 @reraise_safe_exceptions
506 def is_large_file(self, wire, path):
506 def is_large_file(self, wire, path):
507 return largefiles.lfutil.isstandin(path)
507 return largefiles.lfutil.isstandin(path)
508
508
509 @reraise_safe_exceptions
509 @reraise_safe_exceptions
510 def in_largefiles_store(self, wire, sha):
510 def in_largefiles_store(self, wire, sha):
511 repo = self._factory.repo(wire)
511 repo = self._factory.repo(wire)
512 return largefiles.lfutil.instore(repo, sha)
512 return largefiles.lfutil.instore(repo, sha)
513
513
514 @reraise_safe_exceptions
514 @reraise_safe_exceptions
515 def in_user_cache(self, wire, sha):
515 def in_user_cache(self, wire, sha):
516 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
517 return largefiles.lfutil.inusercache(repo.ui, sha)
517 return largefiles.lfutil.inusercache(repo.ui, sha)
518
518
519 @reraise_safe_exceptions
519 @reraise_safe_exceptions
520 def store_path(self, wire, sha):
520 def store_path(self, wire, sha):
521 repo = self._factory.repo(wire)
521 repo = self._factory.repo(wire)
522 return largefiles.lfutil.storepath(repo, sha)
522 return largefiles.lfutil.storepath(repo, sha)
523
523
524 @reraise_safe_exceptions
524 @reraise_safe_exceptions
525 def link(self, wire, sha, path):
525 def link(self, wire, sha, path):
526 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
527 largefiles.lfutil.link(
527 largefiles.lfutil.link(
528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
529
529
530 @reraise_safe_exceptions
530 @reraise_safe_exceptions
531 def localrepository(self, wire, create=False):
531 def localrepository(self, wire, create=False):
532 self._factory.repo(wire, create=create)
532 self._factory.repo(wire, create=create)
533
533
534 @reraise_safe_exceptions
534 @reraise_safe_exceptions
535 def lookup(self, wire, revision, both):
535 def lookup(self, wire, revision, both):
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
537 if isinstance(revision, float):
537 if isinstance(revision, float):
538 revision = long(revision)
538 revision = long(revision)
539 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
540 try:
540 try:
541 ctx = repo[revision]
541 ctx = repo[revision]
542 except RepoLookupError:
542 except RepoLookupError:
543 raise exceptions.LookupException(revision)
543 raise exceptions.LookupException(revision)
544 except LookupError as e:
544 except LookupError as e:
545 raise exceptions.LookupException(e.name)
545 raise exceptions.LookupException(e.name)
546
546
547 if not both:
547 if not both:
548 return ctx.hex()
548 return ctx.hex()
549
549
550 ctx = repo[ctx.hex()]
550 ctx = repo[ctx.hex()]
551 return ctx.hex(), ctx.rev()
551 return ctx.hex(), ctx.rev()
552
552
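A short aside on the msgpack workaround in `lookup` above: large integer revisions can arrive as floats after serialization, so they are coerced back before indexing the repository. A minimal standalone sketch of that normalisation step, using a hypothetical helper name that is not part of this codebase:

    def _normalize_revision(revision):
        # msgpack can round-trip big integers as floats; coerce back to a
        # Python 2 long before using the value as a changelog revision
        if isinstance(revision, float):
            revision = long(revision)
        return revision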
553 @reraise_safe_exceptions
553 @reraise_safe_exceptions
554 def pull(self, wire, url, commit_ids=None):
554 def pull(self, wire, url, commit_ids=None):
555 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
556 remote = peer(repo, {}, url)
556 remote = peer(repo, {}, url)
557 if commit_ids:
557 if commit_ids:
558 commit_ids = [bin(commit_id) for commit_id in commit_ids]
558 commit_ids = [bin(commit_id) for commit_id in commit_ids]
559
559
560 return exchange.pull(
560 return exchange.pull(
561 repo, remote, heads=commit_ids, force=None).cgresult
561 repo, remote, heads=commit_ids, force=None).cgresult
562
562
563 @reraise_safe_exceptions
563 @reraise_safe_exceptions
564 def sync_push(self, wire, url):
565 if self.check_url(url, wire['config']):
566 repo = self._factory.repo(wire)
567 bookmarks = dict(repo._bookmarks).keys()
568 remote = peer(repo, {}, url)
569 return exchange.push(
570 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
571
572 @reraise_safe_exceptions
564 def revision(self, wire, rev):
573 def revision(self, wire, rev):
565 repo = self._factory.repo(wire)
574 repo = self._factory.repo(wire)
566 ctx = repo[rev]
575 ctx = repo[rev]
567 return ctx.rev()
576 return ctx.rev()
568
577
569 @reraise_safe_exceptions
578 @reraise_safe_exceptions
570 def rev_range(self, wire, filter):
579 def rev_range(self, wire, filter):
571 repo = self._factory.repo(wire)
580 repo = self._factory.repo(wire)
572 revisions = [rev for rev in revrange(repo, filter)]
581 revisions = [rev for rev in revrange(repo, filter)]
573 return revisions
582 return revisions
574
583
575 @reraise_safe_exceptions
584 @reraise_safe_exceptions
576 def rev_range_hash(self, wire, node):
585 def rev_range_hash(self, wire, node):
577 repo = self._factory.repo(wire)
586 repo = self._factory.repo(wire)
578
587
579 def get_revs(repo, rev_opt):
588 def get_revs(repo, rev_opt):
580 if rev_opt:
589 if rev_opt:
581 revs = revrange(repo, rev_opt)
590 revs = revrange(repo, rev_opt)
582 if len(revs) == 0:
591 if len(revs) == 0:
583 return (nullrev, nullrev)
592 return (nullrev, nullrev)
584 return max(revs), min(revs)
593 return max(revs), min(revs)
585 else:
594 else:
586 return len(repo) - 1, 0
595 return len(repo) - 1, 0
587
596
588 stop, start = get_revs(repo, [node + ':'])
597 stop, start = get_revs(repo, [node + ':'])
589 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
598 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
590 return revs
599 return revs
591
600
592 @reraise_safe_exceptions
601 @reraise_safe_exceptions
593 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
602 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
594 other_path = kwargs.pop('other_path', None)
603 other_path = kwargs.pop('other_path', None)
595
604
596 # case when we want to compare two independent repositories
605 # case when we want to compare two independent repositories
597 if other_path and other_path != wire["path"]:
606 if other_path and other_path != wire["path"]:
598 baseui = self._factory._create_config(wire["config"])
607 baseui = self._factory._create_config(wire["config"])
599 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
608 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
600 else:
609 else:
601 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
602 return list(repo.revs(rev_spec, *args))
611 return list(repo.revs(rev_spec, *args))
603
612
604 @reraise_safe_exceptions
613 @reraise_safe_exceptions
605 def strip(self, wire, revision, update, backup):
614 def strip(self, wire, revision, update, backup):
606 repo = self._factory.repo(wire)
615 repo = self._factory.repo(wire)
607 ctx = repo[revision]
616 ctx = repo[revision]
608 hgext_strip(
617 hgext_strip(
609 repo.baseui, repo, ctx.node(), update=update, backup=backup)
618 repo.baseui, repo, ctx.node(), update=update, backup=backup)
610
619
611 @reraise_safe_exceptions
620 @reraise_safe_exceptions
612 def verify(self, wire):
621 def verify(self, wire):
613 repo = self._factory.repo(wire)
622 repo = self._factory.repo(wire)
614 baseui = self._factory._create_config(wire['config'])
623 baseui = self._factory._create_config(wire['config'])
615 baseui.setconfig('ui', 'quiet', 'false')
624 baseui.setconfig('ui', 'quiet', 'false')
616 output = io.BytesIO()
625 output = io.BytesIO()
617
626
618 def write(data, **unused_kwargs):
627 def write(data, **unused_kwargs):
619 output.write(data)
628 output.write(data)
620 baseui.write = write
629 baseui.write = write
621
630
622 repo.ui = baseui
631 repo.ui = baseui
623 verify.verify(repo)
632 verify.verify(repo)
624 return output.getvalue()
633 return output.getvalue()
625
634
626 @reraise_safe_exceptions
635 @reraise_safe_exceptions
627 def tag(self, wire, name, revision, message, local, user,
636 def tag(self, wire, name, revision, message, local, user,
628 tag_time, tag_timezone):
637 tag_time, tag_timezone):
629 repo = self._factory.repo(wire)
638 repo = self._factory.repo(wire)
630 ctx = repo[revision]
639 ctx = repo[revision]
631 node = ctx.node()
640 node = ctx.node()
632
641
633 date = (tag_time, tag_timezone)
642 date = (tag_time, tag_timezone)
634 try:
643 try:
635 hg_tag.tag(repo, name, node, message, local, user, date)
644 hg_tag.tag(repo, name, node, message, local, user, date)
636 except Abort as e:
645 except Abort as e:
637 log.exception("Tag operation aborted")
646 log.exception("Tag operation aborted")
638 # Exception can contain unicode which we convert
647 # Exception can contain unicode which we convert
639 raise exceptions.AbortException(repr(e))
648 raise exceptions.AbortException(repr(e))
640
649
641 @reraise_safe_exceptions
650 @reraise_safe_exceptions
642 def tags(self, wire):
651 def tags(self, wire):
643 repo = self._factory.repo(wire)
652 repo = self._factory.repo(wire)
644 return repo.tags()
653 return repo.tags()
645
654
646 @reraise_safe_exceptions
655 @reraise_safe_exceptions
647 def update(self, wire, node=None, clean=False):
656 def update(self, wire, node=None, clean=False):
648 repo = self._factory.repo(wire)
657 repo = self._factory.repo(wire)
649 baseui = self._factory._create_config(wire['config'])
658 baseui = self._factory._create_config(wire['config'])
650 commands.update(baseui, repo, node=node, clean=clean)
659 commands.update(baseui, repo, node=node, clean=clean)
651
660
652 @reraise_safe_exceptions
661 @reraise_safe_exceptions
653 def identify(self, wire):
662 def identify(self, wire):
654 repo = self._factory.repo(wire)
663 repo = self._factory.repo(wire)
655 baseui = self._factory._create_config(wire['config'])
664 baseui = self._factory._create_config(wire['config'])
656 output = io.BytesIO()
665 output = io.BytesIO()
657 baseui.write = output.write
666 baseui.write = output.write
658 # This is required to get a full node id
667 # This is required to get a full node id
659 baseui.debugflag = True
668 baseui.debugflag = True
660 commands.identify(baseui, repo, id=True)
669 commands.identify(baseui, repo, id=True)
661
670
662 return output.getvalue()
671 return output.getvalue()
663
672
664 @reraise_safe_exceptions
673 @reraise_safe_exceptions
665 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
674 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
666 hooks=True):
675 hooks=True):
667 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
668 baseui = self._factory._create_config(wire['config'], hooks=hooks)
677 baseui = self._factory._create_config(wire['config'], hooks=hooks)
669
678
670 # Mercurial internally has a lot of logic that checks ONLY if an
679 # Mercurial internally has a lot of logic that checks ONLY if an
671 # option is defined, so we only pass the options that are set
680 # option is defined, so we only pass the options that are set
672 opts = {}
681 opts = {}
673 if bookmark:
682 if bookmark:
674 opts['bookmark'] = bookmark
683 opts['bookmark'] = bookmark
675 if branch:
684 if branch:
676 opts['branch'] = branch
685 opts['branch'] = branch
677 if revision:
686 if revision:
678 opts['rev'] = revision
687 opts['rev'] = revision
679
688
680 commands.pull(baseui, repo, source, **opts)
689 commands.pull(baseui, repo, source, **opts)
681
690
682 @reraise_safe_exceptions
691 @reraise_safe_exceptions
683 def heads(self, wire, branch=None):
692 def heads(self, wire, branch=None):
684 repo = self._factory.repo(wire)
693 repo = self._factory.repo(wire)
685 baseui = self._factory._create_config(wire['config'])
694 baseui = self._factory._create_config(wire['config'])
686 output = io.BytesIO()
695 output = io.BytesIO()
687
696
688 def write(data, **unused_kwargs):
697 def write(data, **unused_kwargs):
689 output.write(data)
698 output.write(data)
690
699
691 baseui.write = write
700 baseui.write = write
692 if branch:
701 if branch:
693 args = [branch]
702 args = [branch]
694 else:
703 else:
695 args = []
704 args = []
696 commands.heads(baseui, repo, template='{node} ', *args)
705 commands.heads(baseui, repo, template='{node} ', *args)
697
706
698 return output.getvalue()
707 return output.getvalue()
699
708
700 @reraise_safe_exceptions
709 @reraise_safe_exceptions
701 def ancestor(self, wire, revision1, revision2):
710 def ancestor(self, wire, revision1, revision2):
702 repo = self._factory.repo(wire)
711 repo = self._factory.repo(wire)
703 changelog = repo.changelog
712 changelog = repo.changelog
704 lookup = repo.lookup
713 lookup = repo.lookup
705 a = changelog.ancestor(lookup(revision1), lookup(revision2))
714 a = changelog.ancestor(lookup(revision1), lookup(revision2))
706 return hex(a)
715 return hex(a)
707
716
708 @reraise_safe_exceptions
717 @reraise_safe_exceptions
709 def push(self, wire, revisions, dest_path, hooks=True,
718 def push(self, wire, revisions, dest_path, hooks=True,
710 push_branches=False):
719 push_branches=False):
711 repo = self._factory.repo(wire)
720 repo = self._factory.repo(wire)
712 baseui = self._factory._create_config(wire['config'], hooks=hooks)
721 baseui = self._factory._create_config(wire['config'], hooks=hooks)
713 commands.push(baseui, repo, dest=dest_path, rev=revisions,
722 commands.push(baseui, repo, dest=dest_path, rev=revisions,
714 new_branch=push_branches)
723 new_branch=push_branches)
715
724
716 @reraise_safe_exceptions
725 @reraise_safe_exceptions
717 def merge(self, wire, revision):
726 def merge(self, wire, revision):
718 repo = self._factory.repo(wire)
727 repo = self._factory.repo(wire)
719 baseui = self._factory._create_config(wire['config'])
728 baseui = self._factory._create_config(wire['config'])
720 repo.ui.setconfig('ui', 'merge', 'internal:dump')
729 repo.ui.setconfig('ui', 'merge', 'internal:dump')
721
730
722 # In case sub repositories are used, Mercurial prompts the user on
731 # In case sub repositories are used, Mercurial prompts the user on
723 # merge conflicts or differing sub repository sources. By setting the
732 # merge conflicts or differing sub repository sources. By setting the
724 # interactive flag to `False`, Mercurial doesn't prompt the user but
733 # interactive flag to `False`, Mercurial doesn't prompt the user but
725 # instead uses a default value.
734 # instead uses a default value.
726 repo.ui.setconfig('ui', 'interactive', False)
735 repo.ui.setconfig('ui', 'interactive', False)
727
736
728 commands.merge(baseui, repo, rev=revision)
737 commands.merge(baseui, repo, rev=revision)
729
738
730 @reraise_safe_exceptions
739 @reraise_safe_exceptions
731 def commit(self, wire, message, username, close_branch=False):
740 def commit(self, wire, message, username, close_branch=False):
732 repo = self._factory.repo(wire)
741 repo = self._factory.repo(wire)
733 baseui = self._factory._create_config(wire['config'])
742 baseui = self._factory._create_config(wire['config'])
734 repo.ui.setconfig('ui', 'username', username)
743 repo.ui.setconfig('ui', 'username', username)
735 commands.commit(baseui, repo, message=message, close_branch=close_branch)
744 commands.commit(baseui, repo, message=message, close_branch=close_branch)
736
745
737 @reraise_safe_exceptions
746 @reraise_safe_exceptions
738 def rebase(self, wire, source=None, dest=None, abort=False):
747 def rebase(self, wire, source=None, dest=None, abort=False):
739 repo = self._factory.repo(wire)
748 repo = self._factory.repo(wire)
740 baseui = self._factory._create_config(wire['config'])
749 baseui = self._factory._create_config(wire['config'])
741 repo.ui.setconfig('ui', 'merge', 'internal:dump')
750 repo.ui.setconfig('ui', 'merge', 'internal:dump')
742 rebase.rebase(
751 rebase.rebase(
743 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
752 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
744
753
745 @reraise_safe_exceptions
754 @reraise_safe_exceptions
746 def bookmark(self, wire, bookmark, revision=None):
755 def bookmark(self, wire, bookmark, revision=None):
747 repo = self._factory.repo(wire)
756 repo = self._factory.repo(wire)
748 baseui = self._factory._create_config(wire['config'])
757 baseui = self._factory._create_config(wire['config'])
749 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
758 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,63 +1,63 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 import mercurial.demandimport
23 from mercurial import demandimport
24 # patch demandimport, due to a bug in mercurial that always triggers
24 # patch demandimport, due to a bug in mercurial that always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
26 demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39 from mercurial import tags as hg_tag
39 from mercurial import tags as hg_tag
40
40
41 from mercurial.commands import clone, nullid, pull
41 from mercurial.commands import clone, nullid, pull
42 from mercurial.context import memctx, memfilectx
42 from mercurial.context import memctx, memfilectx
43 from mercurial.error import (
43 from mercurial.error import (
44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
45 RequirementError)
45 RequirementError)
46 from mercurial.hgweb import hgweb_mod
46 from mercurial.hgweb import hgweb_mod
47 from mercurial.localrepo import localrepository
47 from mercurial.localrepo import localrepository
48 from mercurial.match import match
48 from mercurial.match import match
49 from mercurial.mdiff import diffopts
49 from mercurial.mdiff import diffopts
50 from mercurial.node import bin, hex
50 from mercurial.node import bin, hex
51 from mercurial.encoding import tolocal
51 from mercurial.encoding import tolocal
52 from mercurial.discovery import findcommonoutgoing
52 from mercurial.discovery import findcommonoutgoing
53 from mercurial.hg import peer
53 from mercurial.hg import peer
54 from mercurial.httppeer import httppeer
54 from mercurial.httppeer import httppeer
55 from mercurial.util import url as hg_url
55 from mercurial.util import url as hg_url
56 from mercurial.scmutil import revrange
56 from mercurial.scmutil import revrange
57 from mercurial.node import nullrev
57 from mercurial.node import nullrev
58 from mercurial import exchange
58 from mercurial import exchange
59 from hgext import largefiles
59 from hgext import largefiles
60
60
61 # those auth handlers are patched for a python 2.6.5 bug causing
61 # those auth handlers are patched for a python 2.6.5 bug causing
62 # infinite looping when given invalid resources
62 # infinite looping when given invalid resources
63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto.capabilities = wrapper
39 lfproto.capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto.capabilities
44 wrapped_capabilities = lfproto.capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(repo, proto):
47 def _dynamic_capabilities(repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 else:
55 else:
56 logger.debug('Extension largefiles disabled')
56 logger.debug('Extension largefiles disabled')
57 calc_capabilities = lfproto.capabilitiesorig
57 calc_capabilities = lfproto.capabilitiesorig
58 return calc_capabilities(repo, proto)
58 return calc_capabilities(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
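For orientation, a sketch of how the two patch functions in this module are expected to be invoked at service start-up. The call site shown is an assumption (the diff only shows `hgpatches` being imported by the WSGI entry point), not something this change introduces:

    from vcsserver import hgpatches

    # advertise the largefiles capability only when the extension is enabled
    hgpatches.patch_largefiles_capabilities()
    # map hg/git/svn subrepo types to NoOpSubrepo so subrepos become no-ops
    hgpatches.patch_subrepo_type_mapping()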
62
62
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from hgcompat import subrepo
65 from hgcompat import subrepo
66 from exceptions import SubrepoMergeException
66 from exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
69
70 def __init__(self, ctx, path, *args, **kwargs):
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
71 """Initialize abstractsubrepo part
72
72
73 ``ctx`` is the context referring to this subrepository in the
73 ``ctx`` is the context referring to this subrepository in the
74 parent repository.
74 parent repository.
75
75
76 ``path`` is the path to this subrepository as seen from
76 ``path`` is the path to this subrepository as seen from
77 innermost repository.
77 innermost repository.
78 """
78 """
79 self.ui = ctx.repo().ui
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
80 self._ctx = ctx
81 self._path = path
81 self._path = path
82
82
83 def storeclean(self, path):
83 def storeclean(self, path):
84 """
84 """
85 returns true if the repository has not changed since it was last
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
86 cloned from or pushed to a given repository.
87 """
87 """
88 return True
88 return True
89
89
90 def dirty(self, ignoreupdate=False):
90 def dirty(self, ignoreupdate=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
94 """
95 return False
95 return False
96
96
97 def basestate(self):
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
99 state and working directory modifications"""
100 substate = subrepo.state(self._ctx, self.ui)
100 substate = subrepo.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
102 return rev
103
103
104 def remove(self):
104 def remove(self):
105 """remove the subrepo
105 """remove the subrepo
106
106
107 (should verify the dirstate is not dirty first)
107 (should verify the dirstate is not dirty first)
108 """
108 """
109 pass
109 pass
110
110
111 def get(self, state, overwrite=False):
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
112 """run whatever commands are needed to put the subrepo into
113 this state
113 this state
114 """
114 """
115 pass
115 pass
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
119 raise SubrepoMergeException()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
123
123
124 This may be a no-op on some systems.
124 This may be a no-op on some systems.
125 """
125 """
126 pass
126 pass
127
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
130 subrepo.types = {
131 'hg': NoOpSubrepo,
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
133 'svn': NoOpSubrepo
134 }
134 }
@@ -1,482 +1,482 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2017 RodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import json
23 import json
24 import logging
24 import logging
25 import collections
25 import collections
26 import importlib
26 import importlib
27 import subprocess
27 import subprocess
28
28
29 from httplib import HTTPConnection
29 from httplib import HTTPConnection
30
30
31
31
32 import mercurial.scmutil
32 import mercurial.scmutil
33 import mercurial.node
33 import mercurial.node
34 import simplejson as json
34 import simplejson as json
35
35
36 from vcsserver import exceptions
36 from vcsserver import exceptions
37
37
38 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
39
39
40
40
41 class HooksHttpClient(object):
41 class HooksHttpClient(object):
42 connection = None
42 connection = None
43
43
44 def __init__(self, hooks_uri):
44 def __init__(self, hooks_uri):
45 self.hooks_uri = hooks_uri
45 self.hooks_uri = hooks_uri
46
46
47 def __call__(self, method, extras):
47 def __call__(self, method, extras):
48 connection = HTTPConnection(self.hooks_uri)
48 connection = HTTPConnection(self.hooks_uri)
49 body = self._serialize(method, extras)
49 body = self._serialize(method, extras)
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 response = connection.getresponse()
51 response = connection.getresponse()
52 return json.loads(response.read())
52 return json.loads(response.read())
53
53
54 def _serialize(self, hook_name, extras):
54 def _serialize(self, hook_name, extras):
55 data = {
55 data = {
56 'method': hook_name,
56 'method': hook_name,
57 'extras': extras
57 'extras': extras
58 }
58 }
59 return json.dumps(data)
59 return json.dumps(data)
60
60
61
61
62 class HooksDummyClient(object):
62 class HooksDummyClient(object):
63 def __init__(self, hooks_module):
63 def __init__(self, hooks_module):
64 self._hooks_module = importlib.import_module(hooks_module)
64 self._hooks_module = importlib.import_module(hooks_module)
65
65
66 def __call__(self, hook_name, extras):
66 def __call__(self, hook_name, extras):
67 with self._hooks_module.Hooks() as hooks:
67 with self._hooks_module.Hooks() as hooks:
68 return getattr(hooks, hook_name)(extras)
68 return getattr(hooks, hook_name)(extras)
69
69
70
70
71 class RemoteMessageWriter(object):
71 class RemoteMessageWriter(object):
72 """Writer base class."""
72 """Writer base class."""
73 def write(self, message):
73 def write(self, message):
74 raise NotImplementedError()
74 raise NotImplementedError()
75
75
76
76
77 class HgMessageWriter(RemoteMessageWriter):
77 class HgMessageWriter(RemoteMessageWriter):
78 """Writer that knows how to send messages to mercurial clients."""
78 """Writer that knows how to send messages to mercurial clients."""
79
79
80 def __init__(self, ui):
80 def __init__(self, ui):
81 self.ui = ui
81 self.ui = ui
82
82
83 def write(self, message):
83 def write(self, message):
84 # TODO: Check why the quiet flag is set by default.
84 # TODO: Check why the quiet flag is set by default.
85 old = self.ui.quiet
85 old = self.ui.quiet
86 self.ui.quiet = False
86 self.ui.quiet = False
87 self.ui.status(message.encode('utf-8'))
87 self.ui.status(message.encode('utf-8'))
88 self.ui.quiet = old
88 self.ui.quiet = old
89
89
90
90
91 class GitMessageWriter(RemoteMessageWriter):
91 class GitMessageWriter(RemoteMessageWriter):
92 """Writer that knows how to send messages to git clients."""
92 """Writer that knows how to send messages to git clients."""
93
93
94 def __init__(self, stdout=None):
94 def __init__(self, stdout=None):
95 self.stdout = stdout or sys.stdout
95 self.stdout = stdout or sys.stdout
96
96
97 def write(self, message):
97 def write(self, message):
98 self.stdout.write(message.encode('utf-8'))
98 self.stdout.write(message.encode('utf-8'))
99
99
100
100
101 def _handle_exception(result):
101 def _handle_exception(result):
102 exception_class = result.get('exception')
102 exception_class = result.get('exception')
103 exception_traceback = result.get('exception_traceback')
103 exception_traceback = result.get('exception_traceback')
104
104
105 if exception_traceback:
105 if exception_traceback:
106 log.error('Got traceback from remote call:%s', exception_traceback)
106 log.error('Got traceback from remote call:%s', exception_traceback)
107
107
108 if exception_class == 'HTTPLockedRC':
108 if exception_class == 'HTTPLockedRC':
109 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 raise exceptions.RepositoryLockedException(*result['exception_args'])
110 elif exception_class == 'RepositoryError':
110 elif exception_class == 'RepositoryError':
111 raise exceptions.VcsException(*result['exception_args'])
111 raise exceptions.VcsException(*result['exception_args'])
112 elif exception_class:
112 elif exception_class:
113 raise Exception('Got remote exception "%s" with args "%s"' %
113 raise Exception('Got remote exception "%s" with args "%s"' %
114 (exception_class, result['exception_args']))
114 (exception_class, result['exception_args']))
115
115
116
116
117 def _get_hooks_client(extras):
117 def _get_hooks_client(extras):
118 if 'hooks_uri' in extras:
118 if 'hooks_uri' in extras:
119 protocol = extras.get('hooks_protocol')
119 protocol = extras.get('hooks_protocol')
120 return HooksHttpClient(extras['hooks_uri'])
120 return HooksHttpClient(extras['hooks_uri'])
121 else:
121 else:
122 return HooksDummyClient(extras['hooks_module'])
122 return HooksDummyClient(extras['hooks_module'])
123
123
124
124
125 def _call_hook(hook_name, extras, writer):
125 def _call_hook(hook_name, extras, writer):
126 hooks = _get_hooks_client(extras)
126 hooks = _get_hooks_client(extras)
127 result = hooks(hook_name, extras)
127 result = hooks(hook_name, extras)
128 log.debug('Hooks got result: %s', result)
128 log.debug('Hooks got result: %s', result)
129 writer.write(result['output'])
129 writer.write(result['output'])
130 _handle_exception(result)
130 _handle_exception(result)
131
131
132 return result['status']
132 return result['status']
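For context, a rough sketch of how the pieces above cooperate when the HTTP transport is configured; the address and the extras payload are illustrative assumptions only:

    extras = {'hooks_uri': '127.0.0.1:9000', 'hooks': ['push'], 'repository': 'repo1'}
    hooks = _get_hooks_client(extras)    # picks HooksHttpClient because hooks_uri is set
    result = hooks('pre_push', extras)   # POSTs {'method': ..., 'extras': ...} as JSON
    _handle_exception(result)            # re-raises remote exceptions locally
    status = result['status']            # non-zero status rejects the operation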
133
133
134
134
135 def _extras_from_ui(ui):
135 def _extras_from_ui(ui):
136 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
136 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
137 if not hook_data:
137 if not hook_data:
138 # maybe it's in the environment?
138 # maybe it's in the environment?
139 env_hook_data = os.environ.get('RC_SCM_DATA')
139 env_hook_data = os.environ.get('RC_SCM_DATA')
140 if env_hook_data:
140 if env_hook_data:
141 hook_data = env_hook_data
141 hook_data = env_hook_data
142
142
143 extras = {}
143 extras = {}
144 if hook_data:
144 if hook_data:
145 extras = json.loads(hook_data)
145 extras = json.loads(hook_data)
146 return extras
146 return extras
147
147
148
148
149 def _rev_range_hash(repo, node):
149 def _rev_range_hash(repo, node):
150
150
151 commits = []
151 commits = []
152 for rev in xrange(repo[node], len(repo)):
152 for rev in xrange(repo[node], len(repo)):
153 ctx = repo[rev]
153 ctx = repo[rev]
154 commit_id = mercurial.node.hex(ctx.node())
154 commit_id = mercurial.node.hex(ctx.node())
155 branch = ctx.branch()
155 branch = ctx.branch()
156 commits.append((commit_id, branch))
156 commits.append((commit_id, branch))
157
157
158 return commits
158 return commits
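To make the data shape concrete: for a push adding two commits on the `default` branch, `_rev_range_hash` would return something like the list below (hashes invented); `pre_push` then groups these per branch and `post_push` flattens them into `commit_ids`:

    [('b71d1b1dba0c8e7ad1d5c59f9eba8b6a6b0f3e11', 'default'),
     ('9c2f1f0ab7e6d5c4b3a291807f6e5d4c3b2a1900', 'default')]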
159
159
160
160
161 def repo_size(ui, repo, **kwargs):
161 def repo_size(ui, repo, **kwargs):
162 extras = _extras_from_ui(ui)
162 extras = _extras_from_ui(ui)
163 return _call_hook('repo_size', extras, HgMessageWriter(ui))
163 return _call_hook('repo_size', extras, HgMessageWriter(ui))
164
164
165
165
166 def pre_pull(ui, repo, **kwargs):
166 def pre_pull(ui, repo, **kwargs):
167 extras = _extras_from_ui(ui)
167 extras = _extras_from_ui(ui)
168 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
168 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
169
169
170
170
171 def pre_pull_ssh(ui, repo, **kwargs):
171 def pre_pull_ssh(ui, repo, **kwargs):
172 extras = _extras_from_ui(ui)
172 extras = _extras_from_ui(ui)
173 if extras and extras.get('SSH'):
173 if extras and extras.get('SSH'):
174 return pre_pull(ui, repo, **kwargs)
174 return pre_pull(ui, repo, **kwargs)
175 return 0
175 return 0
176
176
177
177
178 def post_pull(ui, repo, **kwargs):
178 def post_pull(ui, repo, **kwargs):
179 extras = _extras_from_ui(ui)
179 extras = _extras_from_ui(ui)
180 return _call_hook('post_pull', extras, HgMessageWriter(ui))
180 return _call_hook('post_pull', extras, HgMessageWriter(ui))
181
181
182
182
183 def post_pull_ssh(ui, repo, **kwargs):
183 def post_pull_ssh(ui, repo, **kwargs):
184 extras = _extras_from_ui(ui)
184 extras = _extras_from_ui(ui)
185 if extras and extras.get('SSH'):
185 if extras and extras.get('SSH'):
186 return post_pull(ui, repo, **kwargs)
186 return post_pull(ui, repo, **kwargs)
187 return 0
187 return 0
188
188
189
189
190 def pre_push(ui, repo, node=None, **kwargs):
190 def pre_push(ui, repo, node=None, **kwargs):
191 extras = _extras_from_ui(ui)
191 extras = _extras_from_ui(ui)
192
192
193 rev_data = []
193 rev_data = []
194 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
194 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
195 branches = collections.defaultdict(list)
195 branches = collections.defaultdict(list)
196 for commit_id, branch in _rev_range_hash(repo, node):
196 for commit_id, branch in _rev_range_hash(repo, node):
197 branches[branch].append(commit_id)
197 branches[branch].append(commit_id)
198
198
199 for branch, commits in branches.iteritems():
199 for branch, commits in branches.iteritems():
200 old_rev = kwargs.get('node_last') or commits[0]
200 old_rev = kwargs.get('node_last') or commits[0]
201 rev_data.append({
201 rev_data.append({
202 'old_rev': old_rev,
202 'old_rev': old_rev,
203 'new_rev': commits[-1],
203 'new_rev': commits[-1],
204 'ref': '',
204 'ref': '',
205 'type': 'branch',
205 'type': 'branch',
206 'name': branch,
206 'name': branch,
207 })
207 })
208
208
209 extras['commit_ids'] = rev_data
209 extras['commit_ids'] = rev_data
210 return _call_hook('pre_push', extras, HgMessageWriter(ui))
210 return _call_hook('pre_push', extras, HgMessageWriter(ui))
211
211
212
212
213 def pre_push_ssh(ui, repo, node=None, **kwargs):
213 def pre_push_ssh(ui, repo, node=None, **kwargs):
214 if _extras_from_ui(ui).get('SSH'):
214 if _extras_from_ui(ui).get('SSH'):
215 return pre_push(ui, repo, node, **kwargs)
215 return pre_push(ui, repo, node, **kwargs)
216
216
217 return 0
217 return 0
218
218
219
219
220 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
220 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
221 extras = _extras_from_ui(ui)
221 extras = _extras_from_ui(ui)
222 if extras.get('SSH'):
222 if extras.get('SSH'):
223 permission = extras['SSH_PERMISSIONS']
223 permission = extras['SSH_PERMISSIONS']
224
224
225 if 'repository.write' == permission or 'repository.admin' == permission:
225 if 'repository.write' == permission or 'repository.admin' == permission:
226 return 0
226 return 0
227
227
228 # non-zero ret code
228 # non-zero ret code
229 return 1
229 return 1
230
230
231 return 0
231 return 0
232
232
233
233
234 def post_push(ui, repo, node, **kwargs):
234 def post_push(ui, repo, node, **kwargs):
235 extras = _extras_from_ui(ui)
235 extras = _extras_from_ui(ui)
236
236
237 commit_ids = []
237 commit_ids = []
238 branches = []
238 branches = []
239 bookmarks = []
239 bookmarks = []
240 tags = []
240 tags = []
241
241
242 for commit_id, branch in _rev_range_hash(repo, node):
242 for commit_id, branch in _rev_range_hash(repo, node):
243 commit_ids.append(commit_id)
243 commit_ids.append(commit_id)
244 if branch not in branches:
244 if branch not in branches:
245 branches.append(branch)
245 branches.append(branch)
246
246
247 if hasattr(ui, '_rc_pushkey_branches'):
247 if hasattr(ui, '_rc_pushkey_branches'):
248 bookmarks = ui._rc_pushkey_branches
248 bookmarks = ui._rc_pushkey_branches
249
249
250 extras['commit_ids'] = commit_ids
250 extras['commit_ids'] = commit_ids
251 extras['new_refs'] = {
251 extras['new_refs'] = {
252 'branches': branches,
252 'branches': branches,
253 'bookmarks': bookmarks,
253 'bookmarks': bookmarks,
254 'tags': tags
254 'tags': tags
255 }
255 }
256
256
257 return _call_hook('post_push', extras, HgMessageWriter(ui))
257 return _call_hook('post_push', extras, HgMessageWriter(ui))
258
258
259
259
260 def post_push_ssh(ui, repo, node, **kwargs):
260 def post_push_ssh(ui, repo, node, **kwargs):
261 if _extras_from_ui(ui).get('SSH'):
261 if _extras_from_ui(ui).get('SSH'):
262 return post_push(ui, repo, node, **kwargs)
262 return post_push(ui, repo, node, **kwargs)
263 return 0
263 return 0
264
264
265
265
266 def key_push(ui, repo, **kwargs):
266 def key_push(ui, repo, **kwargs):
267 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
267 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
268 # store new bookmarks in our UI object propagated later to post_push
268 # store new bookmarks in our UI object propagated later to post_push
269 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
269 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
270 return
270 return
271
271
272
272
273 # backward compat
273 # backward compat
274 log_pull_action = post_pull
274 log_pull_action = post_pull
275
275
276 # backward compat
276 # backward compat
277 log_push_action = post_push
277 log_push_action = post_push
278
278
279
279
280 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
280 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
281 """
281 """
282 Old hook name: keep here for backward compatibility.
282 Old hook name: keep here for backward compatibility.
283
283
284 This is only required when the installed git hooks are not upgraded.
284 This is only required when the installed git hooks are not upgraded.
285 """
285 """
286 pass
286 pass
287
287
288
288
289 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
289 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
290 """
290 """
291 Old hook name: keep here for backward compatibility.
291 Old hook name: keep here for backward compatibility.
292
292
293 This is only required when the installed git hooks are not upgraded.
293 This is only required when the installed git hooks are not upgraded.
294 """
294 """
295 pass
295 pass
296
296
297
297
298 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
298 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
299
299
300
300
301 def git_pre_pull(extras):
301 def git_pre_pull(extras):
302 """
302 """
303 Pre pull hook.
303 Pre pull hook.
304
304
305 :param extras: dictionary containing the keys defined in simplevcs
305 :param extras: dictionary containing the keys defined in simplevcs
306 :type extras: dict
306 :type extras: dict
307
307
308 :return: status code of the hook. 0 for success.
308 :return: status code of the hook. 0 for success.
309 :rtype: int
309 :rtype: int
310 """
310 """
311 if 'pull' not in extras['hooks']:
311 if 'pull' not in extras['hooks']:
312 return HookResponse(0, '')
312 return HookResponse(0, '')
313
313
314 stdout = io.BytesIO()
314 stdout = io.BytesIO()
315 try:
315 try:
316 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
316 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
317 except Exception as error:
317 except Exception as error:
318 status = 128
318 status = 128
319 stdout.write('ERROR: %s\n' % str(error))
319 stdout.write('ERROR: %s\n' % str(error))
320
320
321 return HookResponse(status, stdout.getvalue())
321 return HookResponse(status, stdout.getvalue())
322
322
323
323
324 def git_post_pull(extras):
324 def git_post_pull(extras):
325 """
325 """
326 Post pull hook.
326 Post pull hook.
327
327
328 :param extras: dictionary containing the keys defined in simplevcs
328 :param extras: dictionary containing the keys defined in simplevcs
329 :type extras: dict
329 :type extras: dict
330
330
331 :return: status code of the hook. 0 for success.
331 :return: status code of the hook. 0 for success.
332 :rtype: int
332 :rtype: int
333 """
333 """
334 if 'pull' not in extras['hooks']:
334 if 'pull' not in extras['hooks']:
335 return HookResponse(0, '')
335 return HookResponse(0, '')
336
336
337 stdout = io.BytesIO()
337 stdout = io.BytesIO()
338 try:
338 try:
339 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
339 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
340 except Exception as error:
340 except Exception as error:
341 status = 128
341 status = 128
342 stdout.write('ERROR: %s\n' % error)
342 stdout.write('ERROR: %s\n' % error)
343
343
344 return HookResponse(status, stdout.getvalue())
344 return HookResponse(status, stdout.getvalue())
345
345
346
346
347 def _parse_git_ref_lines(revision_lines):
347 def _parse_git_ref_lines(revision_lines):
348 rev_data = []
348 rev_data = []
349 for revision_line in revision_lines or []:
349 for revision_line in revision_lines or []:
350 old_rev, new_rev, ref = revision_line.strip().split(' ')
350 old_rev, new_rev, ref = revision_line.strip().split(' ')
351 ref_data = ref.split('/', 2)
351 ref_data = ref.split('/', 2)
352 if ref_data[1] in ('tags', 'heads'):
352 if ref_data[1] in ('tags', 'heads'):
353 rev_data.append({
353 rev_data.append({
354 'old_rev': old_rev,
354 'old_rev': old_rev,
355 'new_rev': new_rev,
355 'new_rev': new_rev,
356 'ref': ref,
356 'ref': ref,
357 'type': ref_data[1],
357 'type': ref_data[1],
358 'name': ref_data[2],
358 'name': ref_data[2],
359 })
359 })
360 return rev_data
360 return rev_data
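A worked example of the parsing above, with invented SHAs: a receive line `<old_sha> <new_sha> refs/heads/feature-x` is split on the first two slashes, kept because its second segment is `heads`, and becomes the dict below; refs outside `refs/heads/*` and `refs/tags/*` are silently skipped:

    {'old_rev': '1111111111111111111111111111111111111111',
     'new_rev': '2222222222222222222222222222222222222222',
     'ref': 'refs/heads/feature-x',
     'type': 'heads',
     'name': 'feature-x'}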
361
361
362
362
363 def git_pre_receive(unused_repo_path, revision_lines, env):
363 def git_pre_receive(unused_repo_path, revision_lines, env):
364 """
364 """
365 Pre push hook.
365 Pre push hook.
366
366
367 :param extras: dictionary containing the keys defined in simplevcs
367 :param extras: dictionary containing the keys defined in simplevcs
368 :type extras: dict
368 :type extras: dict
369
369
370 :return: status code of the hook. 0 for success.
370 :return: status code of the hook. 0 for success.
371 :rtype: int
371 :rtype: int
372 """
372 """
373 extras = json.loads(env['RC_SCM_DATA'])
373 extras = json.loads(env['RC_SCM_DATA'])
374 rev_data = _parse_git_ref_lines(revision_lines)
374 rev_data = _parse_git_ref_lines(revision_lines)
375 if 'push' not in extras['hooks']:
375 if 'push' not in extras['hooks']:
376 return 0
376 return 0
377 extras['commit_ids'] = rev_data
377 extras['commit_ids'] = rev_data
378 return _call_hook('pre_push', extras, GitMessageWriter())
378 return _call_hook('pre_push', extras, GitMessageWriter())
379
379
380
380
381 def _run_command(arguments):
381 def _run_command(arguments):
382 """
382 """
383 Run the specified command and return the stdout.
383 Run the specified command and return the stdout.
384
384
385 :param arguments: sequence of program arguments (including the program name)
385 :param arguments: sequence of program arguments (including the program name)
386 :type arguments: list[str]
386 :type arguments: list[str]
387 """
387 """
388 # TODO(skreft): refactor this method and all the other similar ones.
388 # TODO(skreft): refactor this method and all the other similar ones.
389 # Probably this should be using subprocessio.
389 # Probably this should be using subprocessio.
390 process = subprocess.Popen(
390 process = subprocess.Popen(
391 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
391 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
392 stdout, stderr = process.communicate()
392 stdout, stderr = process.communicate()
393
393
394 if process.returncode != 0:
394 if process.returncode != 0:
395 raise Exception(
395 raise Exception(
396 'Command %s exited with exit code %s: stderr:%s' % (
396 'Command %s exited with exit code %s: stderr:%s' % (
397 arguments, process.returncode, stderr))
397 arguments, process.returncode, stderr))
398
398
399 return stdout
399 return stdout
400
400
401
401
402 def git_post_receive(unused_repo_path, revision_lines, env):
402 def git_post_receive(unused_repo_path, revision_lines, env):
403 """
403 """
404 Post push hook.
404 Post push hook.
405
405
406 :param extras: dictionary containing the keys defined in simplevcs
406 :param extras: dictionary containing the keys defined in simplevcs
407 :type extras: dict
407 :type extras: dict
408
408
409 :return: status code of the hook. 0 for success.
409 :return: status code of the hook. 0 for success.
410 :rtype: int
410 :rtype: int
411 """
411 """
412 extras = json.loads(env['RC_SCM_DATA'])
412 extras = json.loads(env['RC_SCM_DATA'])
413 if 'push' not in extras['hooks']:
413 if 'push' not in extras['hooks']:
414 return 0
414 return 0
415
415
416 rev_data = _parse_git_ref_lines(revision_lines)
416 rev_data = _parse_git_ref_lines(revision_lines)
417
417
418 git_revs = []
418 git_revs = []
419
419
420 # N.B.(skreft): it is ok to just call git, as git before calling a
420 # N.B.(skreft): it is ok to just call git, as git before calling a
421 # subcommand sets the PATH environment variable so that it points to the
421 # subcommand sets the PATH environment variable so that it points to the
422 # correct version of the git executable.
422 # correct version of the git executable.
423 empty_commit_id = '0' * 40
423 empty_commit_id = '0' * 40
424 branches = []
424 branches = []
425 tags = []
425 tags = []
426 for push_ref in rev_data:
426 for push_ref in rev_data:
427 type_ = push_ref['type']
427 type_ = push_ref['type']
428
428
429 if type_ == 'heads':
429 if type_ == 'heads':
430 if push_ref['old_rev'] == empty_commit_id:
430 if push_ref['old_rev'] == empty_commit_id:
431 # starting new branch case
431 # starting new branch case
432 if push_ref['name'] not in branches:
432 if push_ref['name'] not in branches:
433 branches.append(push_ref['name'])
433 branches.append(push_ref['name'])
434
434
435 # Fix up head revision if needed
435 # Fix up head revision if needed
436 cmd = ['git', 'show', 'HEAD']
436 cmd = ['git', 'show', 'HEAD']
437 try:
437 try:
438 _run_command(cmd)
438 _run_command(cmd)
439 except Exception:
439 except Exception:
440 cmd = ['git', 'symbolic-ref', 'HEAD',
440 cmd = ['git', 'symbolic-ref', 'HEAD',
441 'refs/heads/%s' % push_ref['name']]
441 'refs/heads/%s' % push_ref['name']]
442 print("Setting default branch to %s" % push_ref['name'])
442 print("Setting default branch to %s" % push_ref['name'])
443 _run_command(cmd)
443 _run_command(cmd)
444
444
445 cmd = ['git', 'for-each-ref', '--format=%(refname)',
445 cmd = ['git', 'for-each-ref', '--format=%(refname)',
446 'refs/heads/*']
446 'refs/heads/*']
447 heads = _run_command(cmd)
447 heads = _run_command(cmd)
448 heads = heads.replace(push_ref['ref'], '')
448 heads = heads.replace(push_ref['ref'], '')
449 heads = ' '.join(head for head in heads.splitlines() if head)
449 heads = ' '.join(head for head in heads.splitlines() if head)
450 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
450 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
451 '--', push_ref['new_rev'], '--not', heads]
451 '--', push_ref['new_rev'], '--not', heads]
452 git_revs.extend(_run_command(cmd).splitlines())
452 git_revs.extend(_run_command(cmd).splitlines())
453 elif push_ref['new_rev'] == empty_commit_id:
453 elif push_ref['new_rev'] == empty_commit_id:
454 # delete branch case
454 # delete branch case
455 git_revs.append('delete_branch=>%s' % push_ref['name'])
455 git_revs.append('delete_branch=>%s' % push_ref['name'])
456 else:
456 else:
457 if push_ref['name'] not in branches:
457 if push_ref['name'] not in branches:
458 branches.append(push_ref['name'])
458 branches.append(push_ref['name'])
459
459
460 cmd = ['git', 'log',
460 cmd = ['git', 'log',
461 '{old_rev}..{new_rev}'.format(**push_ref),
461 '{old_rev}..{new_rev}'.format(**push_ref),
462 '--reverse', '--pretty=format:%H']
462 '--reverse', '--pretty=format:%H']
463 git_revs.extend(_run_command(cmd).splitlines())
463 git_revs.extend(_run_command(cmd).splitlines())
464 elif type_ == 'tags':
464 elif type_ == 'tags':
465 if push_ref['name'] not in tags:
465 if push_ref['name'] not in tags:
466 tags.append(push_ref['name'])
466 tags.append(push_ref['name'])
467 git_revs.append('tag=>%s' % push_ref['name'])
467 git_revs.append('tag=>%s' % push_ref['name'])
468
468
469 extras['commit_ids'] = git_revs
469 extras['commit_ids'] = git_revs
470 extras['new_refs'] = {
470 extras['new_refs'] = {
471 'branches': branches,
471 'branches': branches,
472 'bookmarks': [],
472 'bookmarks': [],
473 'tags': tags,
473 'tags': tags,
474 }
474 }
475
475
476 if 'repo_size' in extras['hooks']:
476 if 'repo_size' in extras['hooks']:
477 try:
477 try:
478 _call_hook('repo_size', extras, GitMessageWriter())
478 _call_hook('repo_size', extras, GitMessageWriter())
479 except Exception:
479 except Exception:
480 pass
480 pass
481
481
482 return _call_hook('post_push', extras, GitMessageWriter())
482 return _call_hook('post_push', extras, GitMessageWriter())
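To make the branch and tag bookkeeping above easier to follow, here is a small, self-contained sketch (not part of the hook itself) of what a rev_data payload can look like and which git invocations the loop derives from it. The field names mirror the keys read above; the hashes and ref names are invented for illustration, and the delete-branch case is omitted for brevity.

EMPTY_COMMIT_ID = '0' * 40

rev_data = [
    # new branch: old_rev is the all-zero id
    {'type': 'heads', 'name': 'feature-x', 'ref': 'refs/heads/feature-x',
     'old_rev': EMPTY_COMMIT_ID, 'new_rev': 'a' * 40},
    # update of an existing branch
    {'type': 'heads', 'name': 'master', 'ref': 'refs/heads/master',
     'old_rev': 'b' * 40, 'new_rev': 'c' * 40},
    # new tag
    {'type': 'tags', 'name': 'v1.0', 'ref': 'refs/tags/v1.0',
     'old_rev': EMPTY_COMMIT_ID, 'new_rev': 'c' * 40},
]

for push_ref in rev_data:
    if push_ref['type'] == 'heads' and push_ref['old_rev'] == EMPTY_COMMIT_ID:
        # new branch: list commits not reachable from the other heads
        print(['git', 'log', '--reverse', '--pretty=format:%H',
               '--', push_ref['new_rev'], '--not', '<other heads>'])
    elif push_ref['type'] == 'heads':
        # branch update: list the commits between the old and the new tip
        print(['git', 'log', '{old_rev}..{new_rev}'.format(**push_ref),
               '--reverse', '--pretty=format:%H'])
    elif push_ref['type'] == 'tags':
        # tags are recorded symbolically; no commit listing is done
        print('tag=>%s' % push_ref['name'])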
@@ -1,476 +1,478 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import base64
18 import base64
19 import locale
19 import locale
20 import logging
20 import logging
21 import uuid
21 import uuid
22 import wsgiref.util
22 import wsgiref.util
23 import traceback
23 import traceback
24 from itertools import chain
24 from itertools import chain
25
25
26 import simplejson as json
26 import simplejson as json
27 import msgpack
27 import msgpack
28 from beaker.cache import CacheManager
28 from beaker.cache import CacheManager
29 from beaker.util import parse_cache_config_options
29 from beaker.util import parse_cache_config_options
30 from pyramid.config import Configurator
30 from pyramid.config import Configurator
31 from pyramid.wsgi import wsgiapp
31 from pyramid.wsgi import wsgiapp
32
32
33 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
33 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
34 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
34 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
35 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
35 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
36 from vcsserver.echo_stub.echo_app import EchoApp
36 from vcsserver.echo_stub.echo_app import EchoApp
37 from vcsserver.exceptions import HTTPRepoLocked
37 from vcsserver.exceptions import HTTPRepoLocked
38 from vcsserver.server import VcsServer
38 from vcsserver.server import VcsServer
39
39
40 try:
40 try:
41 from vcsserver.git import GitFactory, GitRemote
41 from vcsserver.git import GitFactory, GitRemote
42 except ImportError:
42 except ImportError:
43 GitFactory = None
43 GitFactory = None
44 GitRemote = None
44 GitRemote = None
45
45
46 try:
46 try:
47 from vcsserver.hg import MercurialFactory, HgRemote
47 from vcsserver.hg import MercurialFactory, HgRemote
48 except ImportError:
48 except ImportError:
49 MercurialFactory = None
49 MercurialFactory = None
50 HgRemote = None
50 HgRemote = None
51
51
52 try:
52 try:
53 from vcsserver.svn import SubversionFactory, SvnRemote
53 from vcsserver.svn import SubversionFactory, SvnRemote
54 except ImportError:
54 except ImportError:
55 SubversionFactory = None
55 SubversionFactory = None
56 SvnRemote = None
56 SvnRemote = None
57
57
58 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
59
59
60
60
61 def _is_request_chunked(environ):
61 def _is_request_chunked(environ):
62 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
62 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
63 return stream
63 return stream
64
64
65
65
66 class VCS(object):
66 class VCS(object):
67 def __init__(self, locale=None, cache_config=None):
67 def __init__(self, locale=None, cache_config=None):
68 self.locale = locale
68 self.locale = locale
69 self.cache_config = cache_config
69 self.cache_config = cache_config
70 self._configure_locale()
70 self._configure_locale()
71 self._initialize_cache()
71 self._initialize_cache()
72
72
73 if GitFactory and GitRemote:
73 if GitFactory and GitRemote:
74 git_repo_cache = self.cache.get_cache_region(
74 git_repo_cache = self.cache.get_cache_region(
75 'git', region='repo_object')
75 'git', region='repo_object')
76 git_factory = GitFactory(git_repo_cache)
76 git_factory = GitFactory(git_repo_cache)
77 self._git_remote = GitRemote(git_factory)
77 self._git_remote = GitRemote(git_factory)
78 else:
78 else:
79 log.info("Git client import failed")
79 log.info("Git client import failed")
80
80
81 if MercurialFactory and HgRemote:
81 if MercurialFactory and HgRemote:
82 hg_repo_cache = self.cache.get_cache_region(
82 hg_repo_cache = self.cache.get_cache_region(
83 'hg', region='repo_object')
83 'hg', region='repo_object')
84 hg_factory = MercurialFactory(hg_repo_cache)
84 hg_factory = MercurialFactory(hg_repo_cache)
85 self._hg_remote = HgRemote(hg_factory)
85 self._hg_remote = HgRemote(hg_factory)
86 else:
86 else:
87 log.info("Mercurial client import failed")
87 log.info("Mercurial client import failed")
88
88
89 if SubversionFactory and SvnRemote:
89 if SubversionFactory and SvnRemote:
90 svn_repo_cache = self.cache.get_cache_region(
90 svn_repo_cache = self.cache.get_cache_region(
91 'svn', region='repo_object')
91 'svn', region='repo_object')
92 svn_factory = SubversionFactory(svn_repo_cache)
92 svn_factory = SubversionFactory(svn_repo_cache)
93 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
93 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
94 else:
94 else:
95 log.info("Subversion client import failed")
95 log.info("Subversion client import failed")
96
96
97 self._vcsserver = VcsServer()
97 self._vcsserver = VcsServer()
98
98
99 def _initialize_cache(self):
99 def _initialize_cache(self):
100 cache_config = parse_cache_config_options(self.cache_config)
100 cache_config = parse_cache_config_options(self.cache_config)
101 log.info('Initializing beaker cache: %s' % cache_config)
101 log.info('Initializing beaker cache: %s' % cache_config)
102 self.cache = CacheManager(**cache_config)
102 self.cache = CacheManager(**cache_config)
103
103
104 def _configure_locale(self):
104 def _configure_locale(self):
105 if self.locale:
105 if self.locale:
106 log.info('Setting locale `LC_ALL` to %s' % self.locale)
106 log.info('Setting locale `LC_ALL` to %s' % self.locale)
107 else:
107 else:
108 log.info(
108 log.info(
109 'Configuring locale subsystem based on environment variables')
109 'Configuring locale subsystem based on environment variables')
110 try:
110 try:
111 # If self.locale is the empty string, then the locale
111 # If self.locale is the empty string, then the locale
112 # module will use the environment variables. See the
112 # module will use the environment variables. See the
113 # documentation of the package `locale`.
113 # documentation of the package `locale`.
114 locale.setlocale(locale.LC_ALL, self.locale)
114 locale.setlocale(locale.LC_ALL, self.locale)
115
115
116 language_code, encoding = locale.getlocale()
116 language_code, encoding = locale.getlocale()
117 log.info(
117 log.info(
118 'Locale set to language code "%s" with encoding "%s".',
118 'Locale set to language code "%s" with encoding "%s".',
119 language_code, encoding)
119 language_code, encoding)
120 except locale.Error:
120 except locale.Error:
121 log.exception(
121 log.exception(
122 'Cannot set locale, not configuring the locale system')
122 'Cannot set locale, not configuring the locale system')
123
123
124
124
125 class WsgiProxy(object):
125 class WsgiProxy(object):
126 def __init__(self, wsgi):
126 def __init__(self, wsgi):
127 self.wsgi = wsgi
127 self.wsgi = wsgi
128
128
129 def __call__(self, environ, start_response):
129 def __call__(self, environ, start_response):
130 input_data = environ['wsgi.input'].read()
130 input_data = environ['wsgi.input'].read()
131 input_data = msgpack.unpackb(input_data)
131 input_data = msgpack.unpackb(input_data)
132
132
133 error = None
133 error = None
134 try:
134 try:
135 data, status, headers = self.wsgi.handle(
135 data, status, headers = self.wsgi.handle(
136 input_data['environment'], input_data['input_data'],
136 input_data['environment'], input_data['input_data'],
137 *input_data['args'], **input_data['kwargs'])
137 *input_data['args'], **input_data['kwargs'])
138 except Exception as e:
138 except Exception as e:
139 data, status, headers = [], None, None
139 data, status, headers = [], None, None
140 error = {
140 error = {
141 'message': str(e),
141 'message': str(e),
142 '_vcs_kind': getattr(e, '_vcs_kind', None)
142 '_vcs_kind': getattr(e, '_vcs_kind', None)
143 }
143 }
144
144
145 start_response(200, {})
145 start_response(200, {})
146 return self._iterator(error, status, headers, data)
146 return self._iterator(error, status, headers, data)
147
147
148 def _iterator(self, error, status, headers, data):
148 def _iterator(self, error, status, headers, data):
149 initial_data = [
149 initial_data = [
150 error,
150 error,
151 status,
151 status,
152 headers,
152 headers,
153 ]
153 ]
154
154
155 for d in chain(initial_data, data):
155 for d in chain(initial_data, data):
156 yield msgpack.packb(d)
156 yield msgpack.packb(d)
157
157
158
158
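A rough sketch of the framing WsgiProxy works with, assuming only that msgpack is installed: the request body is a single msgpack map with the keys read above, and the response is a stream of msgpack objects in the order error, status, headers, then the data chunks. The decoding helper below is illustrative and not part of the shipped client.

import msgpack

# Request body expected by WsgiProxy: one msgpack-encoded map.
request_body = msgpack.packb({
    'environment': {'REQUEST_METHOD': 'GET'},  # WSGI-style environ subset
    'input_data': '',                          # raw body of the proxied request
    'args': [],
    'kwargs': {},
})

def read_proxy_response(raw_bytes):
    # The response is a concatenation of msgpack objects: the first three
    # are error, status and headers; everything after that is payload data.
    unpacker = msgpack.Unpacker()
    unpacker.feed(raw_bytes)
    items = list(unpacker)
    error, status, headers = items[:3]
    return error, status, headers, items[3:]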
159 class HTTPApplication(object):
159 class HTTPApplication(object):
160 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
160 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
161
161
162 remote_wsgi = remote_wsgi
162 remote_wsgi = remote_wsgi
163 _use_echo_app = False
163 _use_echo_app = False
164
164
165 def __init__(self, settings=None, global_config=None):
165 def __init__(self, settings=None, global_config=None):
166 self.config = Configurator(settings=settings)
166 self.config = Configurator(settings=settings)
167 self.global_config = global_config
167 self.global_config = global_config
168
168
169 locale = settings.get('locale', '') or 'en_US.UTF-8'
169 locale = settings.get('locale', '') or 'en_US.UTF-8'
170 vcs = VCS(locale=locale, cache_config=settings)
170 vcs = VCS(locale=locale, cache_config=settings)
171 self._remotes = {
171 self._remotes = {
172 'hg': vcs._hg_remote,
172 'hg': vcs._hg_remote,
173 'git': vcs._git_remote,
173 'git': vcs._git_remote,
174 'svn': vcs._svn_remote,
174 'svn': vcs._svn_remote,
175 'server': vcs._vcsserver,
175 'server': vcs._vcsserver,
176 }
176 }
177 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
177 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
178 self._use_echo_app = True
178 self._use_echo_app = True
179 log.warning("Using EchoApp for VCS operations.")
179 log.warning("Using EchoApp for VCS operations.")
180 self.remote_wsgi = remote_wsgi_stub
180 self.remote_wsgi = remote_wsgi_stub
181 self._configure_settings(settings)
181 self._configure_settings(settings)
182 self._configure()
182 self._configure()
183
183
184 def _configure_settings(self, app_settings):
184 def _configure_settings(self, app_settings):
185 """
185 """
186 Configure the settings module.
186 Configure the settings module.
187 """
187 """
188 git_path = app_settings.get('git_path', None)
188 git_path = app_settings.get('git_path', None)
189 if git_path:
189 if git_path:
190 settings.GIT_EXECUTABLE = git_path
190 settings.GIT_EXECUTABLE = git_path
191
191
192 def _configure(self):
192 def _configure(self):
193 self.config.add_renderer(
193 self.config.add_renderer(
194 name='msgpack',
194 name='msgpack',
195 factory=self._msgpack_renderer_factory)
195 factory=self._msgpack_renderer_factory)
196
196
197 self.config.add_route('service', '/_service')
197 self.config.add_route('service', '/_service')
198 self.config.add_route('status', '/status')
198 self.config.add_route('status', '/status')
199 self.config.add_route('hg_proxy', '/proxy/hg')
199 self.config.add_route('hg_proxy', '/proxy/hg')
200 self.config.add_route('git_proxy', '/proxy/git')
200 self.config.add_route('git_proxy', '/proxy/git')
201 self.config.add_route('vcs', '/{backend}')
201 self.config.add_route('vcs', '/{backend}')
202 self.config.add_route('stream_git', '/stream/git/*repo_name')
202 self.config.add_route('stream_git', '/stream/git/*repo_name')
203 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
203 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
204
204
205 self.config.add_view(
205 self.config.add_view(
206 self.status_view, route_name='status', renderer='json')
206 self.status_view, route_name='status', renderer='json')
207 self.config.add_view(
207 self.config.add_view(
208 self.service_view, route_name='service', renderer='msgpack')
208 self.service_view, route_name='service', renderer='msgpack')
209
209
210 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
210 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
211 self.config.add_view(self.git_proxy(), route_name='git_proxy')
211 self.config.add_view(self.git_proxy(), route_name='git_proxy')
212 self.config.add_view(
212 self.config.add_view(
213 self.vcs_view, route_name='vcs', renderer='msgpack',
213 self.vcs_view, route_name='vcs', renderer='msgpack',
214 custom_predicates=[self.is_vcs_view])
214 custom_predicates=[self.is_vcs_view])
215
215
216 self.config.add_view(self.hg_stream(), route_name='stream_hg')
216 self.config.add_view(self.hg_stream(), route_name='stream_hg')
217 self.config.add_view(self.git_stream(), route_name='stream_git')
217 self.config.add_view(self.git_stream(), route_name='stream_git')
218
218
219 def notfound(request):
219 def notfound(request):
220 return {'status': '404 NOT FOUND'}
220 return {'status': '404 NOT FOUND'}
221 self.config.add_notfound_view(notfound, renderer='json')
221 self.config.add_notfound_view(notfound, renderer='json')
222
222
223 self.config.add_view(self.handle_vcs_exception, context=Exception)
223 self.config.add_view(self.handle_vcs_exception, context=Exception)
224
224
225 self.config.add_tween(
225 self.config.add_tween(
226 'vcsserver.tweens.RequestWrapperTween',
226 'vcsserver.tweens.RequestWrapperTween',
227 )
227 )
228
228
229 def wsgi_app(self):
229 def wsgi_app(self):
230 return self.config.make_wsgi_app()
230 return self.config.make_wsgi_app()
231
231
232 def vcs_view(self, request):
232 def vcs_view(self, request):
233 remote = self._remotes[request.matchdict['backend']]
233 remote = self._remotes[request.matchdict['backend']]
234 payload = msgpack.unpackb(request.body, use_list=True)
234 payload = msgpack.unpackb(request.body, use_list=True)
235 method = payload.get('method')
235 method = payload.get('method')
236 params = payload.get('params')
236 params = payload.get('params')
237 wire = params.get('wire')
237 wire = params.get('wire')
238 args = params.get('args')
238 args = params.get('args')
239 kwargs = params.get('kwargs')
239 kwargs = params.get('kwargs')
240 if wire:
240 if wire:
241 try:
241 try:
242 wire['context'] = uuid.UUID(wire['context'])
242 wire['context'] = uuid.UUID(wire['context'])
243 except KeyError:
243 except KeyError:
244 pass
244 pass
245 args.insert(0, wire)
245 args.insert(0, wire)
246
246
247 log.debug('method called:%s with kwargs:%s', method, kwargs)
247 log.debug('method called:%s with kwargs:%s', method, kwargs)
248 try:
248 try:
249 resp = getattr(remote, method)(*args, **kwargs)
249 resp = getattr(remote, method)(*args, **kwargs)
250 except Exception as e:
250 except Exception as e:
251 tb_info = traceback.format_exc()
251 tb_info = traceback.format_exc()
252
252
253 type_ = e.__class__.__name__
253 type_ = e.__class__.__name__
254 if type_ not in self.ALLOWED_EXCEPTIONS:
254 if type_ not in self.ALLOWED_EXCEPTIONS:
255 type_ = None
255 type_ = None
256
256
257 resp = {
257 resp = {
258 'id': payload.get('id'),
258 'id': payload.get('id'),
259 'error': {
259 'error': {
260 'message': e.message,
260 'message': e.message,
261 'traceback': tb_info,
261 'traceback': tb_info,
262 'type': type_
262 'type': type_
263 }
263 }
264 }
264 }
265 try:
265 try:
266 resp['error']['_vcs_kind'] = e._vcs_kind
266 resp['error']['_vcs_kind'] = e._vcs_kind
267 except AttributeError:
267 except AttributeError:
268 pass
268 pass
269 else:
269 else:
270 resp = {
270 resp = {
271 'id': payload.get('id'),
271 'id': payload.get('id'),
272 'result': resp
272 'result': resp
273 }
273 }
274
274
275 return resp
275 return resp
276
276
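For orientation, a hedged sketch of how a client could call this endpoint. The payload layout (id, method, params with wire/args/kwargs) and the msgpack encoding follow from the code above; the host, port and method name are assumptions made for illustration, and the real RhodeCode client uses its own HTTP layer rather than requests.

import msgpack
import requests  # any HTTP client works; requests is used here for brevity

payload = msgpack.packb({
    'id': 'req-1',
    'method': 'is_empty',      # illustrative remote method name
    'params': {
        'wire': {},            # per-repo wire info; may carry a 'context' UUID string
        'args': [],
        'kwargs': {},
    },
})

resp = requests.post(
    'http://127.0.0.1:9900/git',                        # assumed vcsserver address
    data=payload,
    headers={'Content-Type': 'application/x-msgpack'})

reply = msgpack.unpackb(resp.content)
# success: {'id': 'req-1', 'result': ...}
# failure: {'id': 'req-1', 'error': {'message': ..., 'traceback': ..., 'type': ...}}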
277 def status_view(self, request):
277 def status_view(self, request):
278 import vcsserver
278 import vcsserver
279 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__}
279 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__}
280
280
281 def service_view(self, request):
281 def service_view(self, request):
282 import vcsserver
282 import vcsserver
283 import ConfigParser as configparser
283 import ConfigParser as configparser
284
284
285 payload = msgpack.unpackb(request.body, use_list=True)
285 payload = msgpack.unpackb(request.body, use_list=True)
286
286
287 try:
287 try:
288 path = self.global_config['__file__']
288 path = self.global_config['__file__']
289 config = configparser.ConfigParser()
289 config = configparser.ConfigParser()
290 config.read(path)
290 config.read(path)
291 parsed_ini = config
291 parsed_ini = config
292 if parsed_ini.has_section('server:main'):
292 if parsed_ini.has_section('server:main'):
293 parsed_ini = dict(parsed_ini.items('server:main'))
293 parsed_ini = dict(parsed_ini.items('server:main'))
294 except Exception:
294 except Exception:
295 log.exception('Failed to read .ini file for display')
295 log.exception('Failed to read .ini file for display')
296 parsed_ini = {}
296 parsed_ini = {}
297
297
298 resp = {
298 resp = {
299 'id': payload.get('id'),
299 'id': payload.get('id'),
300 'result': dict(
300 'result': dict(
301 version=vcsserver.__version__,
301 version=vcsserver.__version__,
302 config=parsed_ini,
302 config=parsed_ini,
303 payload=payload,
303 payload=payload,
304 )
304 )
305 }
305 }
306 return resp
306 return resp
307
307
308 def _msgpack_renderer_factory(self, info):
308 def _msgpack_renderer_factory(self, info):
309 def _render(value, system):
309 def _render(value, system):
310 value = msgpack.packb(value)
310 value = msgpack.packb(value)
311 request = system.get('request')
311 request = system.get('request')
312 if request is not None:
312 if request is not None:
313 response = request.response
313 response = request.response
314 ct = response.content_type
314 ct = response.content_type
315 if ct == response.default_content_type:
315 if ct == response.default_content_type:
316 response.content_type = 'application/x-msgpack'
316 response.content_type = 'application/x-msgpack'
317 return value
317 return value
318 return _render
318 return _render
319
319
320 def set_env_from_config(self, environ, config):
320 def set_env_from_config(self, environ, config):
321 dict_conf = {}
321 dict_conf = {}
322 try:
322 try:
323 for elem in config:
323 for elem in config:
324 if elem[0] == 'rhodecode':
324 if elem[0] == 'rhodecode':
325 dict_conf = json.loads(elem[2])
325 dict_conf = json.loads(elem[2])
326 break
326 break
327 except Exception:
327 except Exception:
328 log.exception('Failed to fetch SCM CONFIG')
328 log.exception('Failed to fetch SCM CONFIG')
329 return
329 return
330
330
331 username = dict_conf.get('username')
331 username = dict_conf.get('username')
332 if username:
332 if username:
333 environ['REMOTE_USER'] = username
333 environ['REMOTE_USER'] = username
334 # Mercurial specific; some extension APIs rely on this
335 environ['HGUSER'] = username
334
336
335 ip = dict_conf.get('ip')
337 ip = dict_conf.get('ip')
336 if ip:
338 if ip:
337 environ['REMOTE_HOST'] = ip
339 environ['REMOTE_HOST'] = ip
338
340
339 if _is_request_chunked(environ):
341 if _is_request_chunked(environ):
340 # set the compatibility flag for webob
342 # set the compatibility flag for webob
341 environ['wsgi.input_terminated'] = True
343 environ['wsgi.input_terminated'] = True
342
344
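A minimal sketch of the config shape this helper expects: an iterable of (section, option, value) triples in which the 'rhodecode' entry carries a JSON-encoded dict holding username and ip. The option name below is made up; only the section name and the JSON value matter to the lookup above.

import json

config = [
    ('web', 'push_ssl', 'false'),             # ignored by set_env_from_config
    ('rhodecode', 'scm_data', json.dumps({    # option name is illustrative
        'username': 'test_admin',             # becomes REMOTE_USER and HGUSER
        'ip': '127.0.0.1',                    # becomes REMOTE_HOST
    })),
]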
343 def hg_proxy(self):
345 def hg_proxy(self):
344 @wsgiapp
346 @wsgiapp
345 def _hg_proxy(environ, start_response):
347 def _hg_proxy(environ, start_response):
346 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
348 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
347 return app(environ, start_response)
349 return app(environ, start_response)
348 return _hg_proxy
350 return _hg_proxy
349
351
350 def git_proxy(self):
352 def git_proxy(self):
351 @wsgiapp
353 @wsgiapp
352 def _git_proxy(environ, start_response):
354 def _git_proxy(environ, start_response):
353 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
355 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
354 return app(environ, start_response)
356 return app(environ, start_response)
355 return _git_proxy
357 return _git_proxy
356
358
357 def hg_stream(self):
359 def hg_stream(self):
358 if self._use_echo_app:
360 if self._use_echo_app:
359 @wsgiapp
361 @wsgiapp
360 def _hg_stream(environ, start_response):
362 def _hg_stream(environ, start_response):
361 app = EchoApp('fake_path', 'fake_name', None)
363 app = EchoApp('fake_path', 'fake_name', None)
362 return app(environ, start_response)
364 return app(environ, start_response)
363 return _hg_stream
365 return _hg_stream
364 else:
366 else:
365 @wsgiapp
367 @wsgiapp
366 def _hg_stream(environ, start_response):
368 def _hg_stream(environ, start_response):
367 log.debug('http-app: handling hg stream')
369 log.debug('http-app: handling hg stream')
368 repo_path = environ['HTTP_X_RC_REPO_PATH']
370 repo_path = environ['HTTP_X_RC_REPO_PATH']
369 repo_name = environ['HTTP_X_RC_REPO_NAME']
371 repo_name = environ['HTTP_X_RC_REPO_NAME']
370 packed_config = base64.b64decode(
372 packed_config = base64.b64decode(
371 environ['HTTP_X_RC_REPO_CONFIG'])
373 environ['HTTP_X_RC_REPO_CONFIG'])
372 config = msgpack.unpackb(packed_config)
374 config = msgpack.unpackb(packed_config)
373 app = scm_app.create_hg_wsgi_app(
375 app = scm_app.create_hg_wsgi_app(
374 repo_path, repo_name, config)
376 repo_path, repo_name, config)
375
377
376 # Consistent path information for hgweb
378 # Consistent path information for hgweb
377 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
379 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
378 environ['REPO_NAME'] = repo_name
380 environ['REPO_NAME'] = repo_name
379 self.set_env_from_config(environ, config)
381 self.set_env_from_config(environ, config)
380
382
381 log.debug('http-app: starting app handler '
383 log.debug('http-app: starting app handler '
382 'with %s and process request', app)
384 'with %s and process request', app)
383 return app(environ, ResponseFilter(start_response))
385 return app(environ, ResponseFilter(start_response))
384 return _hg_stream
386 return _hg_stream
385
387
386 def git_stream(self):
388 def git_stream(self):
387 if self._use_echo_app:
389 if self._use_echo_app:
388 @wsgiapp
390 @wsgiapp
389 def _git_stream(environ, start_response):
391 def _git_stream(environ, start_response):
390 app = EchoApp('fake_path', 'fake_name', None)
392 app = EchoApp('fake_path', 'fake_name', None)
391 return app(environ, start_response)
393 return app(environ, start_response)
392 return _git_stream
394 return _git_stream
393 else:
395 else:
394 @wsgiapp
396 @wsgiapp
395 def _git_stream(environ, start_response):
397 def _git_stream(environ, start_response):
396 log.debug('http-app: handling git stream')
398 log.debug('http-app: handling git stream')
397 repo_path = environ['HTTP_X_RC_REPO_PATH']
399 repo_path = environ['HTTP_X_RC_REPO_PATH']
398 repo_name = environ['HTTP_X_RC_REPO_NAME']
400 repo_name = environ['HTTP_X_RC_REPO_NAME']
399 packed_config = base64.b64decode(
401 packed_config = base64.b64decode(
400 environ['HTTP_X_RC_REPO_CONFIG'])
402 environ['HTTP_X_RC_REPO_CONFIG'])
401 config = msgpack.unpackb(packed_config)
403 config = msgpack.unpackb(packed_config)
402
404
403 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
405 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
404 self.set_env_from_config(environ, config)
406 self.set_env_from_config(environ, config)
405
407
406 content_type = environ.get('CONTENT_TYPE', '')
408 content_type = environ.get('CONTENT_TYPE', '')
407
409
408 path = environ['PATH_INFO']
410 path = environ['PATH_INFO']
409 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
411 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
410 log.debug(
412 log.debug(
411 'LFS: Detecting if request `%s` is LFS server path based '
413 'LFS: Detecting if request `%s` is LFS server path based '
412 'on content type:`%s`, is_lfs:%s',
414 'on content type:`%s`, is_lfs:%s',
413 path, content_type, is_lfs_request)
415 path, content_type, is_lfs_request)
414
416
415 if not is_lfs_request:
417 if not is_lfs_request:
416 # fallback detection by path
418 # fallback detection by path
417 if GIT_LFS_PROTO_PAT.match(path):
419 if GIT_LFS_PROTO_PAT.match(path):
418 is_lfs_request = True
420 is_lfs_request = True
419 log.debug(
421 log.debug(
420 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
422 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
421 path, is_lfs_request)
423 path, is_lfs_request)
422
424
423 if is_lfs_request:
425 if is_lfs_request:
424 app = scm_app.create_git_lfs_wsgi_app(
426 app = scm_app.create_git_lfs_wsgi_app(
425 repo_path, repo_name, config)
427 repo_path, repo_name, config)
426 else:
428 else:
427 app = scm_app.create_git_wsgi_app(
429 app = scm_app.create_git_wsgi_app(
428 repo_path, repo_name, config)
430 repo_path, repo_name, config)
429
431
430 log.debug('http-app: starting app handler '
432 log.debug('http-app: starting app handler '
431 'with %s and process request', app)
433 'with %s and process request', app)
432
434
433 return app(environ, start_response)
435 return app(environ, start_response)
434
436
435 return _git_stream
437 return _git_stream
436
438
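A hedged sketch of the request headers the two stream views rely on, assuming the usual WSGI mapping of X-RC-* headers to HTTP_X_RC_* environ keys; the repository path and name are placeholders. The repo config travels as base64-encoded msgpack, matching the decoding above.

import base64
import msgpack

repo_config = [('rhodecode', 'scm_data', '{}')]   # illustrative (section, option, value) triples

headers = {
    'X-RC-Repo-Path': '/srv/repos/example.git',   # filesystem path of the repository (placeholder)
    'X-RC-Repo-Name': 'example',
    'X-RC-Repo-Config': base64.b64encode(msgpack.packb(repo_config)),
    'X-RC-Path-Info': '/example/info/refs',       # original PATH_INFO seen by the frontend
}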
437 def is_vcs_view(self, context, request):
439 def is_vcs_view(self, context, request):
438 """
440 """
439 View predicate that returns true if the given backend is supported by
441 View predicate that returns true if the given backend is supported by
440 the defined remotes.
442 the defined remotes.
441 """
443 """
442 backend = request.matchdict.get('backend')
444 backend = request.matchdict.get('backend')
443 return backend in self._remotes
445 return backend in self._remotes
444
446
445 def handle_vcs_exception(self, exception, request):
447 def handle_vcs_exception(self, exception, request):
446 _vcs_kind = getattr(exception, '_vcs_kind', '')
448 _vcs_kind = getattr(exception, '_vcs_kind', '')
447 if _vcs_kind == 'repo_locked':
449 if _vcs_kind == 'repo_locked':
448 # Get custom repo-locked status code if present.
450 # Get custom repo-locked status code if present.
449 status_code = request.headers.get('X-RC-Locked-Status-Code')
451 status_code = request.headers.get('X-RC-Locked-Status-Code')
450 return HTTPRepoLocked(
452 return HTTPRepoLocked(
451 title=exception.message, status_code=status_code)
453 title=exception.message, status_code=status_code)
452
454
453 # Re-raise exception if we can not handle it.
455 # Re-raise exception if we can not handle it.
454 log.exception(
456 log.exception(
455 'error occurred handling this request for path: %s', request.path)
457 'error occurred handling this request for path: %s', request.path)
456 raise exception
458 raise exception
457
459
458
460
459 class ResponseFilter(object):
461 class ResponseFilter(object):
460
462
461 def __init__(self, start_response):
463 def __init__(self, start_response):
462 self._start_response = start_response
464 self._start_response = start_response
463
465
464 def __call__(self, status, response_headers, exc_info=None):
466 def __call__(self, status, response_headers, exc_info=None):
465 headers = tuple(
467 headers = tuple(
466 (h, v) for h, v in response_headers
468 (h, v) for h, v in response_headers
467 if not wsgiref.util.is_hop_by_hop(h))
469 if not wsgiref.util.is_hop_by_hop(h))
468 return self._start_response(status, headers, exc_info)
470 return self._start_response(status, headers, exc_info)
469
471
470
472
471 def main(global_config, **settings):
473 def main(global_config, **settings):
472 if MercurialFactory:
474 if MercurialFactory:
473 hgpatches.patch_largefiles_capabilities()
475 hgpatches.patch_largefiles_capabilities()
474 hgpatches.patch_subrepo_type_mapping()
476 hgpatches.patch_subrepo_type_mapping()
475 app = HTTPApplication(settings=settings, global_config=global_config)
477 app = HTTPApplication(settings=settings, global_config=global_config)
476 return app.wsgi_app()
478 return app.wsgi_app()
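For local experiments, the factory above can also be served without PasteDeploy, for example via wsgiref. The settings below are a minimal, illustrative subset (echo app enabled so no real backends are needed, plus a memory-backed beaker region); real deployments supply the full set through the shipped .ini file.

from wsgiref.simple_server import make_server

settings = {
    'locale': 'en_US.UTF-8',
    'dev.use_echo_app': 'true',                     # stub out hg/git/svn handling
    'beaker.cache.regions': 'repo_object',          # assumed minimal cache setup
    'beaker.cache.repo_object.type': 'memory',
    'beaker.cache.repo_object.expire': '3600',
}

application = main(global_config={}, **settings)
make_server('127.0.0.1', 9900, application).serve_forever()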
@@ -1,386 +1,386 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import simplejson as json
24 import simplejson as json
25 import dulwich.protocol
25 import dulwich.protocol
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver import hooks, subprocessio
28 from vcsserver import hooks, subprocessio
29
29
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class FileWrapper(object):
34 class FileWrapper(object):
35 """File wrapper that ensures how much data is read from it."""
35 """File wrapper that ensures how much data is read from it."""
36
36
37 def __init__(self, fd, content_length):
37 def __init__(self, fd, content_length):
38 self.fd = fd
38 self.fd = fd
39 self.content_length = content_length
39 self.content_length = content_length
40 self.remain = content_length
40 self.remain = content_length
41
41
42 def read(self, size):
42 def read(self, size):
43 if size <= self.remain:
43 if size <= self.remain:
44 try:
44 try:
45 data = self.fd.read(size)
45 data = self.fd.read(size)
46 except socket.error:
46 except socket.error:
47 raise IOError(self)
47 raise IOError(self)
48 self.remain -= size
48 self.remain -= size
49 elif self.remain:
49 elif self.remain:
50 data = self.fd.read(self.remain)
50 data = self.fd.read(self.remain)
51 self.remain = 0
51 self.remain = 0
52 else:
52 else:
53 data = None
53 data = None
54 return data
54 return data
55
55
56 def __repr__(self):
56 def __repr__(self):
57 return '<FileWrapper %s len: %s, read: %s>' % (
57 return '<FileWrapper %s len: %s, read: %s>' % (
58 self.fd, self.content_length, self.content_length - self.remain
58 self.fd, self.content_length, self.content_length - self.remain
59 )
59 )
60
60
61
61
62 class GitRepository(object):
62 class GitRepository(object):
63 """WSGI app for handling Git smart protocol endpoints."""
63 """WSGI app for handling Git smart protocol endpoints."""
64
64
65 git_folder_signature = frozenset(
65 git_folder_signature = frozenset(
66 ('config', 'head', 'info', 'objects', 'refs'))
66 ('config', 'head', 'info', 'objects', 'refs'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 valid_accepts = frozenset(('application/x-%s-result' %
68 valid_accepts = frozenset(('application/x-%s-result' %
69 c for c in commands))
69 c for c in commands))
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
77
77
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
79 extras):
79 extras):
80 files = frozenset(f.lower() for f in os.listdir(content_path))
80 files = frozenset(f.lower() for f in os.listdir(content_path))
81 valid_dir_signature = self.git_folder_signature.issubset(files)
81 valid_dir_signature = self.git_folder_signature.issubset(files)
82
82
83 if not valid_dir_signature:
83 if not valid_dir_signature:
84 raise OSError('%s missing git signature' % content_path)
84 raise OSError('%s missing git signature' % content_path)
85
85
86 self.content_path = content_path
86 self.content_path = content_path
87 self.repo_name = repo_name
87 self.repo_name = repo_name
88 self.extras = extras
88 self.extras = extras
89 self.git_path = git_path
89 self.git_path = git_path
90 self.update_server_info = update_server_info
90 self.update_server_info = update_server_info
91
91
92 def _get_fixedpath(self, path):
92 def _get_fixedpath(self, path):
93 """
93 """
94 Strip the repo name (and the '.git' segment of bare repos) from the path.
94 Strip the repo name (and the '.git' segment of bare repos) from the path.
95
95
96 :param path:
96 :param path:
97 """
97 """
98 path = path.split(self.repo_name, 1)[-1]
98 path = path.split(self.repo_name, 1)[-1]
99 if path.startswith('.git'):
99 if path.startswith('.git'):
100 # for bare repos we still get the .git prefix inside, we skip it
100 # for bare repos we still get the .git prefix inside, we skip it
101 # here, and remove from the service command
101 # here, and remove from the service command
102 path = path[4:]
102 path = path[4:]
103
103
104 return path.strip('/')
104 return path.strip('/')
105
105
106 def inforefs(self, request, unused_environ):
106 def inforefs(self, request, unused_environ):
107 """
107 """
108 WSGI Response producer for HTTP GET Git Smart
108 WSGI Response producer for HTTP GET Git Smart
109 HTTP /info/refs request.
109 HTTP /info/refs request.
110 """
110 """
111
111
112 git_command = request.GET.get('service')
112 git_command = request.GET.get('service')
113 if git_command not in self.commands:
113 if git_command not in self.commands:
114 log.debug('command %s not allowed', git_command)
114 log.debug('command %s not allowed', git_command)
115 return exc.HTTPForbidden()
115 return exc.HTTPForbidden()
116
116
117 # please, resist the urge to add '\n' to git capture and increment
117 # please, resist the urge to add '\n' to git capture and increment
118 # line count by 1.
118 # line count by 1.
119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
120 # a part of protocol.
120 # a part of protocol.
121 # The code in Git client not only does NOT need '\n', but actually
121 # The code in Git client not only does NOT need '\n', but actually
122 # blows up if you sprinkle "flush" (0000) as "0001\n".
122 # blows up if you sprinkle "flush" (0000) as "0001\n".
123 # It reads binary, per number of bytes specified.
123 # It reads binary, per number of bytes specified.
124 # if you do add '\n' as part of data, count it.
124 # if you do add '\n' as part of data, count it.
125 server_advert = '# service=%s\n' % git_command
125 server_advert = '# service=%s\n' % git_command
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
127 try:
127 try:
128 gitenv = dict(os.environ)
128 gitenv = dict(os.environ)
129 # forget all configs
129 # forget all configs
130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
131 command = [self.git_path, git_command[4:], '--stateless-rpc',
131 command = [self.git_path, git_command[4:], '--stateless-rpc',
132 '--advertise-refs', self.content_path]
132 '--advertise-refs', self.content_path]
133 out = subprocessio.SubprocessIOChunker(
133 out = subprocessio.SubprocessIOChunker(
134 command,
134 command,
135 env=gitenv,
135 env=gitenv,
136 starting_values=[packet_len + server_advert + '0000'],
136 starting_values=[packet_len + server_advert + '0000'],
137 shell=False
137 shell=False
138 )
138 )
139 except EnvironmentError:
139 except EnvironmentError:
140 log.exception('Error processing command')
140 log.exception('Error processing command')
141 raise exc.HTTPExpectationFailed()
141 raise exc.HTTPExpectationFailed()
142
142
143 resp = Response()
143 resp = Response()
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
145 resp.charset = None
145 resp.charset = None
146 resp.app_iter = out
146 resp.app_iter = out
147
147
148 return resp
148 return resp
149
149
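A worked example of the pkt-line arithmetic above for the git-upload-pack case; this only restates the computation already performed, with the concrete numbers spelled out.

git_command = 'git-upload-pack'
server_advert = '# service=%s\n' % git_command                     # 26 characters
packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()

assert packet_len == '001e'        # 26 payload bytes + 4 length bytes = 30 = 0x1e
prefix = packet_len + server_advert + '0000'
# -> '001e# service=git-upload-pack\n0000'
# The 4-digit hex length counts itself plus the payload; '0000' is the flush packet.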
150 def _get_want_capabilities(self, request):
150 def _get_want_capabilities(self, request):
151 """Read the capabilities found in the first want line of the request."""
151 """Read the capabilities found in the first want line of the request."""
152 pos = request.body_file_seekable.tell()
152 pos = request.body_file_seekable.tell()
153 first_line = request.body_file_seekable.readline()
153 first_line = request.body_file_seekable.readline()
154 request.body_file_seekable.seek(pos)
154 request.body_file_seekable.seek(pos)
155
155
156 return frozenset(
156 return frozenset(
157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
158
158
159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
160 """
160 """
161 Construct a response with an empty PACK file.
161 Construct a response with an empty PACK file.
162
162
163 We use an empty PACK file, as that would trigger the failure of the pull
163 We use an empty PACK file, as that would trigger the failure of the pull
164 or clone command.
164 or clone command.
165
165
166 We also print in the error output a message explaining why the command
166 We also print in the error output a message explaining why the command
167 was aborted.
167 was aborted.
168
168
169 If, additionally, the user accepts messages, we send them the output
169 If, additionally, the user accepts messages, we send them the output
170 of the pre-pull hook.
170 of the pre-pull hook.
171
171
172 Note that for clients not supporting side-band we just send them the
172 Note that for clients not supporting side-band we just send them the
173 empty PACK file.
173 empty PACK file.
174 """
174 """
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
176 response = []
176 response = []
177 proto = dulwich.protocol.Protocol(None, response.append)
177 proto = dulwich.protocol.Protocol(None, response.append)
178 proto.write_pkt_line('NAK\n')
178 proto.write_pkt_line('NAK\n')
179 self._write_sideband_to_proto(pre_pull_messages, proto,
179 self._write_sideband_to_proto(pre_pull_messages, proto,
180 capabilities)
180 capabilities)
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
182 # produces a fatal error in the client:
182 # produces a fatal error in the client:
183 # fatal: error in sideband demultiplexer
183 # fatal: error in sideband demultiplexer
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
185 proto.write_sideband(1, self.EMPTY_PACK)
185 proto.write_sideband(1, self.EMPTY_PACK)
186
186
187 # writes 0000
187 # writes 0000
188 proto.write_pkt_line(None)
188 proto.write_pkt_line(None)
189
189
190 return response
190 return response
191 else:
191 else:
192 return [self.EMPTY_PACK]
192 return [self.EMPTY_PACK]
193
193
194 def _write_sideband_to_proto(self, data, proto, capabilities):
194 def _write_sideband_to_proto(self, data, proto, capabilities):
195 """
195 """
196 Write the data to the proto's sideband number 2.
196 Write the data to the proto's sideband number 2.
197
197
198 We do not use dulwich's write_sideband directly as it only supports
198 We do not use dulwich's write_sideband directly as it only supports
199 side-band-64k.
199 side-band-64k.
200 """
200 """
201 if not data:
201 if not data:
202 return
202 return
203
203
204 # N.B.(skreft): The values below are explained in the pack protocol
204 # N.B.(skreft): The values below are explained in the pack protocol
205 # documentation, section Packfile Data.
205 # documentation, section Packfile Data.
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
207 if 'side-band-64k' in capabilities:
207 if 'side-band-64k' in capabilities:
208 chunk_size = 65515
208 chunk_size = 65515
209 elif 'side-band' in capabilities:
209 elif 'side-band' in capabilities:
210 chunk_size = 995
210 chunk_size = 995
211 else:
211 else:
212 return
212 return
213
213
214 chunker = (
214 chunker = (
215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
216
216
217 for chunk in chunker:
217 for chunk in chunker:
218 proto.write_sideband(2, chunk)
218 proto.write_sideband(2, chunk)
219
219
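The two chunk sizes follow from the packet limits in the pack protocol: side-band-64k packets may be up to 65520 bytes and plain side-band packets up to 1000 bytes, and each packet spends 4 bytes on the pkt-line length plus 1 byte on the band number. A small arithmetic check:

LENGTH_PREFIX = 4    # hex pkt-line length header
BAND_BYTE = 1        # sideband channel selector

assert 65520 - LENGTH_PREFIX - BAND_BYTE == 65515   # side-band-64k payload
assert 1000 - LENGTH_PREFIX - BAND_BYTE == 995      # side-band payload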
220 def _get_messages(self, data, capabilities):
220 def _get_messages(self, data, capabilities):
221 """Return a list with packets for sending data in sideband number 2."""
221 """Return a list with packets for sending data in sideband number 2."""
222 response = []
222 response = []
223 proto = dulwich.protocol.Protocol(None, response.append)
223 proto = dulwich.protocol.Protocol(None, response.append)
224
224
225 self._write_sideband_to_proto(data, proto, capabilities)
225 self._write_sideband_to_proto(data, proto, capabilities)
226
226
227 return response
227 return response
228
228
229 def _inject_messages_to_response(self, response, capabilities,
229 def _inject_messages_to_response(self, response, capabilities,
230 start_messages, end_messages):
230 start_messages, end_messages):
231 """
231 """
232 Given a list response we inject the pre/post-pull messages.
232 Given a list response we inject the pre/post-pull messages.
233
233
234 We only inject the messages if the client supports sideband, and the
234 We only inject the messages if the client supports sideband, and the
235 response has the format:
235 response has the format:
236 0008NAK\n...0000
236 0008NAK\n...0000
237
237
238 Note that we do not check the no-progress capability as by default, git
238 Note that we do not check the no-progress capability as by default, git
239 sends it, which effectively would block all messages.
239 sends it, which effectively would block all messages.
240 """
240 """
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
242 return response
242 return response
243
243
244 if not start_messages and not end_messages:
244 if not start_messages and not end_messages:
245 return response
245 return response
246
246
247 # make a list out of response if it's an iterator
247 # make a list out of response if it's an iterator
248 # so we can investigate it for message injection.
248 # so we can investigate it for message injection.
249 if hasattr(response, '__iter__'):
249 if hasattr(response, '__iter__'):
250 response = list(response)
250 response = list(response)
251
251
252 if (not response[0].startswith('0008NAK\n') or
252 if (not response[0].startswith('0008NAK\n') or
253 not response[-1].endswith('0000')):
253 not response[-1].endswith('0000')):
254 return response
254 return response
255
255
256 new_response = ['0008NAK\n']
256 new_response = ['0008NAK\n']
257 new_response.extend(self._get_messages(start_messages, capabilities))
257 new_response.extend(self._get_messages(start_messages, capabilities))
258 if len(response) == 1:
258 if len(response) == 1:
259 new_response.append(response[0][8:-4])
259 new_response.append(response[0][8:-4])
260 else:
260 else:
261 new_response.append(response[0][8:])
261 new_response.append(response[0][8:])
262 new_response.extend(response[1:-1])
262 new_response.extend(response[1:-1])
263 new_response.append(response[-1][:-4])
263 new_response.append(response[-1][:-4])
264 new_response.extend(self._get_messages(end_messages, capabilities))
264 new_response.extend(self._get_messages(end_messages, capabilities))
265 new_response.append('0000')
265 new_response.append('0000')
266
266
267 return new_response
267 return new_response
268
268
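A tiny illustration of the splice performed above for the single-chunk case, using placeholder strings instead of real pkt-line data:

response = ['0008NAK\n<pack-data>0000']          # what the git subprocess produced
start_msgs = ['<pre-pull sideband packets>']     # output of _get_messages(...)
end_msgs = ['<post-pull sideband packets>']

new_response = ['0008NAK\n']
new_response.extend(start_msgs)
new_response.append(response[0][8:-4])           # strip the leading NAK and trailing flush
new_response.extend(end_msgs)
new_response.append('0000')

assert new_response == ['0008NAK\n', '<pre-pull sideband packets>',
                        '<pack-data>', '<post-pull sideband packets>', '0000']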
269 def backend(self, request, environ):
269 def backend(self, request, environ):
270 """
270 """
271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
271 WSGI Response producer for HTTP POST Git Smart HTTP requests.
272 Reads commands and data from HTTP POST's body.
272 Reads commands and data from HTTP POST's body.
273 returns an iterator obj with contents of git command's
273 returns an iterator obj with contents of git command's
274 response to stdout
274 response to stdout
275 """
275 """
276 # TODO(skreft): think how we could detect an HTTPLockedException, as
276 # TODO(skreft): think how we could detect an HTTPLockedException, as
277 # we probably want to have the same mechanism used by mercurial and
277 # we probably want to have the same mechanism used by mercurial and
278 # simplevcs.
278 # simplevcs.
279 # For that we would need to parse the output of the command looking for
279 # For that we would need to parse the output of the command looking for
280 # some signs of the HTTPLockedError, parse the data and reraise it in
280 # some signs of the HTTPLockedError, parse the data and reraise it in
281 # pygrack. However, that would interfere with the streaming.
281 # pygrack. However, that would interfere with the streaming.
282 #
282 #
283 # Now the output of a blocked push is:
283 # Now the output of a blocked push is:
284 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
284 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
285 # POST git-receive-pack (1047 bytes)
285 # POST git-receive-pack (1047 bytes)
286 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
286 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
287 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
287 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
288 # ! [remote rejected] master -> master (pre-receive hook declined)
288 # ! [remote rejected] master -> master (pre-receive hook declined)
289 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
289 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
290
290
291 git_command = self._get_fixedpath(request.path_info)
291 git_command = self._get_fixedpath(request.path_info)
292 if git_command not in self.commands:
292 if git_command not in self.commands:
293 log.debug('command %s not allowed', git_command)
293 log.debug('command %s not allowed', git_command)
294 return exc.HTTPForbidden()
294 return exc.HTTPForbidden()
295
295
296 capabilities = None
296 capabilities = None
297 if git_command == 'git-upload-pack':
297 if git_command == 'git-upload-pack':
298 capabilities = self._get_want_capabilities(request)
298 capabilities = self._get_want_capabilities(request)
299
299
300 if 'CONTENT_LENGTH' in environ:
300 if 'CONTENT_LENGTH' in environ:
301 inputstream = FileWrapper(request.body_file_seekable,
301 inputstream = FileWrapper(request.body_file_seekable,
302 request.content_length)
302 request.content_length)
303 else:
303 else:
304 inputstream = request.body_file_seekable
304 inputstream = request.body_file_seekable
305
305
306 resp = Response()
306 resp = Response()
307 resp.content_type = ('application/x-%s-result' %
307 resp.content_type = ('application/x-%s-result' %
308 git_command.encode('utf8'))
308 git_command.encode('utf8'))
309 resp.charset = None
309 resp.charset = None
310
310
311 pre_pull_messages = ''
311 pre_pull_messages = ''
312 if git_command == 'git-upload-pack':
312 if git_command == 'git-upload-pack':
313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
313 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
314 if status != 0:
314 if status != 0:
315 resp.app_iter = self._build_failed_pre_pull_response(
315 resp.app_iter = self._build_failed_pre_pull_response(
316 capabilities, pre_pull_messages)
316 capabilities, pre_pull_messages)
317 return resp
317 return resp
318
318
319 gitenv = dict(os.environ)
319 gitenv = dict(os.environ)
320 # forget all configs
320 # forget all configs
321 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
321 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
322 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
322 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
323 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
323 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
324 self.content_path]
324 self.content_path]
325 log.debug('handling cmd %s', cmd)
325 log.debug('handling cmd %s', cmd)
326
326
327 out = subprocessio.SubprocessIOChunker(
327 out = subprocessio.SubprocessIOChunker(
328 cmd,
328 cmd,
329 inputstream=inputstream,
329 inputstream=inputstream,
330 env=gitenv,
330 env=gitenv,
331 cwd=self.content_path,
331 cwd=self.content_path,
332 shell=False,
332 shell=False,
333 fail_on_stderr=False,
333 fail_on_stderr=False,
334 fail_on_return_code=False
334 fail_on_return_code=False
335 )
335 )
336
336
337 if self.update_server_info and git_command == 'git-receive-pack':
337 if self.update_server_info and git_command == 'git-receive-pack':
338 # We need to fully consume the iterator here, as the
338 # We need to fully consume the iterator here, as the
339 # update-server-info command needs to be run after the push.
339 # update-server-info command needs to be run after the push.
340 out = list(out)
340 out = list(out)
341
341
342 # Updating refs manually after each push.
342 # Updating refs manually after each push.
343 # This is required as some clients are exposing Git repos internally
343 # This is required as some clients are exposing Git repos internally
344 # with the dumb protocol.
344 # with the dumb protocol.
345 cmd = [self.git_path, 'update-server-info']
345 cmd = [self.git_path, 'update-server-info']
346 log.debug('handling cmd %s', cmd)
346 log.debug('handling cmd %s', cmd)
347 output = subprocessio.SubprocessIOChunker(
347 output = subprocessio.SubprocessIOChunker(
348 cmd,
348 cmd,
349 inputstream=inputstream,
349 inputstream=inputstream,
350 env=gitenv,
350 env=gitenv,
351 cwd=self.content_path,
351 cwd=self.content_path,
352 shell=False,
352 shell=False,
353 fail_on_stderr=False,
353 fail_on_stderr=False,
354 fail_on_return_code=False
354 fail_on_return_code=False
355 )
355 )
356 # Consume all the output so the subprocess finishes
356 # Consume all the output so the subprocess finishes
357 for _ in output:
357 for _ in output:
358 pass
358 pass
359
359
360 if git_command == 'git-upload-pack':
360 if git_command == 'git-upload-pack':
361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
361 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
362 resp.app_iter = self._inject_messages_to_response(
362 resp.app_iter = self._inject_messages_to_response(
363 out, capabilities, pre_pull_messages, post_pull_messages)
363 out, capabilities, pre_pull_messages, post_pull_messages)
364 else:
364 else:
365 resp.app_iter = out
365 resp.app_iter = out
366
366
367 return resp
367 return resp
368
368
369 def __call__(self, environ, start_response):
369 def __call__(self, environ, start_response):
370 request = Request(environ)
370 request = Request(environ)
371 _path = self._get_fixedpath(request.path_info)
371 _path = self._get_fixedpath(request.path_info)
372 if _path.startswith('info/refs'):
372 if _path.startswith('info/refs'):
373 app = self.inforefs
373 app = self.inforefs
374 else:
374 else:
375 app = self.backend
375 app = self.backend
376
376
377 try:
377 try:
378 resp = app(request, environ)
378 resp = app(request, environ)
379 except exc.HTTPException as error:
379 except exc.HTTPException as error:
380 log.exception('HTTP Error')
380 log.exception('HTTP Error')
381 resp = error
381 resp = error
382 except Exception:
382 except Exception:
383 log.exception('Unknown error')
383 log.exception('Unknown error')
384 resp = exc.HTTPInternalServerError()
384 resp = exc.HTTPInternalServerError()
385
385
386 return resp(environ, start_response)
386 return resp(environ, start_response)
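A hedged sketch of mounting this WSGI app directly, for example to poke at it locally with wsgiref. The repository path is a placeholder and must point at an existing bare repository; extras normally carries the hook metadata RhodeCode injects, so with an empty dict the pre/post hooks are unlikely to succeed.

from wsgiref.simple_server import make_server

app = GitRepository(
    repo_name='example',
    content_path='/srv/repos/example.git',   # placeholder: an existing bare repo
    git_path='git',                          # git executable to invoke
    update_server_info=False,
    extras={},                               # normally the RC_SCM_DATA payload
)

make_server('127.0.0.1', 8090, app).serve_forever()
# then: git clone http://127.0.0.1:8090/example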
@@ -1,34 +1,34 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver import scm_app, wsgi_app_caller
18 from vcsserver import scm_app, wsgi_app_caller
19
19
20
20
21 class GitRemoteWsgi(object):
21 class GitRemoteWsgi(object):
22 def handle(self, environ, input_data, *args, **kwargs):
22 def handle(self, environ, input_data, *args, **kwargs):
23 app = wsgi_app_caller.WSGIAppCaller(
23 app = wsgi_app_caller.WSGIAppCaller(
24 scm_app.create_git_wsgi_app(*args, **kwargs))
24 scm_app.create_git_wsgi_app(*args, **kwargs))
25
25
26 return app.handle(environ, input_data)
26 return app.handle(environ, input_data)
27
27
28
28
29 class HgRemoteWsgi(object):
29 class HgRemoteWsgi(object):
30 def handle(self, environ, input_data, *args, **kwargs):
30 def handle(self, environ, input_data, *args, **kwargs):
31 app = wsgi_app_caller.WSGIAppCaller(
31 app = wsgi_app_caller.WSGIAppCaller(
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
33
33
34 return app.handle(environ, input_data)
34 return app.handle(environ, input_data)
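A hedged sketch of how these thin wrappers are driven: a WSGI environ plus the raw request body go in, and the wrapped scm_app application is built from the remaining arguments. The environ, repository paths, and config values below are illustrative; the exact shape of the return value is defined by wsgi_app_caller.WSGIAppCaller, which is not shown in this changeset.

# Hedged sketch only; all values are placeholders.
environ = {
    'REQUEST_METHOD': 'GET',
    'PATH_INFO': '/test-repo/info/refs',
    'QUERY_STRING': 'service=git-upload-pack',
}

git_config = {'git_update_server_info': False}   # consumed by create_git_wsgi_app
git_result = GitRemoteWsgi().handle(
    environ, '', '/tmp/test-repo.git', 'test-repo', git_config)

hg_config = [('web', 'push_ssl', 'false')]       # (section, option, value) tuples
hg_result = HgRemoteWsgi().handle(
    environ, '', '/tmp/test-hg-repo', 'test-hg-repo', hg_config)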
@@ -1,229 +1,229 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import logging
19 import logging
20 import itertools
20 import itertools
21
21
22 import mercurial
22 import mercurial
23 import mercurial.error
23 import mercurial.error
24 import mercurial.hgweb.common
24 import mercurial.hgweb.common
25 import mercurial.hgweb.hgweb_mod
25 import mercurial.hgweb.hgweb_mod
26 import mercurial.hgweb.protocol
26 import mercurial.hgweb.protocol
27 import webob.exc
27 import webob.exc
28
28
29 from vcsserver import pygrack, exceptions, settings, git_lfs
29 from vcsserver import pygrack, exceptions, settings, git_lfs
30
30
31
31
32 log = logging.getLogger(__name__)
32 log = logging.getLogger(__name__)
33
33
34
34
35 # propagated from mercurial documentation
35 # propagated from mercurial documentation
36 HG_UI_SECTIONS = [
36 HG_UI_SECTIONS = [
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
40 ]
40 ]
41
41
42
42
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
44 """Extension of hgweb that simplifies some functions."""
44 """Extension of hgweb that simplifies some functions."""
45
45
46 def _get_view(self, repo):
46 def _get_view(self, repo):
47 """Views are not supported."""
47 """Views are not supported."""
48 return repo
48 return repo
49
49
50 def loadsubweb(self):
50 def loadsubweb(self):
51 """The result is only used in the templater method which is not used."""
51 """The result is only used in the templater method which is not used."""
52 return None
52 return None
53
53
54 def run(self):
54 def run(self):
55 """Unused function so raise an exception if accidentally called."""
55 """Unused function so raise an exception if accidentally called."""
56 raise NotImplementedError
56 raise NotImplementedError
57
57
58 def templater(self, req):
58 def templater(self, req):
59 """Function used in an unreachable code path.
59 """Function used in an unreachable code path.
60
60
61 This code is unreachable because we guarantee that the HTTP request
61 This code is unreachable because we guarantee that the HTTP request
62 corresponds to a Mercurial command. See the is_hg method. So, we are
62 corresponds to a Mercurial command. See the is_hg method. So, we are
63 never going to get a user-visible url.
63 never going to get a user-visible url.
64 """
64 """
65 raise NotImplementedError
65 raise NotImplementedError
66
66
67 def archivelist(self, nodeid):
67 def archivelist(self, nodeid):
68 """Unused function so raise an exception if accidentally called."""
68 """Unused function so raise an exception if accidentally called."""
69 raise NotImplementedError
69 raise NotImplementedError
70
70
71 def __call__(self, environ, start_response):
71 def __call__(self, environ, start_response):
72 """Run the WSGI application.
72 """Run the WSGI application.
73
73
74 This may be called by multiple threads.
74 This may be called by multiple threads.
75 """
75 """
76 req = mercurial.hgweb.request.wsgirequest(environ, start_response)
76 req = mercurial.hgweb.request.wsgirequest(environ, start_response)
77 gen = self.run_wsgi(req)
77 gen = self.run_wsgi(req)
78
78
79 first_chunk = None
79 first_chunk = None
80
80
81 try:
81 try:
82 data = gen.next()
82 data = gen.next()
83 def first_chunk(): yield data
83 def first_chunk(): yield data
84 except StopIteration:
84 except StopIteration:
85 pass
85 pass
86
86
87 if first_chunk:
87 if first_chunk:
88 return itertools.chain(first_chunk(), gen)
88 return itertools.chain(first_chunk(), gen)
89 return gen
89 return gen
90
90
91 def _runwsgi(self, req, repo):
91 def _runwsgi(self, req, repo):
92 cmd = req.form.get('cmd', [''])[0]
92 cmd = req.form.get('cmd', [''])[0]
93 if not mercurial.hgweb.protocol.iscmd(cmd):
93 if not mercurial.hgweb.protocol.iscmd(cmd):
94 req.respond(
94 req.respond(
95 mercurial.hgweb.common.ErrorResponse(
95 mercurial.hgweb.common.ErrorResponse(
96 mercurial.hgweb.common.HTTP_BAD_REQUEST),
96 mercurial.hgweb.common.HTTP_BAD_REQUEST),
97 mercurial.hgweb.protocol.HGTYPE
97 mercurial.hgweb.protocol.HGTYPE
98 )
98 )
99 return ['']
99 return ['']
100
100
101 return super(HgWeb, self)._runwsgi(req, repo)
101 return super(HgWeb, self)._runwsgi(req, repo)
102
102
103
103
104 def make_hg_ui_from_config(repo_config):
104 def make_hg_ui_from_config(repo_config):
105 baseui = mercurial.ui.ui()
105 baseui = mercurial.ui.ui()
106
106
107 # clean the baseui object
107 # clean the baseui object
108 baseui._ocfg = mercurial.config.config()
108 baseui._ocfg = mercurial.config.config()
109 baseui._ucfg = mercurial.config.config()
109 baseui._ucfg = mercurial.config.config()
110 baseui._tcfg = mercurial.config.config()
110 baseui._tcfg = mercurial.config.config()
111
111
112 for section, option, value in repo_config:
112 for section, option, value in repo_config:
113 baseui.setconfig(section, option, value)
113 baseui.setconfig(section, option, value)
114
114
115 # make our hgweb quiet so it doesn't print output
115 # make our hgweb quiet so it doesn't print output
116 baseui.setconfig('ui', 'quiet', 'true')
116 baseui.setconfig('ui', 'quiet', 'true')
117
117
118 return baseui
118 return baseui
119
119
120
120
121 def update_hg_ui_from_hgrc(baseui, repo_path):
121 def update_hg_ui_from_hgrc(baseui, repo_path):
122 path = os.path.join(repo_path, '.hg', 'hgrc')
122 path = os.path.join(repo_path, '.hg', 'hgrc')
123
123
124 if not os.path.isfile(path):
124 if not os.path.isfile(path):
125 log.debug('hgrc file is not present at %s, skipping...', path)
125 log.debug('hgrc file is not present at %s, skipping...', path)
126 return
126 return
127 log.debug('reading hgrc from %s', path)
127 log.debug('reading hgrc from %s', path)
128 cfg = mercurial.config.config()
128 cfg = mercurial.config.config()
129 cfg.read(path)
129 cfg.read(path)
130 for section in HG_UI_SECTIONS:
130 for section in HG_UI_SECTIONS:
131 for k, v in cfg.items(section):
131 for k, v in cfg.items(section):
132 log.debug('setting ui from file: [%s] %s=%s', section, k, v)
132 log.debug('setting ui from file: [%s] %s=%s', section, k, v)
133 baseui.setconfig(section, k, v)
133 baseui.setconfig(section, k, v)
134
134
135
135
136 def create_hg_wsgi_app(repo_path, repo_name, config):
136 def create_hg_wsgi_app(repo_path, repo_name, config):
137 """
137 """
138 Prepares a WSGI application to handle Mercurial requests.
138 Prepares a WSGI application to handle Mercurial requests.
139
139
140 :param config: is a list of 3-item tuples representing a ConfigObject
140 :param config: is a list of 3-item tuples representing a ConfigObject
141 (it is the serialized version of the config object).
141 (it is the serialized version of the config object).
142 """
142 """
143 log.debug("Creating Mercurial WSGI application")
143 log.debug("Creating Mercurial WSGI application")
144
144
145 baseui = make_hg_ui_from_config(config)
145 baseui = make_hg_ui_from_config(config)
146 update_hg_ui_from_hgrc(baseui, repo_path)
146 update_hg_ui_from_hgrc(baseui, repo_path)
147
147
148 try:
148 try:
149 return HgWeb(repo_path, name=repo_name, baseui=baseui)
149 return HgWeb(repo_path, name=repo_name, baseui=baseui)
150 except mercurial.error.RequirementError as exc:
150 except mercurial.error.RequirementError as exc:
151 raise exceptions.RequirementException(exc)
151 raise exceptions.RequirementException(exc)
152
152
153
153
154 class GitHandler(object):
154 class GitHandler(object):
155 """
155 """
156 Handler for Git operations like push/pull etc
156 Handler for Git operations like push/pull etc
157 """
157 """
158 def __init__(self, repo_location, repo_name, git_path, update_server_info,
158 def __init__(self, repo_location, repo_name, git_path, update_server_info,
159 extras):
159 extras):
160 if not os.path.isdir(repo_location):
160 if not os.path.isdir(repo_location):
161 raise OSError(repo_location)
161 raise OSError(repo_location)
162 self.content_path = repo_location
162 self.content_path = repo_location
163 self.repo_name = repo_name
163 self.repo_name = repo_name
164 self.repo_location = repo_location
164 self.repo_location = repo_location
165 self.extras = extras
165 self.extras = extras
166 self.git_path = git_path
166 self.git_path = git_path
167 self.update_server_info = update_server_info
167 self.update_server_info = update_server_info
168
168
169 def __call__(self, environ, start_response):
169 def __call__(self, environ, start_response):
170 app = webob.exc.HTTPNotFound()
170 app = webob.exc.HTTPNotFound()
171 candidate_paths = (
171 candidate_paths = (
172 self.content_path, os.path.join(self.content_path, '.git'))
172 self.content_path, os.path.join(self.content_path, '.git'))
173
173
174 for content_path in candidate_paths:
174 for content_path in candidate_paths:
175 try:
175 try:
176 app = pygrack.GitRepository(
176 app = pygrack.GitRepository(
177 self.repo_name, content_path, self.git_path,
177 self.repo_name, content_path, self.git_path,
178 self.update_server_info, self.extras)
178 self.update_server_info, self.extras)
179 break
179 break
180 except OSError:
180 except OSError:
181 continue
181 continue
182
182
183 return app(environ, start_response)
183 return app(environ, start_response)
184
184
185
185
186 def create_git_wsgi_app(repo_path, repo_name, config):
186 def create_git_wsgi_app(repo_path, repo_name, config):
187 """
187 """
188 Creates a WSGI application to handle Git requests.
188 Creates a WSGI application to handle Git requests.
189
189
190 :param config: is a dictionary holding the extras.
190 :param config: is a dictionary holding the extras.
191 """
191 """
192 git_path = settings.GIT_EXECUTABLE
192 git_path = settings.GIT_EXECUTABLE
193 update_server_info = config.pop('git_update_server_info')
193 update_server_info = config.pop('git_update_server_info')
194 app = GitHandler(
194 app = GitHandler(
195 repo_path, repo_name, git_path, update_server_info, config)
195 repo_path, repo_name, git_path, update_server_info, config)
196
196
197 return app
197 return app
198
198
199
199
200 class GitLFSHandler(object):
200 class GitLFSHandler(object):
201 """
201 """
202 Handler for Git LFS operations
202 Handler for Git LFS operations
203 """
203 """
204
204
205 def __init__(self, repo_location, repo_name, git_path, update_server_info,
205 def __init__(self, repo_location, repo_name, git_path, update_server_info,
206 extras):
206 extras):
207 if not os.path.isdir(repo_location):
207 if not os.path.isdir(repo_location):
208 raise OSError(repo_location)
208 raise OSError(repo_location)
209 self.content_path = repo_location
209 self.content_path = repo_location
210 self.repo_name = repo_name
210 self.repo_name = repo_name
211 self.repo_location = repo_location
211 self.repo_location = repo_location
212 self.extras = extras
212 self.extras = extras
213 self.git_path = git_path
213 self.git_path = git_path
214 self.update_server_info = update_server_info
214 self.update_server_info = update_server_info
215
215
216 def get_app(self, git_lfs_enabled, git_lfs_store_path):
216 def get_app(self, git_lfs_enabled, git_lfs_store_path):
217 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
217 app = git_lfs.create_app(git_lfs_enabled, git_lfs_store_path)
218 return app
218 return app
219
219
220
220
221 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
221 def create_git_lfs_wsgi_app(repo_path, repo_name, config):
222 git_path = settings.GIT_EXECUTABLE
222 git_path = settings.GIT_EXECUTABLE
223 update_server_info = config.pop('git_update_server_info')
223 update_server_info = config.pop('git_update_server_info')
224 git_lfs_enabled = config.pop('git_lfs_enabled')
224 git_lfs_enabled = config.pop('git_lfs_enabled')
225 git_lfs_store_path = config.pop('git_lfs_store_path')
225 git_lfs_store_path = config.pop('git_lfs_store_path')
226 app = GitLFSHandler(
226 app = GitLFSHandler(
227 repo_path, repo_name, git_path, update_server_info, config)
227 repo_path, repo_name, git_path, update_server_info, config)
228
228
229 return app.get_app(git_lfs_enabled, git_lfs_store_path)
229 return app.get_app(git_lfs_enabled, git_lfs_store_path)
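To make the two factory contracts above concrete, here is a hedged sketch of constructing both applications and serving one of them with the stdlib WSGI server; the repository paths and config values are illustrative assumptions, not values used by RhodeCode itself.

from wsgiref.simple_server import make_server

from vcsserver import scm_app

# Git: `config` is the extras dict; 'git_update_server_info' is popped by the factory.
git_app = scm_app.create_git_wsgi_app(
    '/tmp/test-repo.git', 'test-repo', {'git_update_server_info': False})

# Mercurial: `config` is a serialized config, i.e. a list of 3-item tuples.
hg_app = scm_app.create_hg_wsgi_app(
    '/tmp/test-hg-repo', 'test-hg-repo', [('web', 'push_ssl', 'false')])

make_server('127.0.0.1', 8090, git_app).serve_forever()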
@@ -1,78 +1,78 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import gc
18 import gc
19 import logging
19 import logging
20 import os
20 import os
21 import time
21 import time
22
22
23
23
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 class VcsServer(object):
27 class VcsServer(object):
28 """
28 """
29 Exposed remote interface of the vcsserver itself.
29 Exposed remote interface of the vcsserver itself.
30
30
31 This object can be used to manage the server remotely. Right now the main
31 This object can be used to manage the server remotely. Right now the main
32 use case is to allow shutting down the server.
32 use case is to allow shutting down the server.
33 """
33 """
34
34
35 _shutdown = False
35 _shutdown = False
36
36
37 def shutdown(self):
37 def shutdown(self):
38 self._shutdown = True
38 self._shutdown = True
39
39
40 def ping(self):
40 def ping(self):
41 """
41 """
42 Utility to probe a server connection.
42 Utility to probe a server connection.
43 """
43 """
44 log.debug("Received server ping.")
44 log.debug("Received server ping.")
45
45
46 def echo(self, data):
46 def echo(self, data):
47 """
47 """
48 Utility for performance testing.
48 Utility for performance testing.
49
49
50 Allows passing in arbitrary data and returns the same data.
50 Allows passing in arbitrary data and returns the same data.
51 """
51 """
52 log.debug("Received server echo.")
52 log.debug("Received server echo.")
53 return data
53 return data
54
54
55 def sleep(self, seconds):
55 def sleep(self, seconds):
56 """
56 """
57 Utility to simulate long running server interaction.
57 Utility to simulate long running server interaction.
58 """
58 """
59 log.debug("Sleeping %s seconds", seconds)
59 log.debug("Sleeping %s seconds", seconds)
60 time.sleep(seconds)
60 time.sleep(seconds)
61
61
62 def get_pid(self):
62 def get_pid(self):
63 """
63 """
64 Allows discovering the PID through a proxy object.
64 Allows discovering the PID through a proxy object.
65 """
65 """
66 return os.getpid()
66 return os.getpid()
67
67
68 def run_gc(self):
68 def run_gc(self):
69 """
69 """
70 Allows triggering the garbage collector.
70 Allows triggering the garbage collector.
71
71
72 Main intention is to support statistics gathering during test runs.
72 Main intention is to support statistics gathering during test runs.
73 """
73 """
74 freed_objects = gc.collect()
74 freed_objects = gc.collect()
75 return {
75 return {
76 'freed_objects': freed_objects,
76 'freed_objects': freed_objects,
77 'garbage': len(gc.garbage),
77 'garbage': len(gc.garbage),
78 }
78 }
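A short, hedged sketch of the management interface above, exercised in-process; in a deployment these methods are reached through the vcsserver RPC layer rather than called directly, and the payload below is only illustrative.

server = VcsServer()

server.ping()                              # logs "Received server ping."
assert server.echo({'answer': 42}) == {'answer': 42}
print('vcsserver pid: %s' % server.get_pid())
print('gc stats: %s' % server.run_gc())    # {'freed_objects': ..., 'garbage': ...}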
@@ -1,19 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 WIRE_ENCODING = 'UTF-8'
18 WIRE_ENCODING = 'UTF-8'
19 GIT_EXECUTABLE = 'git'
19 GIT_EXECUTABLE = 'git'
@@ -1,476 +1,481 b''
1 """
1 """
2 Module provides a class that wraps communication over subprocess.Popen
2 Module provides a class that wraps communication over subprocess.Popen
3 input, output, and error streams into a meaningful, non-blocking, concurrent
3 input, output, and error streams into a meaningful, non-blocking, concurrent
4 stream processor exposing the output data as an iterator fit to be a
4 stream processor exposing the output data as an iterator fit to be a
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
5 return value passed by a WSGI application to a WSGI server per PEP 3333.
6
6
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
7 Copyright (c) 2011 Daniel Dotsenko <dotsa[at]hotmail.com>
8
8
9 This file is part of git_http_backend.py Project.
9 This file is part of git_http_backend.py Project.
10
10
11 git_http_backend.py Project is free software: you can redistribute it and/or
11 git_http_backend.py Project is free software: you can redistribute it and/or
12 modify it under the terms of the GNU Lesser General Public License as
12 modify it under the terms of the GNU Lesser General Public License as
13 published by the Free Software Foundation, either version 2.1 of the License,
13 published by the Free Software Foundation, either version 2.1 of the License,
14 or (at your option) any later version.
14 or (at your option) any later version.
15
15
16 git_http_backend.py Project is distributed in the hope that it will be useful,
16 git_http_backend.py Project is distributed in the hope that it will be useful,
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 but WITHOUT ANY WARRANTY; without even the implied warranty of
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 GNU Lesser General Public License for more details.
19 GNU Lesser General Public License for more details.
20
20
21 You should have received a copy of the GNU Lesser General Public License
21 You should have received a copy of the GNU Lesser General Public License
22 along with git_http_backend.py Project.
22 along with git_http_backend.py Project.
23 If not, see <http://www.gnu.org/licenses/>.
23 If not, see <http://www.gnu.org/licenses/>.
24 """
24 """
25 import os
25 import os
26 import subprocess32 as subprocess
26 import subprocess32 as subprocess
27 from collections import deque
27 from collections import deque
28 from threading import Event, Thread
28 from threading import Event, Thread
29
29
30
30
31 class StreamFeeder(Thread):
31 class StreamFeeder(Thread):
32 """
32 """
33 Normal writing into a pipe-like object blocks once the buffer is filled.
33 Normal writing into a pipe-like object blocks once the buffer is filled.
34 This thread allows data to seep from a file-like object into a pipe
34 This thread allows data to seep from a file-like object into a pipe
35 without blocking the main thread.
35 without blocking the main thread.
36 We close inpipe once the end of the source stream is reached.
36 We close inpipe once the end of the source stream is reached.
37 """
37 """
38
38
39 def __init__(self, source):
39 def __init__(self, source):
40 super(StreamFeeder, self).__init__()
40 super(StreamFeeder, self).__init__()
41 self.daemon = True
41 self.daemon = True
42 filelike = False
42 filelike = False
43 self.bytes = bytes()
43 self.bytes = bytes()
44 if type(source) in (type(''), bytes, bytearray): # string-like
44 if type(source) in (type(''), bytes, bytearray): # string-like
45 self.bytes = bytes(source)
45 self.bytes = bytes(source)
46 else: # can be either file pointer or file-like
46 else: # can be either file pointer or file-like
47 if type(source) in (int, long): # file pointer it is
47 if type(source) in (int, long): # file pointer it is
48 ## converting file descriptor (int) stdin into file-like
48 # converting file descriptor (int) stdin into file-like
49 try:
49 try:
50 source = os.fdopen(source, 'rb', 16384)
50 source = os.fdopen(source, 'rb', 16384)
51 except Exception:
51 except Exception:
52 pass
52 pass
53 # let's see if source is file-like by now
53 # let's see if source is file-like by now
54 try:
54 try:
55 filelike = source.read
55 filelike = source.read
56 except Exception:
56 except Exception:
57 pass
57 pass
58 if not filelike and not self.bytes:
58 if not filelike and not self.bytes:
59 raise TypeError("StreamFeeder's source object must be a readable "
59 raise TypeError("StreamFeeder's source object must be a readable "
60 "file-like, a file descriptor, or a string-like.")
60 "file-like, a file descriptor, or a string-like.")
61 self.source = source
61 self.source = source
62 self.readiface, self.writeiface = os.pipe()
62 self.readiface, self.writeiface = os.pipe()
63
63
64 def run(self):
64 def run(self):
65 t = self.writeiface
65 t = self.writeiface
66 if self.bytes:
66 if self.bytes:
67 os.write(t, self.bytes)
67 os.write(t, self.bytes)
68 else:
68 else:
69 s = self.source
69 s = self.source
70 b = s.read(4096)
70 b = s.read(4096)
71 while b:
71 while b:
72 os.write(t, b)
72 os.write(t, b)
73 b = s.read(4096)
73 b = s.read(4096)
74 os.close(t)
74 os.close(t)
75
75
76 @property
76 @property
77 def output(self):
77 def output(self):
78 return self.readiface
78 return self.readiface
79
79
80
80
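# Editor's sketch (not part of this changeset): minimal StreamFeeder usage,
# assuming an in-memory payload. The feeder thread copies the source into a
# pipe and closes the write end, so plain os.read() drains it without the
# caller ever blocking on the write side.
def _stream_feeder_sketch():
    feeder = StreamFeeder('example payload')
    feeder.start()
    fd = feeder.output                 # read end of the pipe (a file descriptor)
    data, chunk = '', os.read(fd, 4096)
    while chunk:                       # an empty string signals EOF
        data += chunk
        chunk = os.read(fd, 4096)
    os.close(fd)
    return data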
81 class InputStreamChunker(Thread):
81 class InputStreamChunker(Thread):
82 def __init__(self, source, target, buffer_size, chunk_size):
82 def __init__(self, source, target, buffer_size, chunk_size):
83
83
84 super(InputStreamChunker, self).__init__()
84 super(InputStreamChunker, self).__init__()
85
85
86 self.daemon = True # die die die.
86 self.daemon = True # die die die.
87
87
88 self.source = source
88 self.source = source
89 self.target = target
89 self.target = target
90 self.chunk_count_max = int(buffer_size / chunk_size) + 1
90 self.chunk_count_max = int(buffer_size / chunk_size) + 1
91 self.chunk_size = chunk_size
91 self.chunk_size = chunk_size
92
92
93 self.data_added = Event()
93 self.data_added = Event()
94 self.data_added.clear()
94 self.data_added.clear()
95
95
96 self.keep_reading = Event()
96 self.keep_reading = Event()
97 self.keep_reading.set()
97 self.keep_reading.set()
98
98
99 self.EOF = Event()
99 self.EOF = Event()
100 self.EOF.clear()
100 self.EOF.clear()
101
101
102 self.go = Event()
102 self.go = Event()
103 self.go.set()
103 self.go.set()
104
104
105 def stop(self):
105 def stop(self):
106 self.go.clear()
106 self.go.clear()
107 self.EOF.set()
107 self.EOF.set()
108 try:
108 try:
109 # this is not proper, but is done to force the reader thread to let
109 # this is not proper, but is done to force the reader thread to let
110 # go of the input because, if successful, .close() will send EOF
110 # go of the input because, if successful, .close() will send EOF
111 # down the pipe.
111 # down the pipe.
112 self.source.close()
112 self.source.close()
113 except:
113 except:
114 pass
114 pass
115
115
116 def run(self):
116 def run(self):
117 s = self.source
117 s = self.source
118 t = self.target
118 t = self.target
119 cs = self.chunk_size
119 cs = self.chunk_size
120 ccm = self.chunk_count_max
120 ccm = self.chunk_count_max
121 kr = self.keep_reading
121 keep_reading = self.keep_reading
122 da = self.data_added
122 da = self.data_added
123 go = self.go
123 go = self.go
124
124
125 try:
125 try:
126 b = s.read(cs)
126 b = s.read(cs)
127 except ValueError:
127 except ValueError:
128 b = ''
128 b = ''
129
129
130 while b and go.is_set():
130 while b and go.is_set():
131 if len(t) > ccm:
131 if len(t) > ccm:
132 kr.clear()
132 keep_reading.clear()
133 kr.wait(2)
133 keep_reading.wait(2)
134 # # this only works on 2.7.x and up
134
135 # if not kr.wait(10):
135 if not keep_reading.wait(10):
136 # raise Exception("Timed out while waiting for input to be read.")
136 raise Exception(
137 # instead we'll use this
137 "Timed out while waiting for input to be read.")
138 if len(t) > ccm + 3:
138
139 raise IOError(
140 "Timed out while waiting for input from subprocess.")
141 t.append(b)
139 t.append(b)
142 da.set()
140 da.set()
143 b = s.read(cs)
141 b = s.read(cs)
144 self.EOF.set()
142 self.EOF.set()
145 da.set() # for cases when done but there was no input.
143 da.set() # for cases when done but there was no input.
146
144
147
145
148 class BufferedGenerator(object):
146 class BufferedGenerator(object):
149 """
147 """
150 Class behaves as a non-blocking, buffered pipe reader.
148 Class behaves as a non-blocking, buffered pipe reader.
151 Reads chunks of data (through a thread)
149 Reads chunks of data (through a thread)
152 from a blocking pipe, and attaches these to an array (Deque) of chunks.
150 from a blocking pipe, and attaches these to an array (Deque) of chunks.
153 Reading is halted in the thread when max chunks is internally buffered.
151 Reading is halted in the thread when max chunks is internally buffered.
154 The .next() method may operate in a blocking or non-blocking fashion by yielding
152 The .next() method may operate in a blocking or non-blocking fashion by yielding
155 '' if no data is ready
153 '' if no data is ready
156 to be sent, or by not returning until there is some data to send.
154 to be sent, or by not returning until there is some data to send.
157 When we get EOF from the underlying source pipe, we set a marker to raise
155 When we get EOF from the underlying source pipe, we set a marker to raise
158 StopIteration after the last chunk of data is yielded.
156 StopIteration after the last chunk of data is yielded.
159 """
157 """
160
158
161 def __init__(self, source, buffer_size=65536, chunk_size=4096,
159 def __init__(self, source, buffer_size=65536, chunk_size=4096,
162 starting_values=[], bottomless=False):
160 starting_values=None, bottomless=False):
161 starting_values = starting_values or []
163
162
164 if bottomless:
163 if bottomless:
165 maxlen = int(buffer_size / chunk_size)
164 maxlen = int(buffer_size / chunk_size)
166 else:
165 else:
167 maxlen = None
166 maxlen = None
168
167
169 self.data = deque(starting_values, maxlen)
168 self.data = deque(starting_values, maxlen)
170 self.worker = InputStreamChunker(source, self.data, buffer_size,
169 self.worker = InputStreamChunker(source, self.data, buffer_size,
171 chunk_size)
170 chunk_size)
172 if starting_values:
171 if starting_values:
173 self.worker.data_added.set()
172 self.worker.data_added.set()
174 self.worker.start()
173 self.worker.start()
175
174
176 ####################
175 ####################
177 # Generator's methods
176 # Generator's methods
178 ####################
177 ####################
179
178
180 def __iter__(self):
179 def __iter__(self):
181 return self
180 return self
182
181
183 def next(self):
182 def next(self):
184 while not len(self.data) and not self.worker.EOF.is_set():
183 while not len(self.data) and not self.worker.EOF.is_set():
185 self.worker.data_added.clear()
184 self.worker.data_added.clear()
186 self.worker.data_added.wait(0.2)
185 self.worker.data_added.wait(0.2)
187 if len(self.data):
186 if len(self.data):
188 self.worker.keep_reading.set()
187 self.worker.keep_reading.set()
189 return bytes(self.data.popleft())
188 return bytes(self.data.popleft())
190 elif self.worker.EOF.is_set():
189 elif self.worker.EOF.is_set():
191 raise StopIteration
190 raise StopIteration
192
191
193 def throw(self, type, value=None, traceback=None):
192 def throw(self, exc_type, value=None, traceback=None):
194 if not self.worker.EOF.is_set():
193 if not self.worker.EOF.is_set():
195 raise type(value)
194 raise exc_type(value)
196
195
197 def start(self):
196 def start(self):
198 self.worker.start()
197 self.worker.start()
199
198
200 def stop(self):
199 def stop(self):
201 self.worker.stop()
200 self.worker.stop()
202
201
203 def close(self):
202 def close(self):
204 try:
203 try:
205 self.worker.stop()
204 self.worker.stop()
206 self.throw(GeneratorExit)
205 self.throw(GeneratorExit)
207 except (GeneratorExit, StopIteration):
206 except (GeneratorExit, StopIteration):
208 pass
207 pass
209
208
210 def __del__(self):
209 def __del__(self):
211 self.close()
210 self.close()
212
211
213 ####################
212 ####################
214 # Threaded reader's infrastructure.
213 # Threaded reader's infrastructure.
215 ####################
214 ####################
216 @property
215 @property
217 def input(self):
216 def input(self):
218 return self.worker.w
217 return self.worker.w
219
218
220 @property
219 @property
221 def data_added_event(self):
220 def data_added_event(self):
222 return self.worker.data_added
221 return self.worker.data_added
223
222
224 @property
223 @property
225 def data_added(self):
224 def data_added(self):
226 return self.worker.data_added.is_set()
225 return self.worker.data_added.is_set()
227
226
228 @property
227 @property
229 def reading_paused(self):
228 def reading_paused(self):
230 return not self.worker.keep_reading.is_set()
229 return not self.worker.keep_reading.is_set()
231
230
232 @property
231 @property
233 def done_reading_event(self):
232 def done_reading_event(self):
234 """
233 """
235 Done reading does not mean that the iterator's buffer is empty.
234 Done reading does not mean that the iterator's buffer is empty.
236 The iterator might be done reading from the underlying source, but the read
235 The iterator might be done reading from the underlying source, but the read
237 chunks might still be available for serving through the .next() method.
236 chunks might still be available for serving through the .next() method.
238
237
239 :returns: An Event class instance.
238 :returns: An Event class instance.
240 """
239 """
241 return self.worker.EOF
240 return self.worker.EOF
242
241
243 @property
242 @property
244 def done_reading(self):
243 def done_reading(self):
245 """
244 """
246 Done reading does not mean that the iterator's buffer is empty.
245 Done reading does not mean that the iterator's buffer is empty.
247 The iterator might be done reading from the underlying source, but the read
246 The iterator might be done reading from the underlying source, but the read
248 chunks might still be available for serving through the .next() method.
247 chunks might still be available for serving through the .next() method.
249
248
250 :returns: A bool value.
249 :returns: A bool value.
251 """
250 """
252 return self.worker.EOF.is_set()
251 return self.worker.EOF.is_set()
253
252
254 @property
253 @property
255 def length(self):
254 def length(self):
256 """
255 """
257 returns int.
256 returns int.
258
257
259 This is the length of the queue of chunks, not the length of
258 This is the length of the queue of chunks, not the length of
260 the combined contents in those chunks.
259 the combined contents in those chunks.
261
260
262 __len__() cannot be meaningfully implemented because this
261 __len__() cannot be meaningfully implemented because this
263 reader is just flying through a bottomless pit of content and
262 reader is just flying through a bottomless pit of content and
264 can only know the length of what it has already seen.
263 can only know the length of what it has already seen.
265
264
266 If __len__() returns a value, a PEP 3333 WSGI server will set
265 If __len__() returns a value, a PEP 3333 WSGI server will set
267 the response's length to that value. In order not to
266 the response's length to that value. In order not to
268 confuse PEP 3333 WSGI servers, we will not implement __len__
267 confuse PEP 3333 WSGI servers, we will not implement __len__
269 at all.
268 at all.
270 """
269 """
271 return len(self.data)
270 return len(self.data)
272
271
273 def prepend(self, x):
272 def prepend(self, x):
274 self.data.appendleft(x)
273 self.data.appendleft(x)
275
274
276 def append(self, x):
275 def append(self, x):
277 self.data.append(x)
276 self.data.append(x)
278
277
279 def extend(self, o):
278 def extend(self, o):
280 self.data.extend(o)
279 self.data.extend(o)
281
280
282 def __getitem__(self, i):
281 def __getitem__(self, i):
283 return self.data[i]
282 return self.data[i]
284
283
285
284
286 class SubprocessIOChunker(object):
285 class SubprocessIOChunker(object):
287 """
286 """
288 Processor class wrapping handling of subprocess IO.
287 Processor class wrapping handling of subprocess IO.
289
288
290 .. important::
289 .. important::
291
290
292 Watch out for the method `__del__` on this class. If this object
291 Watch out for the method `__del__` on this class. If this object
293 is deleted, it will kill the subprocess, so avoid returning
292 is deleted, it will kill the subprocess, so avoid returning
294 the `output` attribute or using it like in the following
293 the `output` attribute or using it like in the following
295 example::
294 example::
296
295
297 # `args` expected to run a program that produces a lot of output
296 # `args` expected to run a program that produces a lot of output
298 output = ''.join(SubprocessIOChunker(
297 output = ''.join(SubprocessIOChunker(
299 args, shell=False, inputstream=inputstream, env=environ).output)
298 args, shell=False, inputstream=inputstream, env=environ).output)
300
299
301 # `output` will not contain all the data, because the __del__ method
300 # `output` will not contain all the data, because the __del__ method
302 # has already killed the subprocess in this case before all output
301 # has already killed the subprocess in this case before all output
303 # has been consumed.
302 # has been consumed.
304
303
305
304
306
305
307 In a way, this is a "communicate()" replacement with a twist.
306 In a way, this is a "communicate()" replacement with a twist.
308
307
309 - We are multithreaded. Writing in and reading out/err are all separate threads.
308 - We are multithreaded. Writing in and reading out/err are all separate threads.
310 - We support concurrent (in and out) stream processing.
309 - We support concurrent (in and out) stream processing.
311 - The output is not a stream. It's a queue of read string (bytes, not unicode)
310 - The output is not a stream. It's a queue of read string (bytes, not unicode)
312 chunks. The object behaves as an iterable; you can iterate over it with "for chunk in obj:".
311 chunks. The object behaves as an iterable; you can iterate over it with "for chunk in obj:".
313 - We are non-blocking in more respects than communicate()
312 - We are non-blocking in more respects than communicate()
314 (reading from subprocess out pauses when internal buffer is full, but
313 (reading from subprocess out pauses when internal buffer is full, but
315 does not block the parent calling code. On the flip side, reading from
314 does not block the parent calling code. On the flip side, reading from
316 slow-yielding subprocess may block the iteration until data shows up. This
315 slow-yielding subprocess may block the iteration until data shows up. This
317 does not block the inpipe reading occurring in a parallel thread.)
316 does not block the inpipe reading occurring in a parallel thread.)
318
317
319 The purpose of the object is to allow us to wrap subprocess interactions into
318 The purpose of the object is to allow us to wrap subprocess interactions into
320 an iterable that can be passed to a WSGI server as the application's return
319 an iterable that can be passed to a WSGI server as the application's return
321 value. Because of stream-processing-ability, WSGI does not have to read ALL
320 value. Because of stream-processing-ability, WSGI does not have to read ALL
322 of the subprocess's output and buffer it, before handing it to WSGI server for
321 of the subprocess's output and buffer it, before handing it to WSGI server for
323 HTTP response. Instead, the class initializer reads just a bit of the stream
322 HTTP response. Instead, the class initializer reads just a bit of the stream
324 to figure out if an error occurred or is likely to occur and, if not, just hands the
323 to figure out if an error occurred or is likely to occur and, if not, just hands the
325 further iteration over subprocess output to the server for completion of HTTP
324 further iteration over subprocess output to the server for completion of HTTP
326 response.
325 response.
327
326
328 The real or perceived subprocess error is trapped and raised as one of
327 The real or perceived subprocess error is trapped and raised as one of
329 the EnvironmentError family of exceptions.
328 the EnvironmentError family of exceptions.
330
329
331 Example usage:
330 Example usage:
332 # try:
331 # try:
333 # answer = SubprocessIOChunker(
332 # answer = SubprocessIOChunker(
334 # cmd,
333 # cmd,
335 # input,
334 # input,
336 # buffer_size = 65536,
335 # buffer_size = 65536,
337 # chunk_size = 4096
336 # chunk_size = 4096
338 # )
337 # )
339 # except (EnvironmentError) as e:
338 # except (EnvironmentError) as e:
340 # print str(e)
339 # print str(e)
341 # raise e
340 # raise e
342 #
341 #
343 # return answer
342 # return answer
344
343
345
344
346 """
345 """
347
346
348 # TODO: johbo: This is used to make sure that the open end of the PIPE
347 # TODO: johbo: This is used to make sure that the open end of the PIPE
349 # is closed in the end. It would be way better to wrap this into an
348 # is closed in the end. It would be way better to wrap this into an
350 # object, so that it is closed automatically once it is consumed or
349 # object, so that it is closed automatically once it is consumed or
351 # something similar.
350 # something similar.
352 _close_input_fd = None
351 _close_input_fd = None
353
352
354 _closed = False
353 _closed = False
355
354
356 def __init__(self, cmd, inputstream=None, buffer_size=65536,
355 def __init__(self, cmd, inputstream=None, buffer_size=65536,
357 chunk_size=4096, starting_values=[], fail_on_stderr=True,
356 chunk_size=4096, starting_values=None, fail_on_stderr=True,
358 fail_on_return_code=True, **kwargs):
357 fail_on_return_code=True, **kwargs):
359 """
358 """
360 Initializes SubprocessIOChunker
359 Initializes SubprocessIOChunker
361
360
362 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
361 :param cmd: A Subprocess.Popen style "cmd". Can be string or array of strings
363 :param inputstream: (Default: None) A file-like, string, or file pointer.
362 :param inputstream: (Default: None) A file-like, string, or file pointer.
364 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
363 :param buffer_size: (Default: 65536) A size of total buffer per stream in bytes.
365 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
364 :param chunk_size: (Default: 4096) A max size of a chunk. Actual chunk may be smaller.
366 :param starting_values: (Default: []) An array of strings to put in front of the output queue.
365 :param starting_values: (Default: []) An array of strings to put in front of the output queue.
367 :param fail_on_stderr: (Default: True) Whether to raise an exception in
366 :param fail_on_stderr: (Default: True) Whether to raise an exception in
368 case something is written to stderr.
367 case something is written to stderr.
369 :param fail_on_return_code: (Default: True) Whether to raise an
368 :param fail_on_return_code: (Default: True) Whether to raise an
370 exception if the return code is not 0.
369 exception if the return code is not 0.
371 """
370 """
372
371
372 starting_values = starting_values or []
373 if inputstream:
373 if inputstream:
374 input_streamer = StreamFeeder(inputstream)
374 input_streamer = StreamFeeder(inputstream)
375 input_streamer.start()
375 input_streamer.start()
376 inputstream = input_streamer.output
376 inputstream = input_streamer.output
377 self._close_input_fd = inputstream
377 self._close_input_fd = inputstream
378
378
379 self._fail_on_stderr = fail_on_stderr
379 self._fail_on_stderr = fail_on_stderr
380 self._fail_on_return_code = fail_on_return_code
380 self._fail_on_return_code = fail_on_return_code
381
381
382 _shell = kwargs.get('shell', True)
382 _shell = kwargs.get('shell', True)
383 kwargs['shell'] = _shell
383 kwargs['shell'] = _shell
384
384
385 _p = subprocess.Popen(cmd, bufsize=-1,
385 _p = subprocess.Popen(cmd, bufsize=-1,
386 stdin=inputstream,
386 stdin=inputstream,
387 stdout=subprocess.PIPE,
387 stdout=subprocess.PIPE,
388 stderr=subprocess.PIPE,
388 stderr=subprocess.PIPE,
389 **kwargs)
389 **kwargs)
390
390
391 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
391 bg_out = BufferedGenerator(_p.stdout, buffer_size, chunk_size,
392 starting_values)
392 starting_values)
393 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
393 bg_err = BufferedGenerator(_p.stderr, 16000, 1, bottomless=True)
394
394
395 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
395 while not bg_out.done_reading and not bg_out.reading_paused and not bg_err.length:
396 # doing this until we reach either end of file, or end of buffer.
396 # doing this until we reach either end of file, or end of buffer.
397 bg_out.data_added_event.wait(1)
397 bg_out.data_added_event.wait(1)
398 bg_out.data_added_event.clear()
398 bg_out.data_added_event.clear()
399
399
400 # at this point it's still ambiguous if we are done reading or just full buffer.
400 # at this point it's still ambiguous if we are done reading or just full buffer.
401 # Either way, if error (returned by ended process, or implied based on
401 # Either way, if error (returned by ended process, or implied based on
402 # presence of stuff in stderr output) we error out.
402 # presence of stuff in stderr output) we error out.
403 # Else, we are happy.
403 # Else, we are happy.
404 _returncode = _p.poll()
404 _returncode = _p.poll()
405
405
406 if ((_returncode and fail_on_return_code) or
406 if ((_returncode and fail_on_return_code) or
407 (fail_on_stderr and _returncode is None and bg_err.length)):
407 (fail_on_stderr and _returncode is None and bg_err.length)):
408 try:
408 try:
409 _p.terminate()
409 _p.terminate()
410 except Exception:
410 except Exception:
411 pass
411 pass
412 bg_out.stop()
412 bg_out.stop()
413 bg_err.stop()
413 bg_err.stop()
414 if fail_on_stderr:
414 if fail_on_stderr:
415 err = ''.join(bg_err)
415 err = ''.join(bg_err)
416 raise EnvironmentError(
416 raise EnvironmentError(
417 "Subprocess exited due to an error:\n" + err)
417 "Subprocess exited due to an error:\n" + err)
418 if _returncode and fail_on_return_code:
418 if _returncode and fail_on_return_code:
419 err = ''.join(bg_err)
419 err = ''.join(bg_err)
420 if not err:
421 # maybe get empty stderr, try stdout instead
422 # in many cases git reports the errors on stdout too
423 err = ''.join(bg_out)
420 raise EnvironmentError(
424 raise EnvironmentError(
421 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
425 "Subprocess exited with non 0 ret code:%s: stderr:%s" % (
422 _returncode, err))
426 _returncode, err))
423
427
424 self.process = _p
428 self.process = _p
425 self.output = bg_out
429 self.output = bg_out
426 self.error = bg_err
430 self.error = bg_err
427
431
428 def __iter__(self):
432 def __iter__(self):
429 return self
433 return self
430
434
431 def next(self):
435 def next(self):
432 # Note: mikhail: We need to be sure that we are checking the return
436 # Note: mikhail: We need to be sure that we are checking the return
433 # code after the stdout stream is closed. Some processes, e.g. git
437 # code after the stdout stream is closed. Some processes, e.g. git
434 # are doing some magic in between closing stdout and terminating the
438 # are doing some magic in between closing stdout and terminating the
435 # process and, as a result, we are not getting return code on "slow"
439 # process and, as a result, we are not getting return code on "slow"
436 # systems.
440 # systems.
441 result = None
437 stop_iteration = None
442 stop_iteration = None
438 try:
443 try:
439 result = self.output.next()
444 result = self.output.next()
440 except StopIteration as e:
445 except StopIteration as e:
441 stop_iteration = e
446 stop_iteration = e
442
447
443 if self.process.poll() and self._fail_on_return_code:
448 if self.process.poll() and self._fail_on_return_code:
444 err = '%s' % ''.join(self.error)
449 err = '%s' % ''.join(self.error)
445 raise EnvironmentError(
450 raise EnvironmentError(
446 "Subprocess exited due to an error:\n" + err)
451 "Subprocess exited due to an error:\n" + err)
447
452
448 if stop_iteration:
453 if stop_iteration:
449 raise stop_iteration
454 raise stop_iteration
450 return result
455 return result
451
456
452 def throw(self, type, value=None, traceback=None):
457 def throw(self, type, value=None, traceback=None):
453 if self.output.length or not self.output.done_reading:
458 if self.output.length or not self.output.done_reading:
454 raise type(value)
459 raise type(value)
455
460
456 def close(self):
461 def close(self):
457 if self._closed:
462 if self._closed:
458 return
463 return
459 self._closed = True
464 self._closed = True
460 try:
465 try:
461 self.process.terminate()
466 self.process.terminate()
462 except:
467 except:
463 pass
468 pass
464 if self._close_input_fd:
469 if self._close_input_fd:
465 os.close(self._close_input_fd)
470 os.close(self._close_input_fd)
466 try:
471 try:
467 self.output.close()
472 self.output.close()
468 except:
473 except:
469 pass
474 pass
470 try:
475 try:
471 self.error.close()
476 self.error.close()
472 except:
477 except:
473 pass
478 pass
474
479
475 def __del__(self):
480 def __del__(self):
476 self.close()
481 self.close()
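Closing the module, a hedged usage sketch of SubprocessIOChunker as described in its docstring; the command is an illustrative stand-in for the `git ...` invocations the vcsserver actually streams, and the import path assumes the module is importable as vcsserver.subprocessio.

from vcsserver.subprocessio import SubprocessIOChunker

chunker = SubprocessIOChunker(
    ['echo', 'hello world'],   # placeholder command
    shell=False,               # the class defaults to shell=True, meant for string commands
    fail_on_stderr=False,
    fail_on_return_code=True,
)
output = ''.join(chunker)      # iterate fully so the subprocess can finish cleanly
chunker.close()
print(output)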
@@ -1,679 +1,679 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 import os
20 import os
21 from urllib2 import URLError
21 from urllib2 import URLError
22 import logging
22 import logging
23 import posixpath as vcspath
23 import posixpath as vcspath
24 import StringIO
24 import StringIO
25 import subprocess
25 import subprocess
26 import urllib
26 import urllib
27 import traceback
27 import traceback
28
28
29 import svn.client
29 import svn.client
30 import svn.core
30 import svn.core
31 import svn.delta
31 import svn.delta
32 import svn.diff
32 import svn.diff
33 import svn.fs
33 import svn.fs
34 import svn.repos
34 import svn.repos
35
35
36 from vcsserver import svn_diff
36 from vcsserver import svn_diff
37 from vcsserver import exceptions
37 from vcsserver import exceptions
38 from vcsserver.base import RepoFactory, raise_from_original
38 from vcsserver.base import RepoFactory, raise_from_original
39
39
40
40
41 log = logging.getLogger(__name__)
41 log = logging.getLogger(__name__)
42
42
43
43
44 # Set of svn compatible version flags.
44 # Set of svn compatible version flags.
45 # Compare with subversion/svnadmin/svnadmin.c
45 # Compare with subversion/svnadmin/svnadmin.c
46 svn_compatible_versions = set([
46 svn_compatible_versions = set([
47 'pre-1.4-compatible',
47 'pre-1.4-compatible',
48 'pre-1.5-compatible',
48 'pre-1.5-compatible',
49 'pre-1.6-compatible',
49 'pre-1.6-compatible',
50 'pre-1.8-compatible',
50 'pre-1.8-compatible',
51 'pre-1.9-compatible',
51 'pre-1.9-compatible',
52 ])
52 ])
53
53
54 svn_compatible_versions_map = {
54 svn_compatible_versions_map = {
55 'pre-1.4-compatible': '1.3',
55 'pre-1.4-compatible': '1.3',
56 'pre-1.5-compatible': '1.4',
56 'pre-1.5-compatible': '1.4',
57 'pre-1.6-compatible': '1.5',
57 'pre-1.6-compatible': '1.5',
58 'pre-1.8-compatible': '1.7',
58 'pre-1.8-compatible': '1.7',
59 'pre-1.9-compatible': '1.8',
59 'pre-1.9-compatible': '1.8',
60 }
60 }
61
61
62
62
63 def reraise_safe_exceptions(func):
63 def reraise_safe_exceptions(func):
64 """Decorator for converting svn exceptions to something neutral."""
64 """Decorator for converting svn exceptions to something neutral."""
65 def wrapper(*args, **kwargs):
65 def wrapper(*args, **kwargs):
66 try:
66 try:
67 return func(*args, **kwargs)
67 return func(*args, **kwargs)
68 except Exception as e:
68 except Exception as e:
69 if not hasattr(e, '_vcs_kind'):
69 if not hasattr(e, '_vcs_kind'):
70 log.exception("Unhandled exception in svn remote call")
70 log.exception("Unhandled exception in svn remote call")
71 raise_from_original(exceptions.UnhandledException)
71 raise_from_original(exceptions.UnhandledException)
72 raise
72 raise
73 return wrapper
73 return wrapper
74
74
75
75
76 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
77
77
78 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
80 if create:
81 fs_config = {'compatible-version': '1.9'}
81 fs_config = {'compatible-version': '1.9'}
82 if compatible_version:
82 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
85 .format(compatible_version))
86 fs_config['compatible-version'] = \
86 fs_config['compatible-version'] = \
87 svn_compatible_versions_map[compatible_version]
87 svn_compatible_versions_map[compatible_version]
88
88
89 log.debug('Create SVN repo with config "%s"', fs_config)
89 log.debug('Create SVN repo with config "%s"', fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
90 repo = svn.repos.create(path, "", "", None, fs_config)
91 else:
91 else:
92 repo = svn.repos.open(path)
92 repo = svn.repos.open(path)
93
93
94 log.debug('Got SVN object: %s', repo)
94 log.debug('Got SVN object: %s', repo)
95 return repo
95 return repo
96
96
97 def repo(self, wire, create=False, compatible_version=None):
97 def repo(self, wire, create=False, compatible_version=None):
98 def create_new_repo():
98 def create_new_repo():
99 return self._create_repo(wire, create, compatible_version)
99 return self._create_repo(wire, create, compatible_version)
100
100
101 return self._repo(wire, create_new_repo)
101 return self._repo(wire, create_new_repo)
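A hedged sketch of creating a repository through the factory above; the path is a placeholder and the `repo_cache` argument is assumed from how the factories are constructed in the tests further down. The `pre-1.8-compatible` flag resolves to the `compatible-version` setting `'1.7'` via the map at the top of the module.

factory = SubversionFactory(repo_cache=None)  # cache argument assumed here
wire = {'path': '/tmp/example-svn-repo'}
# creates the repository on disk with fs_config {'compatible-version': '1.7'}
repo = factory._create_repo(wire, create=True,
                            compatible_version='pre-1.8-compatible')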
102
102
103
103
104 NODE_TYPE_MAPPING = {
104 NODE_TYPE_MAPPING = {
105 svn.core.svn_node_file: 'file',
105 svn.core.svn_node_file: 'file',
106 svn.core.svn_node_dir: 'dir',
106 svn.core.svn_node_dir: 'dir',
107 }
107 }
108
108
109
109
110 class SvnRemote(object):
110 class SvnRemote(object):
111
111
112 def __init__(self, factory, hg_factory=None):
112 def __init__(self, factory, hg_factory=None):
113 self._factory = factory
113 self._factory = factory
114 # TODO: Remove once we do not use internal Mercurial objects anymore
114 # TODO: Remove once we do not use internal Mercurial objects anymore
115 # for subversion
115 # for subversion
116 self._hg_factory = hg_factory
116 self._hg_factory = hg_factory
117
117
118 @reraise_safe_exceptions
118 @reraise_safe_exceptions
119 def discover_svn_version(self):
119 def discover_svn_version(self):
120 try:
120 try:
121 import svn.core
121 import svn.core
122 svn_ver = svn.core.SVN_VERSION
122 svn_ver = svn.core.SVN_VERSION
123 except ImportError:
123 except ImportError:
124 svn_ver = None
124 svn_ver = None
125 return svn_ver
125 return svn_ver
126
126
127 def check_url(self, url, config_items):
127 def check_url(self, url, config_items):
128 # this can throw an exception if hgsubversion is not installed; we detect that
128 # this can throw an exception if hgsubversion is not installed; we detect that
129 from hgsubversion import svnrepo
129 from hgsubversion import svnrepo
130
130
131 baseui = self._hg_factory._create_config(config_items)
131 baseui = self._hg_factory._create_config(config_items)
132 # the uuid function returns a valid UUID only for a proper repo,
132 # the uuid function returns a valid UUID only for a proper repo,
133 # otherwise it throws an exception
133 # otherwise it throws an exception
134 try:
134 try:
135 svnrepo.svnremoterepo(baseui, url).svn.uuid
135 svnrepo.svnremoterepo(baseui, url).svn.uuid
136 except Exception:
136 except Exception:
137 tb = traceback.format_exc()
137 tb = traceback.format_exc()
138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
138 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
139 raise URLError(
139 raise URLError(
140 '"%s" is not a valid Subversion source url.' % (url, ))
140 '"%s" is not a valid Subversion source url.' % (url, ))
141 return True
141 return True
142
142
143 def is_path_valid_repository(self, wire, path):
143 def is_path_valid_repository(self, wire, path):
144
144
145 # NOTE(marcink): short-circuit the check for an SVN repo:
145 # NOTE(marcink): short-circuit the check for an SVN repo:
146 # repos.open might be expensive to call, but we have one cheap
146 # repos.open might be expensive to call, but we have one cheap
147 # precondition we can use first - checking for the 'format' file
147 # precondition we can use first - checking for the 'format' file
148
148
149 if not os.path.isfile(os.path.join(path, 'format')):
149 if not os.path.isfile(os.path.join(path, 'format')):
150 return False
150 return False
151
151
152 try:
152 try:
153 svn.repos.open(path)
153 svn.repos.open(path)
154 except svn.core.SubversionException:
154 except svn.core.SubversionException:
155 tb = traceback.format_exc()
155 tb = traceback.format_exc()
156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
156 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
157 return False
157 return False
158 return True
158 return True
159
159
160 @reraise_safe_exceptions
160 @reraise_safe_exceptions
161 def verify(self, wire):
161 def verify(self, wire):
162 repo_path = wire['path']
162 repo_path = wire['path']
163 if not self.is_path_valid_repository(wire, repo_path):
163 if not self.is_path_valid_repository(wire, repo_path):
164 raise Exception(
164 raise Exception(
165 "Path %s is not a valid Subversion repository." % repo_path)
165 "Path %s is not a valid Subversion repository." % repo_path)
166
166
167 load = subprocess.Popen(
167 load = subprocess.Popen(
168 ['svnadmin', 'info', repo_path],
168 ['svnadmin', 'info', repo_path],
169 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
169 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
170 return ''.join(load.stdout)
170 return ''.join(load.stdout)
171
171
172 def lookup(self, wire, revision):
172 def lookup(self, wire, revision):
173 if revision not in [-1, None, 'HEAD']:
173 if revision not in [-1, None, 'HEAD']:
174 raise NotImplementedError
174 raise NotImplementedError
175 repo = self._factory.repo(wire)
175 repo = self._factory.repo(wire)
176 fs_ptr = svn.repos.fs(repo)
176 fs_ptr = svn.repos.fs(repo)
177 head = svn.fs.youngest_rev(fs_ptr)
177 head = svn.fs.youngest_rev(fs_ptr)
178 return head
178 return head
179
179
180 def lookup_interval(self, wire, start_ts, end_ts):
180 def lookup_interval(self, wire, start_ts, end_ts):
181 repo = self._factory.repo(wire)
181 repo = self._factory.repo(wire)
182 fsobj = svn.repos.fs(repo)
182 fsobj = svn.repos.fs(repo)
183 start_rev = None
183 start_rev = None
184 end_rev = None
184 end_rev = None
185 if start_ts:
185 if start_ts:
186 start_ts_svn = apr_time_t(start_ts)
186 start_ts_svn = apr_time_t(start_ts)
187 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
187 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
188 else:
188 else:
189 start_rev = 1
189 start_rev = 1
190 if end_ts:
190 if end_ts:
191 end_ts_svn = apr_time_t(end_ts)
191 end_ts_svn = apr_time_t(end_ts)
192 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
192 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
193 else:
193 else:
194 end_rev = svn.fs.youngest_rev(fsobj)
194 end_rev = svn.fs.youngest_rev(fsobj)
195 return start_rev, end_rev
195 return start_rev, end_rev
196
196
197 def revision_properties(self, wire, revision):
197 def revision_properties(self, wire, revision):
198 repo = self._factory.repo(wire)
198 repo = self._factory.repo(wire)
199 fs_ptr = svn.repos.fs(repo)
199 fs_ptr = svn.repos.fs(repo)
200 return svn.fs.revision_proplist(fs_ptr, revision)
200 return svn.fs.revision_proplist(fs_ptr, revision)
201
201
202 def revision_changes(self, wire, revision):
202 def revision_changes(self, wire, revision):
203
203
204 repo = self._factory.repo(wire)
204 repo = self._factory.repo(wire)
205 fsobj = svn.repos.fs(repo)
205 fsobj = svn.repos.fs(repo)
206 rev_root = svn.fs.revision_root(fsobj, revision)
206 rev_root = svn.fs.revision_root(fsobj, revision)
207
207
208 editor = svn.repos.ChangeCollector(fsobj, rev_root)
208 editor = svn.repos.ChangeCollector(fsobj, rev_root)
209 editor_ptr, editor_baton = svn.delta.make_editor(editor)
209 editor_ptr, editor_baton = svn.delta.make_editor(editor)
210 base_dir = ""
210 base_dir = ""
211 send_deltas = False
211 send_deltas = False
212 svn.repos.replay2(
212 svn.repos.replay2(
213 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
213 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
214 editor_ptr, editor_baton, None)
214 editor_ptr, editor_baton, None)
215
215
216 added = []
216 added = []
217 changed = []
217 changed = []
218 removed = []
218 removed = []
219
219
220 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
220 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
221 for path, change in editor.changes.iteritems():
221 for path, change in editor.changes.iteritems():
222 # TODO: Decide what to do with directory nodes. Subversion can add
222 # TODO: Decide what to do with directory nodes. Subversion can add
223 # empty directories.
223 # empty directories.
224
224
225 if change.item_kind == svn.core.svn_node_dir:
225 if change.item_kind == svn.core.svn_node_dir:
226 continue
226 continue
227 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
227 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
228 added.append(path)
228 added.append(path)
229 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
229 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
230 svn.repos.CHANGE_ACTION_REPLACE]:
230 svn.repos.CHANGE_ACTION_REPLACE]:
231 changed.append(path)
231 changed.append(path)
232 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
232 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
233 removed.append(path)
233 removed.append(path)
234 else:
234 else:
235 raise NotImplementedError(
235 raise NotImplementedError(
236 "Action %s not supported on path %s" % (
236 "Action %s not supported on path %s" % (
237 change.action, path))
237 change.action, path))
238
238
239 changes = {
239 changes = {
240 'added': added,
240 'added': added,
241 'changed': changed,
241 'changed': changed,
242 'removed': removed,
242 'removed': removed,
243 }
243 }
244 return changes
244 return changes
245
245
246 def node_history(self, wire, path, revision, limit):
246 def node_history(self, wire, path, revision, limit):
247 cross_copies = False
247 cross_copies = False
248 repo = self._factory.repo(wire)
248 repo = self._factory.repo(wire)
249 fsobj = svn.repos.fs(repo)
249 fsobj = svn.repos.fs(repo)
250 rev_root = svn.fs.revision_root(fsobj, revision)
250 rev_root = svn.fs.revision_root(fsobj, revision)
251
251
252 history_revisions = []
252 history_revisions = []
253 history = svn.fs.node_history(rev_root, path)
253 history = svn.fs.node_history(rev_root, path)
254 history = svn.fs.history_prev(history, cross_copies)
254 history = svn.fs.history_prev(history, cross_copies)
255 while history:
255 while history:
256 __, node_revision = svn.fs.history_location(history)
256 __, node_revision = svn.fs.history_location(history)
257 history_revisions.append(node_revision)
257 history_revisions.append(node_revision)
258 if limit and len(history_revisions) >= limit:
258 if limit and len(history_revisions) >= limit:
259 break
259 break
260 history = svn.fs.history_prev(history, cross_copies)
260 history = svn.fs.history_prev(history, cross_copies)
261 return history_revisions
261 return history_revisions
262
262
263 def node_properties(self, wire, path, revision):
263 def node_properties(self, wire, path, revision):
264 repo = self._factory.repo(wire)
264 repo = self._factory.repo(wire)
265 fsobj = svn.repos.fs(repo)
265 fsobj = svn.repos.fs(repo)
266 rev_root = svn.fs.revision_root(fsobj, revision)
266 rev_root = svn.fs.revision_root(fsobj, revision)
267 return svn.fs.node_proplist(rev_root, path)
267 return svn.fs.node_proplist(rev_root, path)
268
268
269 def file_annotate(self, wire, path, revision):
269 def file_annotate(self, wire, path, revision):
270 abs_path = 'file://' + urllib.pathname2url(
270 abs_path = 'file://' + urllib.pathname2url(
271 vcspath.join(wire['path'], path))
271 vcspath.join(wire['path'], path))
272 file_uri = svn.core.svn_path_canonicalize(abs_path)
272 file_uri = svn.core.svn_path_canonicalize(abs_path)
273
273
274 start_rev = svn_opt_revision_value_t(0)
274 start_rev = svn_opt_revision_value_t(0)
275 peg_rev = svn_opt_revision_value_t(revision)
275 peg_rev = svn_opt_revision_value_t(revision)
276 end_rev = peg_rev
276 end_rev = peg_rev
277
277
278 annotations = []
278 annotations = []
279
279
280 def receiver(line_no, revision, author, date, line, pool):
280 def receiver(line_no, revision, author, date, line, pool):
281 annotations.append((line_no, revision, line))
281 annotations.append((line_no, revision, line))
282
282
283 # TODO: Cannot use blame5, missing typemap function in the swig code
283 # TODO: Cannot use blame5, missing typemap function in the swig code
284 try:
284 try:
285 svn.client.blame2(
285 svn.client.blame2(
286 file_uri, peg_rev, start_rev, end_rev,
286 file_uri, peg_rev, start_rev, end_rev,
287 receiver, svn.client.create_context())
287 receiver, svn.client.create_context())
288 except svn.core.SubversionException as exc:
288 except svn.core.SubversionException as exc:
289 log.exception("Error during blame operation.")
289 log.exception("Error during blame operation.")
290 raise Exception(
290 raise Exception(
291 "Blame not supported or file does not exist at path %s. "
291 "Blame not supported or file does not exist at path %s. "
292 "Error %s." % (path, exc))
292 "Error %s." % (path, exc))
293
293
294 return annotations
294 return annotations
295
295
296 def get_node_type(self, wire, path, rev=None):
296 def get_node_type(self, wire, path, rev=None):
297 repo = self._factory.repo(wire)
297 repo = self._factory.repo(wire)
298 fs_ptr = svn.repos.fs(repo)
298 fs_ptr = svn.repos.fs(repo)
299 if rev is None:
299 if rev is None:
300 rev = svn.fs.youngest_rev(fs_ptr)
300 rev = svn.fs.youngest_rev(fs_ptr)
301 root = svn.fs.revision_root(fs_ptr, rev)
301 root = svn.fs.revision_root(fs_ptr, rev)
302 node = svn.fs.check_path(root, path)
302 node = svn.fs.check_path(root, path)
303 return NODE_TYPE_MAPPING.get(node, None)
303 return NODE_TYPE_MAPPING.get(node, None)
304
304
305 def get_nodes(self, wire, path, revision=None):
305 def get_nodes(self, wire, path, revision=None):
306 repo = self._factory.repo(wire)
306 repo = self._factory.repo(wire)
307 fsobj = svn.repos.fs(repo)
307 fsobj = svn.repos.fs(repo)
308 if revision is None:
308 if revision is None:
309 revision = svn.fs.youngest_rev(fsobj)
309 revision = svn.fs.youngest_rev(fsobj)
310 root = svn.fs.revision_root(fsobj, revision)
310 root = svn.fs.revision_root(fsobj, revision)
311 entries = svn.fs.dir_entries(root, path)
311 entries = svn.fs.dir_entries(root, path)
312 result = []
312 result = []
313 for entry_path, entry_info in entries.iteritems():
313 for entry_path, entry_info in entries.iteritems():
314 result.append(
314 result.append(
315 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
315 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
316 return result
316 return result
317
317
318 def get_file_content(self, wire, path, rev=None):
318 def get_file_content(self, wire, path, rev=None):
319 repo = self._factory.repo(wire)
319 repo = self._factory.repo(wire)
320 fsobj = svn.repos.fs(repo)
320 fsobj = svn.repos.fs(repo)
321 if rev is None:
321 if rev is None:
322 rev = svn.fs.youngest_rev(fsobj)
322 rev = svn.fs.youngest_rev(fsobj)
323 root = svn.fs.revision_root(fsobj, rev)
323 root = svn.fs.revision_root(fsobj, rev)
324 content = svn.core.Stream(svn.fs.file_contents(root, path))
324 content = svn.core.Stream(svn.fs.file_contents(root, path))
325 return content.read()
325 return content.read()
326
326
327 def get_file_size(self, wire, path, revision=None):
327 def get_file_size(self, wire, path, revision=None):
328 repo = self._factory.repo(wire)
328 repo = self._factory.repo(wire)
329 fsobj = svn.repos.fs(repo)
329 fsobj = svn.repos.fs(repo)
330 if revision is None:
330 if revision is None:
331 revision = svn.fs.youngest_rev(fsobj)
331 revision = svn.fs.youngest_rev(fsobj)
332 root = svn.fs.revision_root(fsobj, revision)
332 root = svn.fs.revision_root(fsobj, revision)
333 size = svn.fs.file_length(root, path)
333 size = svn.fs.file_length(root, path)
334 return size
334 return size
335
335
336 def create_repository(self, wire, compatible_version=None):
336 def create_repository(self, wire, compatible_version=None):
337 log.info('Creating Subversion repository in path "%s"', wire['path'])
337 log.info('Creating Subversion repository in path "%s"', wire['path'])
338 self._factory.repo(wire, create=True,
338 self._factory.repo(wire, create=True,
339 compatible_version=compatible_version)
339 compatible_version=compatible_version)
340
340
341 def import_remote_repository(self, wire, src_url):
341 def import_remote_repository(self, wire, src_url):
342 repo_path = wire['path']
342 repo_path = wire['path']
343 if not self.is_path_valid_repository(wire, repo_path):
343 if not self.is_path_valid_repository(wire, repo_path):
344 raise Exception(
344 raise Exception(
345 "Path %s is not a valid Subversion repository." % repo_path)
345 "Path %s is not a valid Subversion repository." % repo_path)
346 # TODO: johbo: URL checks ?
346 # TODO: johbo: URL checks ?
347 rdump = subprocess.Popen(
347 rdump = subprocess.Popen(
348 ['svnrdump', 'dump', '--non-interactive', src_url],
348 ['svnrdump', 'dump', '--non-interactive', src_url],
349 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
349 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
350 load = subprocess.Popen(
350 load = subprocess.Popen(
351 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
351 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
352
352
353 # TODO: johbo: This can be a very long operation, might be better
353 # TODO: johbo: This can be a very long operation, might be better
354 # to track some kind of status and provide an api to check if the
354 # to track some kind of status and provide an api to check if the
355 # import is done.
355 # import is done.
356 rdump.wait()
356 rdump.wait()
357 load.wait()
357 load.wait()
358
358
359 if rdump.returncode != 0:
359 if rdump.returncode != 0:
360 errors = rdump.stderr.read()
360 errors = rdump.stderr.read()
361 log.error('svnrdump dump failed: statuscode %s: message: %s',
361 log.error('svnrdump dump failed: statuscode %s: message: %s',
362 rdump.returncode, errors)
362 rdump.returncode, errors)
363 reason = 'UNKNOWN'
363 reason = 'UNKNOWN'
364 if 'svnrdump: E230001:' in errors:
364 if 'svnrdump: E230001:' in errors:
365 reason = 'INVALID_CERTIFICATE'
365 reason = 'INVALID_CERTIFICATE'
366 raise Exception(
366 raise Exception(
367 'Failed to dump the remote repository from %s.' % src_url,
367 'Failed to dump the remote repository from %s.' % src_url,
368 reason)
368 reason)
369 if load.returncode != 0:
369 if load.returncode != 0:
370 raise Exception(
370 raise Exception(
371 'Failed to load the dump of remote repository from %s.' %
371 'Failed to load the dump of remote repository from %s.' %
372 (src_url, ))
372 (src_url, ))
373
373
374 def commit(self, wire, message, author, timestamp, updated, removed):
374 def commit(self, wire, message, author, timestamp, updated, removed):
375 assert isinstance(message, str)
375 assert isinstance(message, str)
376 assert isinstance(author, str)
376 assert isinstance(author, str)
377
377
378 repo = self._factory.repo(wire)
378 repo = self._factory.repo(wire)
379 fsobj = svn.repos.fs(repo)
379 fsobj = svn.repos.fs(repo)
380
380
381 rev = svn.fs.youngest_rev(fsobj)
381 rev = svn.fs.youngest_rev(fsobj)
382 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
382 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
383 txn_root = svn.fs.txn_root(txn)
383 txn_root = svn.fs.txn_root(txn)
384
384
385 for node in updated:
385 for node in updated:
386 TxnNodeProcessor(node, txn_root).update()
386 TxnNodeProcessor(node, txn_root).update()
387 for node in removed:
387 for node in removed:
388 TxnNodeProcessor(node, txn_root).remove()
388 TxnNodeProcessor(node, txn_root).remove()
389
389
390 commit_id = svn.repos.fs_commit_txn(repo, txn)
390 commit_id = svn.repos.fs_commit_txn(repo, txn)
391
391
392 if timestamp:
392 if timestamp:
393 apr_time = apr_time_t(timestamp)
393 apr_time = apr_time_t(timestamp)
394 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
394 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
395 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
395 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
396
396
397 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
397 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
398 return commit_id
398 return commit_id
399
399
400 def diff(self, wire, rev1, rev2, path1=None, path2=None,
400 def diff(self, wire, rev1, rev2, path1=None, path2=None,
401 ignore_whitespace=False, context=3):
401 ignore_whitespace=False, context=3):
402
402
403 wire.update(cache=False)
403 wire.update(cache=False)
404 repo = self._factory.repo(wire)
404 repo = self._factory.repo(wire)
405 diff_creator = SvnDiffer(
405 diff_creator = SvnDiffer(
406 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
406 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
407 try:
407 try:
408 return diff_creator.generate_diff()
408 return diff_creator.generate_diff()
409 except svn.core.SubversionException as e:
409 except svn.core.SubversionException as e:
410 log.exception(
410 log.exception(
411 "Error during diff operation operation. "
411 "Error during diff operation operation. "
412 "Path might not exist %s, %s" % (path1, path2))
412 "Path might not exist %s, %s" % (path1, path2))
413 return ""
413 return ""
414
414
415 @reraise_safe_exceptions
415 @reraise_safe_exceptions
416 def is_large_file(self, wire, path):
416 def is_large_file(self, wire, path):
417 return False
417 return False
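For orientation, a hedged sketch of calling two of the read-only methods above; the wire keys and revision numbers are placeholders inferred from how this module uses them, not a documented contract.

remote = SvnRemote(factory)                      # factory as sketched earlier
wire = {'path': '/tmp/example-svn-repo', 'cache': False}

changes = remote.revision_changes(wire, 1)
# e.g. {'added': ['docs/readme.txt'], 'changed': [], 'removed': []}

start_rev, end_rev = remote.lookup_interval(wire, start_ts=None, end_ts=None)
# without timestamps this spans revision 1 up to the youngest revision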
418
418
419
419
420 class SvnDiffer(object):
420 class SvnDiffer(object):
421 """
421 """
422 Utility to create diffs based on difflib and the Subversion api
422 Utility to create diffs based on difflib and the Subversion api
423 """
423 """
424
424
425 binary_content = False
425 binary_content = False
426
426
427 def __init__(
427 def __init__(
428 self, repo, src_rev, src_path, tgt_rev, tgt_path,
428 self, repo, src_rev, src_path, tgt_rev, tgt_path,
429 ignore_whitespace, context):
429 ignore_whitespace, context):
430 self.repo = repo
430 self.repo = repo
431 self.ignore_whitespace = ignore_whitespace
431 self.ignore_whitespace = ignore_whitespace
432 self.context = context
432 self.context = context
433
433
434 fsobj = svn.repos.fs(repo)
434 fsobj = svn.repos.fs(repo)
435
435
436 self.tgt_rev = tgt_rev
436 self.tgt_rev = tgt_rev
437 self.tgt_path = tgt_path or ''
437 self.tgt_path = tgt_path or ''
438 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
438 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
439 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
439 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
440
440
441 self.src_rev = src_rev
441 self.src_rev = src_rev
442 self.src_path = src_path or self.tgt_path
442 self.src_path = src_path or self.tgt_path
443 self.src_root = svn.fs.revision_root(fsobj, src_rev)
443 self.src_root = svn.fs.revision_root(fsobj, src_rev)
444 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
444 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
445
445
446 self._validate()
446 self._validate()
447
447
448 def _validate(self):
448 def _validate(self):
449 if (self.tgt_kind != svn.core.svn_node_none and
449 if (self.tgt_kind != svn.core.svn_node_none and
450 self.src_kind != svn.core.svn_node_none and
450 self.src_kind != svn.core.svn_node_none and
451 self.src_kind != self.tgt_kind):
451 self.src_kind != self.tgt_kind):
452 # TODO: johbo: proper error handling
452 # TODO: johbo: proper error handling
453 raise Exception(
453 raise Exception(
454 "Source and target are not compatible for diff generation. "
454 "Source and target are not compatible for diff generation. "
455 "Source type: %s, target type: %s" %
455 "Source type: %s, target type: %s" %
456 (self.src_kind, self.tgt_kind))
456 (self.src_kind, self.tgt_kind))
457
457
458 def generate_diff(self):
458 def generate_diff(self):
459 buf = StringIO.StringIO()
459 buf = StringIO.StringIO()
460 if self.tgt_kind == svn.core.svn_node_dir:
460 if self.tgt_kind == svn.core.svn_node_dir:
461 self._generate_dir_diff(buf)
461 self._generate_dir_diff(buf)
462 else:
462 else:
463 self._generate_file_diff(buf)
463 self._generate_file_diff(buf)
464 return buf.getvalue()
464 return buf.getvalue()
465
465
466 def _generate_dir_diff(self, buf):
466 def _generate_dir_diff(self, buf):
467 editor = DiffChangeEditor()
467 editor = DiffChangeEditor()
468 editor_ptr, editor_baton = svn.delta.make_editor(editor)
468 editor_ptr, editor_baton = svn.delta.make_editor(editor)
469 svn.repos.dir_delta2(
469 svn.repos.dir_delta2(
470 self.src_root,
470 self.src_root,
471 self.src_path,
471 self.src_path,
472 '', # src_entry
472 '', # src_entry
473 self.tgt_root,
473 self.tgt_root,
474 self.tgt_path,
474 self.tgt_path,
475 editor_ptr, editor_baton,
475 editor_ptr, editor_baton,
476 authorization_callback_allow_all,
476 authorization_callback_allow_all,
477 False, # text_deltas
477 False, # text_deltas
478 svn.core.svn_depth_infinity, # depth
478 svn.core.svn_depth_infinity, # depth
479 False, # entry_props
479 False, # entry_props
480 False, # ignore_ancestry
480 False, # ignore_ancestry
481 )
481 )
482
482
483 for path, __, change in sorted(editor.changes):
483 for path, __, change in sorted(editor.changes):
484 self._generate_node_diff(
484 self._generate_node_diff(
485 buf, change, path, self.tgt_path, path, self.src_path)
485 buf, change, path, self.tgt_path, path, self.src_path)
486
486
487 def _generate_file_diff(self, buf):
487 def _generate_file_diff(self, buf):
488 change = None
488 change = None
489 if self.src_kind == svn.core.svn_node_none:
489 if self.src_kind == svn.core.svn_node_none:
490 change = "add"
490 change = "add"
491 elif self.tgt_kind == svn.core.svn_node_none:
491 elif self.tgt_kind == svn.core.svn_node_none:
492 change = "delete"
492 change = "delete"
493 tgt_base, tgt_path = vcspath.split(self.tgt_path)
493 tgt_base, tgt_path = vcspath.split(self.tgt_path)
494 src_base, src_path = vcspath.split(self.src_path)
494 src_base, src_path = vcspath.split(self.src_path)
495 self._generate_node_diff(
495 self._generate_node_diff(
496 buf, change, tgt_path, tgt_base, src_path, src_base)
496 buf, change, tgt_path, tgt_base, src_path, src_base)
497
497
498 def _generate_node_diff(
498 def _generate_node_diff(
499 self, buf, change, tgt_path, tgt_base, src_path, src_base):
499 self, buf, change, tgt_path, tgt_base, src_path, src_base):
500
500
501 if self.src_rev == self.tgt_rev and tgt_base == src_base:
501 if self.src_rev == self.tgt_rev and tgt_base == src_base:
502 # to keep behaviour consistent with git/hg, return an empty diff
502 # to keep behaviour consistent with git/hg, return an empty diff
503 # when comparing the same revision
503 # when comparing the same revision
504 return
504 return
505
505
506 tgt_full_path = vcspath.join(tgt_base, tgt_path)
506 tgt_full_path = vcspath.join(tgt_base, tgt_path)
507 src_full_path = vcspath.join(src_base, src_path)
507 src_full_path = vcspath.join(src_base, src_path)
508
508
509 self.binary_content = False
509 self.binary_content = False
510 mime_type = self._get_mime_type(tgt_full_path)
510 mime_type = self._get_mime_type(tgt_full_path)
511
511
512 if mime_type and not mime_type.startswith('text'):
512 if mime_type and not mime_type.startswith('text'):
513 self.binary_content = True
513 self.binary_content = True
514 buf.write("=" * 67 + '\n')
514 buf.write("=" * 67 + '\n')
515 buf.write("Cannot display: file marked as a binary type.\n")
515 buf.write("Cannot display: file marked as a binary type.\n")
516 buf.write("svn:mime-type = %s\n" % mime_type)
516 buf.write("svn:mime-type = %s\n" % mime_type)
517 buf.write("Index: %s\n" % (tgt_path, ))
517 buf.write("Index: %s\n" % (tgt_path, ))
518 buf.write("=" * 67 + '\n')
518 buf.write("=" * 67 + '\n')
519 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
519 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
520 'tgt_path': tgt_path})
520 'tgt_path': tgt_path})
521
521
522 if change == 'add':
522 if change == 'add':
523 # TODO: johbo: SVN is missing a zero here compared to git
523 # TODO: johbo: SVN is missing a zero here compared to git
524 buf.write("new file mode 10644\n")
524 buf.write("new file mode 10644\n")
525
525
526 # TODO(marcink): introduce binary detection of svn patches
526 # TODO(marcink): introduce binary detection of svn patches
527 # if self.binary_content:
527 # if self.binary_content:
528 # buf.write('GIT binary patch\n')
528 # buf.write('GIT binary patch\n')
529
529
530 buf.write("--- /dev/null\t(revision 0)\n")
530 buf.write("--- /dev/null\t(revision 0)\n")
531 src_lines = []
531 src_lines = []
532 else:
532 else:
533 if change == 'delete':
533 if change == 'delete':
534 buf.write("deleted file mode 10644\n")
534 buf.write("deleted file mode 10644\n")
535
535
536 # TODO(marcink): introduce binary detection of svn patches
536 # TODO(marcink): introduce binary detection of svn patches
537 # if self.binary_content:
537 # if self.binary_content:
538 # buf.write('GIT binary patch\n')
538 # buf.write('GIT binary patch\n')
539
539
540 buf.write("--- a/%s\t(revision %s)\n" % (
540 buf.write("--- a/%s\t(revision %s)\n" % (
541 src_path, self.src_rev))
541 src_path, self.src_rev))
542 src_lines = self._svn_readlines(self.src_root, src_full_path)
542 src_lines = self._svn_readlines(self.src_root, src_full_path)
543
543
544 if change == 'delete':
544 if change == 'delete':
545 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
545 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
546 tgt_lines = []
546 tgt_lines = []
547 else:
547 else:
548 buf.write("+++ b/%s\t(revision %s)\n" % (
548 buf.write("+++ b/%s\t(revision %s)\n" % (
549 tgt_path, self.tgt_rev))
549 tgt_path, self.tgt_rev))
550 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
550 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
551
551
552 if not self.binary_content:
552 if not self.binary_content:
553 udiff = svn_diff.unified_diff(
553 udiff = svn_diff.unified_diff(
554 src_lines, tgt_lines, context=self.context,
554 src_lines, tgt_lines, context=self.context,
555 ignore_blank_lines=self.ignore_whitespace,
555 ignore_blank_lines=self.ignore_whitespace,
556 ignore_case=False,
556 ignore_case=False,
557 ignore_space_changes=self.ignore_whitespace)
557 ignore_space_changes=self.ignore_whitespace)
558 buf.writelines(udiff)
558 buf.writelines(udiff)
559
559
560 def _get_mime_type(self, path):
560 def _get_mime_type(self, path):
561 try:
561 try:
562 mime_type = svn.fs.node_prop(
562 mime_type = svn.fs.node_prop(
563 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
563 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
564 except svn.core.SubversionException:
564 except svn.core.SubversionException:
565 mime_type = svn.fs.node_prop(
565 mime_type = svn.fs.node_prop(
566 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
566 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
567 return mime_type
567 return mime_type
568
568
569 def _svn_readlines(self, fs_root, node_path):
569 def _svn_readlines(self, fs_root, node_path):
570 if self.binary_content:
570 if self.binary_content:
571 return []
571 return []
572 node_kind = svn.fs.check_path(fs_root, node_path)
572 node_kind = svn.fs.check_path(fs_root, node_path)
573 if node_kind not in (
573 if node_kind not in (
574 svn.core.svn_node_file, svn.core.svn_node_symlink):
574 svn.core.svn_node_file, svn.core.svn_node_symlink):
575 return []
575 return []
576 content = svn.core.Stream(
576 content = svn.core.Stream(
577 svn.fs.file_contents(fs_root, node_path)).read()
577 svn.fs.file_contents(fs_root, node_path)).read()
578 return content.splitlines(True)
578 return content.splitlines(True)
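A minimal sketch of driving SvnDiffer directly, mirroring what SvnRemote.diff does above; the repository path, file path and revision numbers are placeholders.

fs_repo = svn.repos.open('/tmp/example-svn-repo')
differ = SvnDiffer(
    fs_repo, src_rev=1, src_path='docs/readme.txt',
    tgt_rev=2, tgt_path='docs/readme.txt',
    ignore_whitespace=False, context=3)
unified = differ.generate_diff()   # git-style unified diff as a single string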
579
579
580
580
581 class DiffChangeEditor(svn.delta.Editor):
581 class DiffChangeEditor(svn.delta.Editor):
582 """
582 """
583 Records changes between two given revisions
583 Records changes between two given revisions
584 """
584 """
585
585
586 def __init__(self):
586 def __init__(self):
587 self.changes = []
587 self.changes = []
588
588
589 def delete_entry(self, path, revision, parent_baton, pool=None):
589 def delete_entry(self, path, revision, parent_baton, pool=None):
590 self.changes.append((path, None, 'delete'))
590 self.changes.append((path, None, 'delete'))
591
591
592 def add_file(
592 def add_file(
593 self, path, parent_baton, copyfrom_path, copyfrom_revision,
593 self, path, parent_baton, copyfrom_path, copyfrom_revision,
594 file_pool=None):
594 file_pool=None):
595 self.changes.append((path, 'file', 'add'))
595 self.changes.append((path, 'file', 'add'))
596
596
597 def open_file(self, path, parent_baton, base_revision, file_pool=None):
597 def open_file(self, path, parent_baton, base_revision, file_pool=None):
598 self.changes.append((path, 'file', 'change'))
598 self.changes.append((path, 'file', 'change'))
599
599
600
600
601 def authorization_callback_allow_all(root, path, pool):
601 def authorization_callback_allow_all(root, path, pool):
602 return True
602 return True
603
603
604
604
605 class TxnNodeProcessor(object):
605 class TxnNodeProcessor(object):
606 """
606 """
607 Utility to process the change of one node within a transaction root.
607 Utility to process the change of one node within a transaction root.
608
608
609 It encapsulates the knowledge of how to add, update or remove
609 It encapsulates the knowledge of how to add, update or remove
610 a node for a given transaction root. The purpose is to support the method
610 a node for a given transaction root. The purpose is to support the method
611 `SvnRemote.commit`.
611 `SvnRemote.commit`.
612 """
612 """
613
613
614 def __init__(self, node, txn_root):
614 def __init__(self, node, txn_root):
615 assert isinstance(node['path'], str)
615 assert isinstance(node['path'], str)
616
616
617 self.node = node
617 self.node = node
618 self.txn_root = txn_root
618 self.txn_root = txn_root
619
619
620 def update(self):
620 def update(self):
621 self._ensure_parent_dirs()
621 self._ensure_parent_dirs()
622 self._add_file_if_node_does_not_exist()
622 self._add_file_if_node_does_not_exist()
623 self._update_file_content()
623 self._update_file_content()
624 self._update_file_properties()
624 self._update_file_properties()
625
625
626 def remove(self):
626 def remove(self):
627 svn.fs.delete(self.txn_root, self.node['path'])
627 svn.fs.delete(self.txn_root, self.node['path'])
628 # TODO: Clean up directory if empty
628 # TODO: Clean up directory if empty
629
629
630 def _ensure_parent_dirs(self):
630 def _ensure_parent_dirs(self):
631 curdir = vcspath.dirname(self.node['path'])
631 curdir = vcspath.dirname(self.node['path'])
632 dirs_to_create = []
632 dirs_to_create = []
633 while not self._svn_path_exists(curdir):
633 while not self._svn_path_exists(curdir):
634 dirs_to_create.append(curdir)
634 dirs_to_create.append(curdir)
635 curdir = vcspath.dirname(curdir)
635 curdir = vcspath.dirname(curdir)
636
636
637 for curdir in reversed(dirs_to_create):
637 for curdir in reversed(dirs_to_create):
638 log.debug('Creating missing directory "%s"', curdir)
638 log.debug('Creating missing directory "%s"', curdir)
639 svn.fs.make_dir(self.txn_root, curdir)
639 svn.fs.make_dir(self.txn_root, curdir)
640
640
641 def _svn_path_exists(self, path):
641 def _svn_path_exists(self, path):
642 path_status = svn.fs.check_path(self.txn_root, path)
642 path_status = svn.fs.check_path(self.txn_root, path)
643 return path_status != svn.core.svn_node_none
643 return path_status != svn.core.svn_node_none
644
644
645 def _add_file_if_node_does_not_exist(self):
645 def _add_file_if_node_does_not_exist(self):
646 kind = svn.fs.check_path(self.txn_root, self.node['path'])
646 kind = svn.fs.check_path(self.txn_root, self.node['path'])
647 if kind == svn.core.svn_node_none:
647 if kind == svn.core.svn_node_none:
648 svn.fs.make_file(self.txn_root, self.node['path'])
648 svn.fs.make_file(self.txn_root, self.node['path'])
649
649
650 def _update_file_content(self):
650 def _update_file_content(self):
651 assert isinstance(self.node['content'], str)
651 assert isinstance(self.node['content'], str)
652 handler, baton = svn.fs.apply_textdelta(
652 handler, baton = svn.fs.apply_textdelta(
653 self.txn_root, self.node['path'], None, None)
653 self.txn_root, self.node['path'], None, None)
654 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
654 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
655
655
656 def _update_file_properties(self):
656 def _update_file_properties(self):
657 properties = self.node.get('properties', {})
657 properties = self.node.get('properties', {})
658 for key, value in properties.iteritems():
658 for key, value in properties.iteritems():
659 svn.fs.change_node_prop(
659 svn.fs.change_node_prop(
660 self.txn_root, self.node['path'], key, value)
660 self.txn_root, self.node['path'], key, value)
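A hedged sketch of the node dictionaries this class (and `SvnRemote.commit`) works with; the file names, content and property are illustrative. Updated nodes need `path` and `content` as plain `str` values and may carry an optional `properties` mapping; removed nodes only need `path`.

updated_node = {
    'path': 'docs/readme.txt',                  # asserted to be a str above
    'content': 'hello subversion\n',            # full new file content, also a str
    'properties': {'svn:eol-style': 'native'},  # optional svn properties
}
removed_node = {'path': 'docs/old.txt'}
# a commit would then be driven as:
#   remote.commit(wire, message, author, timestamp,
#                 updated=[updated_node], removed=[removed_node])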
661
661
662
662
663 def apr_time_t(timestamp):
663 def apr_time_t(timestamp):
664 """
664 """
665 Convert a Python timestamp into the APR timestamp type apr_time_t (microseconds)
665 Convert a Python timestamp into the APR timestamp type apr_time_t (microseconds)
666 """
666 """
667 return timestamp * 1E6
667 return timestamp * 1E6
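A quick worked example (the timestamp is arbitrary): APR timestamps count microseconds, so the conversion is a plain multiplication by 1e6.

ts = 1514764800                    # 2018-01-01 00:00:00 UTC as a unix timestamp
assert apr_time_t(ts) == ts * 1E6  # 1514764800000000.0 microseconds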
668
668
669
669
670 def svn_opt_revision_value_t(num):
670 def svn_opt_revision_value_t(num):
671 """
671 """
672 Wrap `num` into an `svn_opt_revision_t` carrying a `svn_opt_revision_value_t` number.
672 Wrap `num` into an `svn_opt_revision_t` carrying a `svn_opt_revision_value_t` number.
673 """
673 """
674 value = svn.core.svn_opt_revision_value_t()
674 value = svn.core.svn_opt_revision_value_t()
675 value.number = num
675 value.number = num
676 revision = svn.core.svn_opt_revision_t()
676 revision = svn.core.svn_opt_revision_t()
677 revision.kind = svn.core.svn_opt_revision_number
677 revision.kind = svn.core.svn_opt_revision_number
678 revision.value = value
678 revision.value = value
679 return revision
679 return revision
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19
19
20 import pytest
20 import pytest
21
21
22
22
23 def pytest_addoption(parser):
23 def pytest_addoption(parser):
24 parser.addoption(
24 parser.addoption(
25 '--repeat', type=int, default=100,
25 '--repeat', type=int, default=100,
26 help="Number of repetitions in performance tests.")
26 help="Number of repetitions in performance tests.")
27
27
28
28
29 @pytest.fixture(scope='session')
29 @pytest.fixture(scope='session')
30 def repeat(request):
30 def repeat(request):
31 """
31 """
32 The number of repetitions is based on this fixture.
32 The number of repetitions is based on this fixture.
33
33
34 Slower calls may divide it by 10 or 100. It is chosen in a way so that the
34 Slower calls may divide it by 10 or 100. It is chosen so that the
34 Slower calls may divide it by 10 or 100. It is chosen so that the
35 tests are not too slow in our default test suite.
36 """
36 """
37 return request.config.getoption('--repeat')
37 return request.config.getoption('--repeat')
38
38
39
39
40 @pytest.fixture(scope='session')
40 @pytest.fixture(scope='session')
41 def vcsserver_port(request):
41 def vcsserver_port(request):
42 port = get_available_port()
42 port = get_available_port()
43 print 'Using vcsserver port %s' % (port, )
43 print 'Using vcsserver port %s' % (port, )
44 return port
44 return port
45
45
46
46
47 def get_available_port():
47 def get_available_port():
48 family = socket.AF_INET
48 family = socket.AF_INET
49 socktype = socket.SOCK_STREAM
49 socktype = socket.SOCK_STREAM
50 host = '127.0.0.1'
50 host = '127.0.0.1'
51
51
52 mysocket = socket.socket(family, socktype)
52 mysocket = socket.socket(family, socktype)
53 mysocket.bind((host, 0))
53 mysocket.bind((host, 0))
54 port = mysocket.getsockname()[1]
54 port = mysocket.getsockname()[1]
55 mysocket.close()
55 mysocket.close()
56 del mysocket
56 del mysocket
57 return port
57 return port
@@ -1,71 +1,71 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21
21
22 import configobj
22 import configobj
23
23
24
24
25 class ContextINI(object):
25 class ContextINI(object):
26 """
26 """
27 Allows creating a new test.ini file as a copy of an existing one with edited
27 Allows creating a new test.ini file as a copy of an existing one with edited
28 data. If the existing file is not present, a new one is created. Example usage::
28 data. If the existing file is not present, a new one is created. Example usage::
29
29
30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 print 'vcsserver --config=%s' % new_test_ini_path
31 print 'vcsserver --config=%s' % new_test_ini_path
32 """
32 """
33
33
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 destroy=True):
35 destroy=True):
36 self.ini_file_path = ini_file_path
36 self.ini_file_path = ini_file_path
37 self.ini_params = ini_params
37 self.ini_params = ini_params
38 self.new_path = None
38 self.new_path = None
39 self.new_path_prefix = new_file_prefix or 'test'
39 self.new_path_prefix = new_file_prefix or 'test'
40 self.destroy = destroy
40 self.destroy = destroy
41
41
42 def __enter__(self):
42 def __enter__(self):
43 _, pref = tempfile.mkstemp()
43 _, pref = tempfile.mkstemp()
44 loc = tempfile.gettempdir()
44 loc = tempfile.gettempdir()
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 pref, self.new_path_prefix, self.ini_file_path))
46 pref, self.new_path_prefix, self.ini_file_path))
47
47
48 # copy ini file and modify according to the params, if we re-use a file
48 # copy ini file and modify according to the params, if we re-use a file
49 if os.path.isfile(self.ini_file_path):
49 if os.path.isfile(self.ini_file_path):
50 shutil.copy(self.ini_file_path, self.new_path)
50 shutil.copy(self.ini_file_path, self.new_path)
51 else:
51 else:
52 # create new dump file for configObj to write to.
52 # create new dump file for configObj to write to.
53 with open(self.new_path, 'wb'):
53 with open(self.new_path, 'wb'):
54 pass
54 pass
55
55
56 config = configobj.ConfigObj(
56 config = configobj.ConfigObj(
57 self.new_path, file_error=True, write_empty_values=True)
57 self.new_path, file_error=True, write_empty_values=True)
58
58
59 for data in self.ini_params:
59 for data in self.ini_params:
60 section, ini_params = data.items()[0]
60 section, ini_params = data.items()[0]
61 key, val = ini_params.items()[0]
61 key, val = ini_params.items()[0]
62 if section not in config:
62 if section not in config:
63 config[section] = {}
63 config[section] = {}
64 config[section][key] = val
64 config[section][key] = val
65
65
66 config.write()
66 config.write()
67 return self.new_path
67 return self.new_path
68
68
69 def __exit__(self, exc_type, exc_val, exc_tb):
69 def __exit__(self, exc_type, exc_val, exc_tb):
70 if self.destroy:
70 if self.destroy:
71 os.remove(self.new_path)
71 os.remove(self.new_path)
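A hedged usage sketch for the context manager above; the file name, sections and keys are placeholders. Each entry of `ini_params` is a one-key dict mapping a section name to a one-key dict with the value to set.

overrides = [
    {'server:main': {'port': '9900'}},
    {'app:main': {'use': 'egg:rhodecode-vcsserver'}},
]
with ContextINI('test.ini', overrides) as new_ini_path:
    # new_ini_path is a temporary copy with the overrides applied; it is
    # deleted again on exit unless destroy=False is passed.
    print 'vcsserver --config=%s' % new_ini_path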
@@ -1,162 +1,162 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.fetch(
81 self.remote_git.fetch(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire=None, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102 def test_remove_ref(self):
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
106 assert ref_to_remove not in self.mock_repo.refs
107
107
108
108
109 class TestReraiseSafeExceptions(object):
109 class TestReraiseSafeExceptions(object):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
111 factory = Mock()
111 factory = Mock()
112 git_remote = git.GitRemote(factory)
112 git_remote = git.GitRemote(factory)
113
113
114 def fake_function():
114 def fake_function():
115 return None
115 return None
116
116
117 decorator = git.reraise_safe_exceptions(fake_function)
117 decorator = git.reraise_safe_exceptions(fake_function)
118
118
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 for method_name, method in methods:
120 for method_name, method in methods:
121 if not method_name.startswith('_'):
121 if not method_name.startswith('_'):
122 assert method.im_func.__code__ == decorator.__code__
122 assert method.im_func.__code__ == decorator.__code__
123
123
124 @pytest.mark.parametrize('side_effect, expected_type', [
124 @pytest.mark.parametrize('side_effect, expected_type', [
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 (dulwich.errors.HangupException(), 'error'),
129 (dulwich.errors.HangupException(), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 ])
131 ])
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 @git.reraise_safe_exceptions
133 @git.reraise_safe_exceptions
134 def fake_method():
134 def fake_method():
135 raise side_effect
135 raise side_effect
136
136
137 with pytest.raises(Exception) as exc_info:
137 with pytest.raises(Exception) as exc_info:
138 fake_method()
138 fake_method()
139 assert type(exc_info.value) == Exception
139 assert type(exc_info.value) == Exception
140 assert exc_info.value._vcs_kind == expected_type
140 assert exc_info.value._vcs_kind == expected_type
141
141
142
142
143 class TestDulwichRepoWrapper(object):
143 class TestDulwichRepoWrapper(object):
144 def test_calls_close_on_delete(self):
144 def test_calls_close_on_delete(self):
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 with isdir_patcher:
146 with isdir_patcher:
147 repo = git.Repo('/tmp/abcde')
147 repo = git.Repo('/tmp/abcde')
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 del repo
149 del repo
150 close_mock.assert_called_once_with()
150 close_mock.assert_called_once_with()
151
151
152
152
153 class TestGitFactory(object):
153 class TestGitFactory(object):
154 def test_create_repo_returns_dulwich_wrapper(self):
154 def test_create_repo_returns_dulwich_wrapper(self):
155 factory = git.GitFactory(repo_cache=Mock())
155 factory = git.GitFactory(repo_cache=Mock())
156 wire = {
156 wire = {
157 'path': '/tmp/abcde'
157 'path': '/tmp/abcde'
158 }
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
162 assert isinstance(result, git.Repo)
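The tests above pin down the observable contract of git.reraise_safe_exceptions: every public GitRemote method is wrapped, and known Dulwich errors resurface as a plain Exception tagged with a _vcs_kind attribute. Below is a minimal sketch of such a decorator, not vcsserver's actual implementation; DULWICH_KIND_MAP is an assumption distilled from the parametrized cases above, and the 'unhandled' fallback mirrors what the Mercurial tests further down assert.

import functools

# Assumed mapping, distilled from the parametrized test cases above.
DULWICH_KIND_MAP = {
    'ChecksumMismatch': 'lookup',
    'NotCommitError': 'lookup',
    'MissingCommitError': 'lookup',
    'ObjectMissing': 'lookup',
    'HangupException': 'error',
    'UnexpectedCommandError': 'error',
}

def reraise_safe_exceptions_sketch(func):
    # Wrap a remote method so backend errors come back as a plain
    # Exception carrying ``_vcs_kind``, the attribute the tests assert on.
    # (The real decorator also preserves the original traceback; see the
    # Mercurial tests further down.)
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as exc:
            kind = DULWICH_KIND_MAP.get(type(exc).__name__, 'unhandled')
            safe_exc = Exception(*exc.args)
            safe_exc._vcs_kind = kind
            raise safe_exc
    return wrapper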
@@ -1,127 +1,127 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, MagicMock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hg, hgcompat
27
27
28
28
29 class TestHGLookup(object):
29 class TestHGLookup(object):
30 def setup(self):
30 def setup(self):
31 self.mock_repo = MagicMock()
31 self.mock_repo = MagicMock()
32 self.mock_repo.__getitem__.side_effect = LookupError(
32 self.mock_repo.__getitem__.side_effect = LookupError(
33 'revision_or_commit_id', 'index', 'message')
33 'revision_or_commit_id', 'index', 'message')
34 factory = Mock()
34 factory = Mock()
35 factory.repo = Mock(return_value=self.mock_repo)
35 factory.repo = Mock(return_value=self.mock_repo)
36 self.remote_hg = hg.HgRemote(factory)
36 self.remote_hg = hg.HgRemote(factory)
37
37
38 def test_fail_lookup_hg(self):
38 def test_fail_lookup_hg(self):
39 with pytest.raises(Exception) as exc_info:
39 with pytest.raises(Exception) as exc_info:
40 self.remote_hg.lookup(
40 self.remote_hg.lookup(
41 wire=None, revision='revision_or_commit_id', both=True)
41 wire=None, revision='revision_or_commit_id', both=True)
42
42
43 assert exc_info.value._vcs_kind == 'lookup'
43 assert exc_info.value._vcs_kind == 'lookup'
44 assert 'revision_or_commit_id' in exc_info.value.args
44 assert 'revision_or_commit_id' in exc_info.value.args
45
45
46
46
47 class TestDiff(object):
47 class TestDiff(object):
48 def test_raising_safe_exception_when_lookup_failed(self):
48 def test_raising_safe_exception_when_lookup_failed(self):
49 repo = Mock()
49 repo = Mock()
50 factory = Mock()
50 factory = Mock()
51 factory.repo = Mock(return_value=repo)
51 factory.repo = Mock(return_value=repo)
52 hg_remote = hg.HgRemote(factory)
52 hg_remote = hg.HgRemote(factory)
53 with patch('mercurial.patch.diff') as diff_mock:
53 with patch('mercurial.patch.diff') as diff_mock:
54 diff_mock.side_effect = LookupError(
54 diff_mock.side_effect = LookupError(
55 'deadbeef', 'index', 'message')
55 'deadbeef', 'index', 'message')
56 with pytest.raises(Exception) as exc_info:
56 with pytest.raises(Exception) as exc_info:
57 hg_remote.diff(
57 hg_remote.diff(
58 wire=None, rev1='deadbeef', rev2='deadbee1',
58 wire=None, rev1='deadbeef', rev2='deadbee1',
59 file_filter=None, opt_git=True, opt_ignorews=True,
59 file_filter=None, opt_git=True, opt_ignorews=True,
60 context=3)
60 context=3)
61 assert type(exc_info.value) == Exception
61 assert type(exc_info.value) == Exception
62 assert exc_info.value._vcs_kind == 'lookup'
62 assert exc_info.value._vcs_kind == 'lookup'
63
63
64
64
65 class TestReraiseSafeExceptions(object):
65 class TestReraiseSafeExceptions(object):
66 def test_method_decorated_with_reraise_safe_exceptions(self):
66 def test_method_decorated_with_reraise_safe_exceptions(self):
67 factory = Mock()
67 factory = Mock()
68 hg_remote = hg.HgRemote(factory)
68 hg_remote = hg.HgRemote(factory)
69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
70 decorator = hg.reraise_safe_exceptions(None)
70 decorator = hg.reraise_safe_exceptions(None)
71 for method_name, method in methods:
71 for method_name, method in methods:
72 if not method_name.startswith('_'):
72 if not method_name.startswith('_'):
73 assert method.im_func.__code__ == decorator.__code__
73 assert method.im_func.__code__ == decorator.__code__
74
74
75 @pytest.mark.parametrize('side_effect, expected_type', [
75 @pytest.mark.parametrize('side_effect, expected_type', [
76 (hgcompat.Abort(), 'abort'),
76 (hgcompat.Abort(), 'abort'),
77 (hgcompat.InterventionRequired(), 'abort'),
77 (hgcompat.InterventionRequired(), 'abort'),
78 (hgcompat.RepoLookupError(), 'lookup'),
78 (hgcompat.RepoLookupError(), 'lookup'),
79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
80 (hgcompat.RepoError(), 'error'),
80 (hgcompat.RepoError(), 'error'),
81 (hgcompat.RequirementError(), 'requirement'),
81 (hgcompat.RequirementError(), 'requirement'),
82 ])
82 ])
83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
84 @hg.reraise_safe_exceptions
84 @hg.reraise_safe_exceptions
85 def fake_method():
85 def fake_method():
86 raise side_effect
86 raise side_effect
87
87
88 with pytest.raises(Exception) as exc_info:
88 with pytest.raises(Exception) as exc_info:
89 fake_method()
89 fake_method()
90 assert type(exc_info.value) == Exception
90 assert type(exc_info.value) == Exception
91 assert exc_info.value._vcs_kind == expected_type
91 assert exc_info.value._vcs_kind == expected_type
92
92
93 def test_keeps_original_traceback(self):
93 def test_keeps_original_traceback(self):
94 @hg.reraise_safe_exceptions
94 @hg.reraise_safe_exceptions
95 def fake_method():
95 def fake_method():
96 try:
96 try:
97 raise hgcompat.Abort()
97 raise hgcompat.Abort()
98 except:
98 except:
99 self.original_traceback = traceback.format_tb(
99 self.original_traceback = traceback.format_tb(
100 sys.exc_info()[2])
100 sys.exc_info()[2])
101 raise
101 raise
102
102
103 try:
103 try:
104 fake_method()
104 fake_method()
105 except Exception:
105 except Exception:
106 new_traceback = traceback.format_tb(sys.exc_info()[2])
106 new_traceback = traceback.format_tb(sys.exc_info()[2])
107
107
108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
109 assert new_traceback_tail == self.original_traceback
109 assert new_traceback_tail == self.original_traceback
110
110
111 def test_maps_unknow_exceptions_to_unhandled(self):
111 def test_maps_unknow_exceptions_to_unhandled(self):
112 @hg.reraise_safe_exceptions
112 @hg.reraise_safe_exceptions
113 def stub_method():
113 def stub_method():
114 raise ValueError('stub')
114 raise ValueError('stub')
115
115
116 with pytest.raises(Exception) as exc_info:
116 with pytest.raises(Exception) as exc_info:
117 stub_method()
117 stub_method()
118 assert exc_info.value._vcs_kind == 'unhandled'
118 assert exc_info.value._vcs_kind == 'unhandled'
119
119
120 def test_does_not_map_known_exceptions(self):
120 def test_does_not_map_known_exceptions(self):
121 @hg.reraise_safe_exceptions
121 @hg.reraise_safe_exceptions
122 def stub_method():
122 def stub_method():
123 raise exceptions.LookupException('stub')
123 raise exceptions.LookupException('stub')
124
124
125 with pytest.raises(Exception) as exc_info:
125 with pytest.raises(Exception) as exc_info:
126 stub_method()
126 stub_method()
127 assert exc_info.value._vcs_kind == 'lookup'
127 assert exc_info.value._vcs_kind == 'lookup'
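test_keeps_original_traceback above additionally requires that the tail of the re-raised exception's traceback matches the original frames. A hedged sketch of the usual Python 2 idiom for that follows (this code base targets Python 2.7); the helper name is hypothetical and this is not vcsserver's API.

import sys

def reraise_tagged(kind):
    # Re-raise the exception currently being handled as a plain Exception
    # carrying ``_vcs_kind``, keeping the original traceback by way of the
    # Python 2 three-argument raise.
    exc_type, exc_value, exc_tb = sys.exc_info()
    new_exc = Exception(*getattr(exc_value, 'args', ()))
    new_exc._vcs_kind = kind
    raise type(new_exc), new_exc, exc_tb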
@@ -1,125 +1,130 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import hgcompat, hgpatches
21 from vcsserver import hgcompat, hgpatches
22
22
23
23
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25
25
26
26
27 def test_patch_largefiles_capabilities_applies_patch(
27 def test_patch_largefiles_capabilities_applies_patch(
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
36 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
36 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 hgcompat.largefiles.proto, stub_extensions)
37 hgcompat.largefiles.proto, stub_extensions)
38
38
39 caps = dynamic_capabilities(stub_repo, stub_proto)
39 caps = dynamic_capabilities(stub_repo, stub_proto)
40
40
41 stub_extensions.assert_called_once_with(stub_ui)
41 stub_extensions.assert_called_once_with(stub_ui)
42 assert LARGEFILES_CAPABILITY not in caps
42 assert LARGEFILES_CAPABILITY not in caps
43
43
44
44
45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
48 hgcompat.largefiles.proto, stub_extensions)
48 hgcompat.largefiles.proto, stub_extensions)
49
49
50 # This happens when the extension is loaded for the first time, important
50 # This happens when the extension is loaded for the first time, important
51 # to ensure that an updated function is correctly picked up.
51 # to ensure that an updated function is correctly picked up.
52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
53 return_value='REPLACED')
53 return_value='REPLACED')
54
54
55 caps = dynamic_capabilities(stub_repo, stub_proto)
55 caps = dynamic_capabilities(stub_repo, stub_proto)
56 assert 'REPLACED' == caps
56 assert 'REPLACED' == caps
57
57
58
58
59 def test_dynamic_capabilities_ignores_updated_capabilities(
59 def test_dynamic_capabilities_ignores_updated_capabilities(
60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
61 stub_extensions.return_value = [('largefiles', mock.Mock())]
61 stub_extensions.return_value = [('largefiles', mock.Mock())]
62 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
62 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
63 hgcompat.largefiles.proto, stub_extensions)
63 hgcompat.largefiles.proto, stub_extensions)
64
64
65 # This happens when the extension is loaded for the first time, important
65 # This happens when the extension is loaded for the first time, important
66 # to ensure that an updated function is correctly picked up.
66 # to ensure that an updated function is correctly picked up.
67 hgcompat.largefiles.proto.capabilities = mock.Mock(
67 hgcompat.largefiles.proto.capabilities = mock.Mock(
68 side_effect=Exception('Must not be called'))
68 side_effect=Exception('Must not be called'))
69
69
70 dynamic_capabilities(stub_repo, stub_proto)
70 dynamic_capabilities(stub_repo, stub_proto)
71
71
72
72
73 def test_dynamic_capabilities_uses_largefiles_if_enabled(
73 def test_dynamic_capabilities_uses_largefiles_if_enabled(
74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
75 stub_extensions.return_value = [('largefiles', mock.Mock())]
75 stub_extensions.return_value = [('largefiles', mock.Mock())]
76
76
77 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
77 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
78 hgcompat.largefiles.proto, stub_extensions)
78 hgcompat.largefiles.proto, stub_extensions)
79
79
80 caps = dynamic_capabilities(stub_repo, stub_proto)
80 caps = dynamic_capabilities(stub_repo, stub_proto)
81
81
82 stub_extensions.assert_called_once_with(stub_ui)
82 stub_extensions.assert_called_once_with(stub_ui)
83 assert LARGEFILES_CAPABILITY in caps
83 assert LARGEFILES_CAPABILITY in caps
84
84
85
85
86 def test_hgsubversion_import():
87 from hgsubversion import svnrepo
88 assert svnrepo
89
90
86 @pytest.fixture
91 @pytest.fixture
87 def patched_capabilities(request):
92 def patched_capabilities(request):
88 """
93 """
89 Patch in `capabilitiesorig` and restore both capability functions.
94 Patch in `capabilitiesorig` and restore both capability functions.
90 """
95 """
91 lfproto = hgcompat.largefiles.proto
96 lfproto = hgcompat.largefiles.proto
92 orig_capabilities = lfproto.capabilities
97 orig_capabilities = lfproto.capabilities
93 orig_capabilitiesorig = lfproto.capabilitiesorig
98 orig_capabilitiesorig = lfproto.capabilitiesorig
94
99
95 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
100 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
96
101
97 @request.addfinalizer
102 @request.addfinalizer
98 def restore():
103 def restore():
99 lfproto.capabilities = orig_capabilities
104 lfproto.capabilities = orig_capabilities
100 lfproto.capabilitiesorig = orig_capabilitiesorig
105 lfproto.capabilitiesorig = orig_capabilitiesorig
101
106
102
107
103 @pytest.fixture
108 @pytest.fixture
104 def stub_repo(stub_ui):
109 def stub_repo(stub_ui):
105 repo = mock.Mock()
110 repo = mock.Mock()
106 repo.ui = stub_ui
111 repo.ui = stub_ui
107 return repo
112 return repo
108
113
109
114
110 @pytest.fixture
115 @pytest.fixture
111 def stub_proto(stub_ui):
116 def stub_proto(stub_ui):
112 proto = mock.Mock()
117 proto = mock.Mock()
113 proto.ui = stub_ui
118 proto.ui = stub_ui
114 return proto
119 return proto
115
120
116
121
117 @pytest.fixture
122 @pytest.fixture
118 def stub_ui():
123 def stub_ui():
119 return hgcompat.ui.ui()
124 return hgcompat.ui.ui()
120
125
121
126
122 @pytest.fixture
127 @pytest.fixture
123 def stub_extensions():
128 def stub_extensions():
124 extensions = mock.Mock(return_value=tuple())
129 extensions = mock.Mock(return_value=tuple())
125 return extensions
130 return extensions
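The largefiles tests above describe a wrapper that advertises 'largefiles=serve' only when the extension is enabled for the repository, and that resolves capabilitiesorig at call time so a freshly reloaded extension is picked up. A minimal sketch consistent with those assertions (illustrative only; extensions_fn stands in for Mercurial's extensions listing and is an assumed parameter):

LARGEFILES_CAPABILITY = 'largefiles=serve'

def dynamic_capabilities_wrapper_sketch(lfproto, extensions_fn):
    # Captured once, before patching: the largefiles-aware implementation
    # that already appends 'largefiles=serve' to the capability string.
    orig_capabilities = lfproto.capabilities

    def _dynamic_capabilities(repo, proto):
        largefiles_enabled = any(
            name == 'largefiles' for name, _mod in extensions_fn(repo.ui))
        if largefiles_enabled:
            calc_capabilities = orig_capabilities
        else:
            # Resolved at call time, so a capabilitiesorig swapped in by a
            # newly loaded extension is honoured (see the test above).
            calc_capabilities = lfproto.capabilitiesorig
        return calc_capabilities(repo, proto)

    return _dynamic_capabilities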
@@ -1,241 +1,241 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import contextlib
18 import contextlib
19 import io
19 import io
20 import threading
20 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
21 from BaseHTTPServer import BaseHTTPRequestHandler
22 from SocketServer import TCPServer
22 from SocketServer import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27 import simplejson as json
27 import simplejson as json
28
28
29 from vcsserver import hooks
29 from vcsserver import hooks
30
30
31
31
32 def get_hg_ui(extras=None):
32 def get_hg_ui(extras=None):
33 """Create a Config object with a valid RC_SCM_DATA entry."""
33 """Create a Config object with a valid RC_SCM_DATA entry."""
34 extras = extras or {}
34 extras = extras or {}
35 required_extras = {
35 required_extras = {
36 'username': '',
36 'username': '',
37 'repository': '',
37 'repository': '',
38 'locked_by': '',
38 'locked_by': '',
39 'scm': '',
39 'scm': '',
40 'make_lock': '',
40 'make_lock': '',
41 'action': '',
41 'action': '',
42 'ip': '',
42 'ip': '',
43 'hooks_uri': 'fake_hooks_uri',
43 'hooks_uri': 'fake_hooks_uri',
44 }
44 }
45 required_extras.update(extras)
45 required_extras.update(extras)
46 hg_ui = mercurial.ui.ui()
46 hg_ui = mercurial.ui.ui()
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48
48
49 return hg_ui
49 return hg_ui
50
50
51
51
52 def test_git_pre_receive_is_disabled():
52 def test_git_pre_receive_is_disabled():
53 extras = {'hooks': ['pull']}
53 extras = {'hooks': ['pull']}
54 response = hooks.git_pre_receive(None, None,
54 response = hooks.git_pre_receive(None, None,
55 {'RC_SCM_DATA': json.dumps(extras)})
55 {'RC_SCM_DATA': json.dumps(extras)})
56
56
57 assert response == 0
57 assert response == 0
58
58
59
59
60 def test_git_post_receive_is_disabled():
60 def test_git_post_receive_is_disabled():
61 extras = {'hooks': ['pull']}
61 extras = {'hooks': ['pull']}
62 response = hooks.git_post_receive(None, '',
62 response = hooks.git_post_receive(None, '',
63 {'RC_SCM_DATA': json.dumps(extras)})
63 {'RC_SCM_DATA': json.dumps(extras)})
64
64
65 assert response == 0
65 assert response == 0
66
66
67
67
68 def test_git_post_receive_calls_repo_size():
68 def test_git_post_receive_calls_repo_size():
69 extras = {'hooks': ['push', 'repo_size']}
69 extras = {'hooks': ['push', 'repo_size']}
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 hooks.git_post_receive(
71 hooks.git_post_receive(
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
73 extras.update({'commit_ids': [],
73 extras.update({'commit_ids': [],
74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
75 expected_calls = [
75 expected_calls = [
76 mock.call('repo_size', extras, mock.ANY),
76 mock.call('repo_size', extras, mock.ANY),
77 mock.call('post_push', extras, mock.ANY),
77 mock.call('post_push', extras, mock.ANY),
78 ]
78 ]
79 assert call_hook_mock.call_args_list == expected_calls
79 assert call_hook_mock.call_args_list == expected_calls
80
80
81
81
82 def test_git_post_receive_does_not_call_disabled_repo_size():
82 def test_git_post_receive_does_not_call_disabled_repo_size():
83 extras = {'hooks': ['push']}
83 extras = {'hooks': ['push']}
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 hooks.git_post_receive(
85 hooks.git_post_receive(
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
87 extras.update({'commit_ids': [],
87 extras.update({'commit_ids': [],
88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
89 expected_calls = [
89 expected_calls = [
90 mock.call('post_push', extras, mock.ANY)
90 mock.call('post_push', extras, mock.ANY)
91 ]
91 ]
92 assert call_hook_mock.call_args_list == expected_calls
92 assert call_hook_mock.call_args_list == expected_calls
93
93
94
94
95 def test_repo_size_exception_does_not_affect_git_post_receive():
95 def test_repo_size_exception_does_not_affect_git_post_receive():
96 extras = {'hooks': ['push', 'repo_size']}
96 extras = {'hooks': ['push', 'repo_size']}
97 status = 0
97 status = 0
98
98
99 def side_effect(name, *args, **kwargs):
99 def side_effect(name, *args, **kwargs):
100 if name == 'repo_size':
100 if name == 'repo_size':
101 raise Exception('Fake exception')
101 raise Exception('Fake exception')
102 else:
102 else:
103 return status
103 return status
104
104
105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
106 call_hook_mock.side_effect = side_effect
106 call_hook_mock.side_effect = side_effect
107 result = hooks.git_post_receive(
107 result = hooks.git_post_receive(
108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
109 assert result == status
109 assert result == status
110
110
111
111
112 def test_git_pre_pull_is_disabled():
112 def test_git_pre_pull_is_disabled():
113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
114
114
115
115
116 def test_git_post_pull_is_disabled():
116 def test_git_post_pull_is_disabled():
117 assert (
117 assert (
118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
119
119
120
120
121 class TestGetHooksClient(object):
121 class TestGetHooksClient(object):
122
122
123 def test_returns_http_client_when_protocol_matches(self):
123 def test_returns_http_client_when_protocol_matches(self):
124 hooks_uri = 'localhost:8000'
124 hooks_uri = 'localhost:8000'
125 result = hooks._get_hooks_client({
125 result = hooks._get_hooks_client({
126 'hooks_uri': hooks_uri,
126 'hooks_uri': hooks_uri,
127 'hooks_protocol': 'http'
127 'hooks_protocol': 'http'
128 })
128 })
129 assert isinstance(result, hooks.HooksHttpClient)
129 assert isinstance(result, hooks.HooksHttpClient)
130 assert result.hooks_uri == hooks_uri
130 assert result.hooks_uri == hooks_uri
131
131
132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
133 fake_module = mock.Mock()
133 fake_module = mock.Mock()
134 import_patcher = mock.patch.object(
134 import_patcher = mock.patch.object(
135 hooks.importlib, 'import_module', return_value=fake_module)
135 hooks.importlib, 'import_module', return_value=fake_module)
136 fake_module_name = 'fake.module'
136 fake_module_name = 'fake.module'
137 with import_patcher as import_mock:
137 with import_patcher as import_mock:
138 result = hooks._get_hooks_client(
138 result = hooks._get_hooks_client(
139 {'hooks_module': fake_module_name})
139 {'hooks_module': fake_module_name})
140
140
141 import_mock.assert_called_once_with(fake_module_name)
141 import_mock.assert_called_once_with(fake_module_name)
142 assert isinstance(result, hooks.HooksDummyClient)
142 assert isinstance(result, hooks.HooksDummyClient)
143 assert result._hooks_module == fake_module
143 assert result._hooks_module == fake_module
144
144
145
145
146 class TestHooksHttpClient(object):
146 class TestHooksHttpClient(object):
147 def test_init_sets_hooks_uri(self):
147 def test_init_sets_hooks_uri(self):
148 uri = 'localhost:3000'
148 uri = 'localhost:3000'
149 client = hooks.HooksHttpClient(uri)
149 client = hooks.HooksHttpClient(uri)
150 assert client.hooks_uri == uri
150 assert client.hooks_uri == uri
151
151
152 def test_serialize_returns_json_string(self):
152 def test_serialize_returns_json_string(self):
153 client = hooks.HooksHttpClient('localhost:3000')
153 client = hooks.HooksHttpClient('localhost:3000')
154 hook_name = 'test'
154 hook_name = 'test'
155 extras = {
155 extras = {
156 'first': 1,
156 'first': 1,
157 'second': 'two'
157 'second': 'two'
158 }
158 }
159 result = client._serialize(hook_name, extras)
159 result = client._serialize(hook_name, extras)
160 expected_result = json.dumps({
160 expected_result = json.dumps({
161 'method': hook_name,
161 'method': hook_name,
162 'extras': extras
162 'extras': extras
163 })
163 })
164 assert result == expected_result
164 assert result == expected_result
165
165
166 def test_call_queries_http_server(self, http_mirror):
166 def test_call_queries_http_server(self, http_mirror):
167 client = hooks.HooksHttpClient(http_mirror.uri)
167 client = hooks.HooksHttpClient(http_mirror.uri)
168 hook_name = 'test'
168 hook_name = 'test'
169 extras = {
169 extras = {
170 'first': 1,
170 'first': 1,
171 'second': 'two'
171 'second': 'two'
172 }
172 }
173 result = client(hook_name, extras)
173 result = client(hook_name, extras)
174 expected_result = {
174 expected_result = {
175 'method': hook_name,
175 'method': hook_name,
176 'extras': extras
176 'extras': extras
177 }
177 }
178 assert result == expected_result
178 assert result == expected_result
179
179
180
180
181 class TestHooksDummyClient(object):
181 class TestHooksDummyClient(object):
182 def test_init_imports_hooks_module(self):
182 def test_init_imports_hooks_module(self):
183 hooks_module_name = 'rhodecode.fake.module'
183 hooks_module_name = 'rhodecode.fake.module'
184 hooks_module = mock.MagicMock()
184 hooks_module = mock.MagicMock()
185
185
186 import_patcher = mock.patch.object(
186 import_patcher = mock.patch.object(
187 hooks.importlib, 'import_module', return_value=hooks_module)
187 hooks.importlib, 'import_module', return_value=hooks_module)
188 with import_patcher as import_mock:
188 with import_patcher as import_mock:
189 client = hooks.HooksDummyClient(hooks_module_name)
189 client = hooks.HooksDummyClient(hooks_module_name)
190 import_mock.assert_called_once_with(hooks_module_name)
190 import_mock.assert_called_once_with(hooks_module_name)
191 assert client._hooks_module == hooks_module
191 assert client._hooks_module == hooks_module
192
192
193 def test_call_returns_hook_result(self):
193 def test_call_returns_hook_result(self):
194 hooks_module_name = 'rhodecode.fake.module'
194 hooks_module_name = 'rhodecode.fake.module'
195 hooks_module = mock.MagicMock()
195 hooks_module = mock.MagicMock()
196 import_patcher = mock.patch.object(
196 import_patcher = mock.patch.object(
197 hooks.importlib, 'import_module', return_value=hooks_module)
197 hooks.importlib, 'import_module', return_value=hooks_module)
198 with import_patcher:
198 with import_patcher:
199 client = hooks.HooksDummyClient(hooks_module_name)
199 client = hooks.HooksDummyClient(hooks_module_name)
200
200
201 result = client('post_push', {})
201 result = client('post_push', {})
202 hooks_module.Hooks.assert_called_once_with()
202 hooks_module.Hooks.assert_called_once_with()
203 assert result == hooks_module.Hooks().__enter__().post_push()
203 assert result == hooks_module.Hooks().__enter__().post_push()
204
204
205
205
206 @pytest.fixture
206 @pytest.fixture
207 def http_mirror(request):
207 def http_mirror(request):
208 server = MirrorHttpServer()
208 server = MirrorHttpServer()
209 request.addfinalizer(server.stop)
209 request.addfinalizer(server.stop)
210 return server
210 return server
211
211
212
212
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
214 def do_POST(self):
214 def do_POST(self):
215 length = int(self.headers['Content-Length'])
215 length = int(self.headers['Content-Length'])
216 body = self.rfile.read(length).decode('utf-8')
216 body = self.rfile.read(length).decode('utf-8')
217 self.send_response(200)
217 self.send_response(200)
218 self.end_headers()
218 self.end_headers()
219 self.wfile.write(body)
219 self.wfile.write(body)
220
220
221
221
222 class MirrorHttpServer(object):
222 class MirrorHttpServer(object):
223 ip_address = '127.0.0.1'
223 ip_address = '127.0.0.1'
224 port = 0
224 port = 0
225
225
226 def __init__(self):
226 def __init__(self):
227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
228 _, self.port = self._daemon.server_address
228 _, self.port = self._daemon.server_address
229 self._thread = threading.Thread(target=self._daemon.serve_forever)
229 self._thread = threading.Thread(target=self._daemon.serve_forever)
230 self._thread.daemon = True
230 self._thread.daemon = True
231 self._thread.start()
231 self._thread.start()
232
232
233 def stop(self):
233 def stop(self):
234 self._daemon.shutdown()
234 self._daemon.shutdown()
235 self._thread.join()
235 self._thread.join()
236 self._daemon = None
236 self._daemon = None
237 self._thread = None
237 self._thread = None
238
238
239 @property
239 @property
240 def uri(self):
240 def uri(self):
241 return '{}:{}'.format(self.ip_address, self.port)
241 return '{}:{}'.format(self.ip_address, self.port)
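TestHooksHttpClient above asserts that calling the client POSTs the serialized {'method': ..., 'extras': ...} payload to hooks_uri and returns the decoded reply; MirrorHttpServer merely echoes the request body back. A rough sketch of such a client, assuming Python 2's httplib (this is not the actual HooksHttpClient and the class name is made up):

import json
import httplib  # Python 2 stdlib; http.client on Python 3

class EchoingHooksClientSketch(object):
    # Hypothetical client mirroring what test_call_queries_http_server
    # asserts: serialize the hook call as JSON, POST it to ``hooks_uri``
    # and return the decoded response body.
    def __init__(self, hooks_uri):
        self.hooks_uri = hooks_uri

    def __call__(self, method, extras):
        body = json.dumps({'method': method, 'extras': extras})
        connection = httplib.HTTPConnection(self.hooks_uri)
        connection.request('POST', '/', body)
        response = connection.getresponse()
        return json.loads(response.read())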
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import http_main
21 from vcsserver import http_main
22 from vcsserver.base import obfuscate_qs
22 from vcsserver.base import obfuscate_qs
23
23
24
24
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 http_main.main([])
28 http_main.main([])
29 patch_largefiles_capabilities.assert_called_once_with()
29 patch_largefiles_capabilities.assert_called_once_with()
30
30
31
31
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 @mock.patch(
34 @mock.patch(
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 mock.Mock(side_effect=Exception("Must not be called")))
36 mock.Mock(side_effect=Exception("Must not be called")))
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 http_main.main([])
38 http_main.main([])
39
39
40
40
41 @pytest.mark.parametrize('given, expected', [
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
45 ('a;b;c;query&foo=bar&auth_token=secret',
46 'a&b&c&query&foo=bar&auth_token=*****'),
46 'a&b&c&query&foo=bar&auth_token=*****'),
47 ('', ''),
47 ('', ''),
48 (None, None),
48 (None, None),
49 ('foo=bar', 'foo=bar'),
49 ('foo=bar', 'foo=bar'),
50 ('auth_token=secret', 'auth_token=*****'),
50 ('auth_token=secret', 'auth_token=*****'),
51 ('auth_token=secret&api_key=secret2',
51 ('auth_token=secret&api_key=secret2',
52 'auth_token=*****&api_key=*****'),
52 'auth_token=*****&api_key=*****'),
53 ('auth_token=secret&api_key=secret2&param=value',
53 ('auth_token=secret&api_key=secret2&param=value',
54 'auth_token=*****&api_key=*****&param=value'),
54 'auth_token=*****&api_key=*****&param=value'),
55 ])
55 ])
56 def test_obfuscate_qs(given, expected):
56 def test_obfuscate_qs(given, expected):
57 assert expected == obfuscate_qs(given)
57 assert expected == obfuscate_qs(given)
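The parametrized table above fully determines the expected behaviour: values of auth_token and api_key are masked with '*****', ';' separators are normalised to '&', and everything else, including None, empty strings and bare fragments without '=', passes through unchanged. A small sketch that reproduces exactly those expected values (illustrative; the real helper is vcsserver.base.obfuscate_qs):

def obfuscate_qs_sketch(query_string):
    # Mask secret query-string parameters as in the expected values above.
    if query_string is None:
        return None
    hidden_params = ('auth_token', 'api_key')
    parts = []
    for fragment in query_string.replace(';', '&').split('&'):
        if '=' in fragment:
            key, _sep, value = fragment.partition('=')
            if key in hidden_params:
                value = '*****'
            parts.append('%s=%s' % (key, value))
        else:
            # bare fragments such as 'bad' or 'query' stay untouched
            parts.append(fragment)
    return '&'.join(parts)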
@@ -1,249 +1,249 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19
19
20 import dulwich.protocol
20 import dulwich.protocol
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import webob
23 import webob
24 import webtest
24 import webtest
25
25
26 from vcsserver import hooks, pygrack
26 from vcsserver import hooks, pygrack
27
27
28 # pylint: disable=redefined-outer-name,protected-access
28 # pylint: disable=redefined-outer-name,protected-access
29
29
30
30
31 @pytest.fixture()
31 @pytest.fixture()
32 def pygrack_instance(tmpdir):
32 def pygrack_instance(tmpdir):
33 """
33 """
34 Creates a pygrack app instance.
34 Creates a pygrack app instance.
35
35
36 Right now, it does not do much with the passed directory.
36 Right now, it does not do much with the passed directory.
37 It just contains the required folders to pass the signature test.
37 It just contains the required folders to pass the signature test.
38 """
38 """
39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 tmpdir.mkdir(dir_name)
40 tmpdir.mkdir(dir_name)
41
41
42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43
43
44
44
45 @pytest.fixture()
45 @pytest.fixture()
46 def pygrack_app(pygrack_instance):
46 def pygrack_app(pygrack_instance):
47 """
47 """
48 Creates a pygrack app wrapped in webtest.TestApp.
48 Creates a pygrack app wrapped in webtest.TestApp.
49 """
49 """
50 return webtest.TestApp(pygrack_instance)
50 return webtest.TestApp(pygrack_instance)
51
51
52
52
53 def test_invalid_service_info_refs_returns_403(pygrack_app):
53 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 expect_errors=True)
55 expect_errors=True)
56
56
57 assert response.status_int == 403
57 assert response.status_int == 403
58
58
59
59
60 def test_invalid_endpoint_returns_403(pygrack_app):
60 def test_invalid_endpoint_returns_403(pygrack_app):
61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62
62
63 assert response.status_int == 403
63 assert response.status_int == 403
64
64
65
65
66 @pytest.mark.parametrize('sideband', [
66 @pytest.mark.parametrize('sideband', [
67 'side-band-64k',
67 'side-band-64k',
68 'side-band',
68 'side-band',
69 'side-band no-progress',
69 'side-band no-progress',
70 ])
70 ])
71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 request = ''.join([
72 request = ''.join([
73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 'multi_ack %s ofs-delta\n' % sideband,
74 'multi_ack %s ofs-delta\n' % sideband,
75 '0000',
75 '0000',
76 '0009done\n',
76 '0009done\n',
77 ])
77 ])
78 with mock.patch('vcsserver.hooks.git_pre_pull',
78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 return_value=hooks.HookResponse(1, 'foo')):
79 return_value=hooks.HookResponse(1, 'foo')):
80 response = pygrack_app.post(
80 response = pygrack_app.post(
81 '/git-upload-pack', params=request,
81 '/git-upload-pack', params=request,
82 content_type='application/x-git-upload-pack')
82 content_type='application/x-git-upload-pack')
83
83
84 data = io.BytesIO(response.body)
84 data = io.BytesIO(response.body)
85 proto = dulwich.protocol.Protocol(data.read, None)
85 proto = dulwich.protocol.Protocol(data.read, None)
86 packets = list(proto.read_pkt_seq())
86 packets = list(proto.read_pkt_seq())
87
87
88 expected_packets = [
88 expected_packets = [
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
91 ]
91 ]
92 assert packets == expected_packets
92 assert packets == expected_packets
93
93
94
94
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 request = ''.join([
96 request = ''.join([
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 'multi_ack ofs-delta\n'
98 'multi_ack ofs-delta\n'
99 '0000',
99 '0000',
100 '0009done\n',
100 '0009done\n',
101 ])
101 ])
102 with mock.patch('vcsserver.hooks.git_pre_pull',
102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 return_value=hooks.HookResponse(1, 'foo')):
103 return_value=hooks.HookResponse(1, 'foo')):
104 response = pygrack_app.post(
104 response = pygrack_app.post(
105 '/git-upload-pack', params=request,
105 '/git-upload-pack', params=request,
106 content_type='application/x-git-upload-pack')
106 content_type='application/x-git-upload-pack')
107
107
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109
109
110
110
111 def test_pull_has_hook_messages(pygrack_app):
111 def test_pull_has_hook_messages(pygrack_app):
112 request = ''.join([
112 request = ''.join([
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 'multi_ack side-band-64k ofs-delta\n'
114 'multi_ack side-band-64k ofs-delta\n'
115 '0000',
115 '0000',
116 '0009done\n',
116 '0009done\n',
117 ])
117 ])
118 with mock.patch('vcsserver.hooks.git_pre_pull',
118 with mock.patch('vcsserver.hooks.git_pre_pull',
119 return_value=hooks.HookResponse(0, 'foo')):
119 return_value=hooks.HookResponse(0, 'foo')):
120 with mock.patch('vcsserver.hooks.git_post_pull',
120 with mock.patch('vcsserver.hooks.git_post_pull',
121 return_value=hooks.HookResponse(1, 'bar')):
121 return_value=hooks.HookResponse(1, 'bar')):
122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 return_value=['0008NAK\n0009subp\n0000']):
123 return_value=['0008NAK\n0009subp\n0000']):
124 response = pygrack_app.post(
124 response = pygrack_app.post(
125 '/git-upload-pack', params=request,
125 '/git-upload-pack', params=request,
126 content_type='application/x-git-upload-pack')
126 content_type='application/x-git-upload-pack')
127
127
128 data = io.BytesIO(response.body)
128 data = io.BytesIO(response.body)
129 proto = dulwich.protocol.Protocol(data.read, None)
129 proto = dulwich.protocol.Protocol(data.read, None)
130 packets = list(proto.read_pkt_seq())
130 packets = list(proto.read_pkt_seq())
131
131
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
133
133
134
134
135 def test_get_want_capabilities(pygrack_instance):
135 def test_get_want_capabilities(pygrack_instance):
136 data = io.BytesIO(
136 data = io.BytesIO(
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
139
139
140 request = webob.Request({
140 request = webob.Request({
141 'wsgi.input': data,
141 'wsgi.input': data,
142 'REQUEST_METHOD': 'POST',
142 'REQUEST_METHOD': 'POST',
143 'webob.is_body_seekable': True
143 'webob.is_body_seekable': True
144 })
144 })
145
145
146 capabilities = pygrack_instance._get_want_capabilities(request)
146 capabilities = pygrack_instance._get_want_capabilities(request)
147
147
148 assert capabilities == frozenset(
148 assert capabilities == frozenset(
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
150 assert data.tell() == 0
150 assert data.tell() == 0
151
151
152
152
153 @pytest.mark.parametrize('data,capabilities,expected', [
153 @pytest.mark.parametrize('data,capabilities,expected', [
154 ('foo', [], []),
154 ('foo', [], []),
155 ('', ['side-band-64k'], []),
155 ('', ['side-band-64k'], []),
156 ('', ['side-band'], []),
156 ('', ['side-band'], []),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
163 ], ids=[
163 ], ids=[
164 'foo-empty',
164 'foo-empty',
165 'empty-64k', 'empty',
165 'empty-64k', 'empty',
166 'foo-64k', 'foo',
166 'foo-64k', 'foo',
167 'f-1000-64k', 'f-1000',
167 'f-1000-64k', 'f-1000',
168 'f-65520-64k', 'f-65520'])
168 'f-65520-64k', 'f-65520'])
169 def test_get_messages(pygrack_instance, data, capabilities, expected):
169 def test_get_messages(pygrack_instance, data, capabilities, expected):
170 messages = pygrack_instance._get_messages(data, capabilities)
170 messages = pygrack_instance._get_messages(data, capabilities)
171
171
172 assert messages == expected
172 assert messages == expected
173
173
174
174
175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 # Unexpected response
176 # Unexpected response
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
178 # No sideband
178 # No sideband
179 ('no-sideband', [], 'foo', 'bar'),
179 ('no-sideband', [], 'foo', 'bar'),
180 # No messages
180 # No messages
181 ('no-messages', ['side-band-64k'], '', ''),
181 ('no-messages', ['side-band-64k'], '', ''),
182 ])
182 ])
183 def test_inject_messages_to_response_nothing_to_do(
183 def test_inject_messages_to_response_nothing_to_do(
184 pygrack_instance, response, capabilities, pre_pull_messages,
184 pygrack_instance, response, capabilities, pre_pull_messages,
185 post_pull_messages):
185 post_pull_messages):
186 new_response = pygrack_instance._inject_messages_to_response(
186 new_response = pygrack_instance._inject_messages_to_response(
187 response, capabilities, pre_pull_messages, post_pull_messages)
187 response, capabilities, pre_pull_messages, post_pull_messages)
188
188
189 assert new_response == response
189 assert new_response == response
190
190
191
191
192 @pytest.mark.parametrize('capabilities', [
192 @pytest.mark.parametrize('capabilities', [
193 ['side-band'],
193 ['side-band'],
194 ['side-band-64k'],
194 ['side-band-64k'],
195 ])
195 ])
196 def test_inject_messages_to_response_single_element(pygrack_instance,
196 def test_inject_messages_to_response_single_element(pygrack_instance,
197 capabilities):
197 capabilities):
198 response = ['0008NAK\n0009subp\n0000']
198 response = ['0008NAK\n0009subp\n0000']
199 new_response = pygrack_instance._inject_messages_to_response(
199 new_response = pygrack_instance._inject_messages_to_response(
200 response, capabilities, 'foo', 'bar')
200 response, capabilities, 'foo', 'bar')
201
201
202 expected_response = [
202 expected_response = [
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
204
204
205 assert new_response == expected_response
205 assert new_response == expected_response
206
206
207
207
208 @pytest.mark.parametrize('capabilities', [
208 @pytest.mark.parametrize('capabilities', [
209 ['side-band'],
209 ['side-band'],
210 ['side-band-64k'],
210 ['side-band-64k'],
211 ])
211 ])
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
213 capabilities):
213 capabilities):
214 response = [
214 response = [
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 new_response = pygrack_instance._inject_messages_to_response(
216 new_response = pygrack_instance._inject_messages_to_response(
217 response, capabilities, 'foo', 'bar')
217 response, capabilities, 'foo', 'bar')
218
218
219 expected_response = [
219 expected_response = [
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
221 '000asubp4\n', '0008\x02bar', '0000'
221 '000asubp4\n', '0008\x02bar', '0000'
222 ]
222 ]
223
223
224 assert new_response == expected_response
224 assert new_response == expected_response
225
225
226
226
227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
229
229
230 assert response == [pygrack.GitRepository.EMPTY_PACK]
230 assert response == [pygrack.GitRepository.EMPTY_PACK]
231
231
232
232
233 @pytest.mark.parametrize('capabilities', [
233 @pytest.mark.parametrize('capabilities', [
234 ['side-band'],
234 ['side-band'],
235 ['side-band-64k'],
235 ['side-band-64k'],
236 ['side-band-64k', 'no-progress'],
236 ['side-band-64k', 'no-progress'],
237 ])
237 ])
238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 response = pygrack_instance._build_failed_pre_pull_response(
239 response = pygrack_instance._build_failed_pre_pull_response(
240 capabilities, 'foo')
240 capabilities, 'foo')
241
241
242 expected_response = [
242 expected_response = [
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
245 pygrack.GitRepository.EMPTY_PACK),
245 pygrack.GitRepository.EMPTY_PACK),
246 '0000',
246 '0000',
247 ]
247 ]
248
248
249 assert response == expected_response
249 assert response == expected_response
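test_get_messages above fixes the pkt-line framing for hook output: the text is sent on sideband channel 2 ('\x02'), chunked at 995 payload bytes for plain side-band and 65515 for side-band-64k, and each pkt-line starts with a four-hex-digit length that includes the length field itself. A sketch that reproduces the expected values in that table (illustrative; not pygrack's internal _get_messages):

def sideband_messages_sketch(text, capabilities):
    # Frame ``text`` as git pkt-lines on sideband channel 2, using the
    # chunk sizes the parametrized cases above expect.
    if not text or not any(c.startswith('side-band') for c in capabilities):
        return []
    # 4 length bytes + 1 channel byte leave 995 (side-band) or
    # 65515 (side-band-64k) bytes of text per pkt-line.
    max_chunk = 65515 if 'side-band-64k' in capabilities else 995
    messages = []
    for start in range(0, len(text), max_chunk):
        payload = '\x02' + text[start:start + max_chunk]
        messages.append('%04x%s' % (len(payload) + 4, payload))
    return messages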
@@ -1,86 +1,86 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mercurial.hg
20 import mercurial.hg
21 import mercurial.ui
21 import mercurial.ui
22 import mercurial.error
22 import mercurial.error
23 import mock
23 import mock
24 import pytest
24 import pytest
25 import webtest
25 import webtest
26
26
27 from vcsserver import scm_app
27 from vcsserver import scm_app
28
28
29
29
30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 app = webtest.TestApp(scm_app.HgWeb(repo))
32 app = webtest.TestApp(scm_app.HgWeb(repo))
33
33
34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35
35
36 assert response.status_int == 400
36 assert response.status_int == 400
37
37
38
38
39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 config = (
41 config = (
42 ('paths', 'default', ''),
42 ('paths', 'default', ''),
43 )
43 )
44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 hgweb_mock.side_effect = mercurial.error.RequirementError()
45 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 with pytest.raises(Exception):
46 with pytest.raises(Exception):
47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48
48
49
49
50 def test_git_returns_not_found(tmpdir):
50 def test_git_returns_not_found(tmpdir):
51 app = webtest.TestApp(
51 app = webtest.TestApp(
52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53
53
54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 expect_errors=True)
55 expect_errors=True)
56
56
57 assert response.status_int == 404
57 assert response.status_int == 404
58
58
59
59
60 def test_git(tmpdir):
60 def test_git(tmpdir):
61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 tmpdir.mkdir(dir_name)
62 tmpdir.mkdir(dir_name)
63
63
64 app = webtest.TestApp(
64 app = webtest.TestApp(
65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66
66
67 # We set service to git-upload-packs to trigger a 403
67 # We set service to git-upload-packs to trigger a 403
68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 expect_errors=True)
69 expect_errors=True)
70
70
71 assert response.status_int == 403
71 assert response.status_int == 403
72
72
73
73
74 def test_git_fallbacks_to_git_folder(tmpdir):
74 def test_git_fallbacks_to_git_folder(tmpdir):
75 tmpdir.mkdir('.git')
75 tmpdir.mkdir('.git')
76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 tmpdir.mkdir(os.path.join('.git', dir_name))
77 tmpdir.mkdir(os.path.join('.git', dir_name))
78
78
79 app = webtest.TestApp(
79 app = webtest.TestApp(
80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81
81
82 # We set service to git-upload-packs to trigger a 403
82 # We set service to git-upload-packs to trigger a 403
83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 expect_errors=True)
84 expect_errors=True)
85
85
86 assert response.status_int == 403
86 assert response.status_int == 403
@@ -1,39 +1,39 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 from vcsserver.server import VcsServer
23 from vcsserver.server import VcsServer
24
24
25
25
26 def test_provides_the_pid(server):
26 def test_provides_the_pid(server):
27 pid = server.get_pid()
27 pid = server.get_pid()
28 assert pid == os.getpid()
28 assert pid == os.getpid()
29
29
30
30
31 def test_allows_to_trigger_the_garbage_collector(server):
31 def test_allows_to_trigger_the_garbage_collector(server):
32 with mock.patch('gc.collect') as collect:
32 with mock.patch('gc.collect') as collect:
33 server.run_gc()
33 server.run_gc()
34 assert collect.called
34 assert collect.called
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def server():
38 def server():
39 return VcsServer()
39 return VcsServer()
@@ -1,122 +1,122 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21
21
22 import pytest
22 import pytest
23
23
24 from vcsserver import subprocessio
24 from vcsserver import subprocessio
25
25
26
26
27 @pytest.fixture(scope='module')
27 @pytest.fixture(scope='module')
28 def environ():
28 def environ():
29 """Delete coverage variables, as they make the tests fail."""
29 """Delete coverage variables, as they make the tests fail."""
30 env = dict(os.environ)
30 env = dict(os.environ)
31 for key in env.keys():
31 for key in env.keys():
32 if key.startswith('COV_CORE_'):
32 if key.startswith('COV_CORE_'):
33 del env[key]
33 del env[key]
34
34
35 return env
35 return env
36
36
37
37
38 def _get_python_args(script):
38 def _get_python_args(script):
39 return [sys.executable, '-c',
39 return [sys.executable, '-c',
40 'import sys; import time; import shutil; ' + script]
40 'import sys; import time; import shutil; ' + script]
41
41
42
42
43 def test_raise_exception_on_non_zero_return_code(environ):
43 def test_raise_exception_on_non_zero_return_code(environ):
44 args = _get_python_args('sys.exit(1)')
44 args = _get_python_args('sys.exit(1)')
45 with pytest.raises(EnvironmentError):
45 with pytest.raises(EnvironmentError):
46 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
46 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
47
47
48
48
49 def test_does_not_fail_on_non_zero_return_code(environ):
49 def test_does_not_fail_on_non_zero_return_code(environ):
50 args = _get_python_args('sys.exit(1)')
50 args = _get_python_args('sys.exit(1)')
51 output = ''.join(subprocessio.SubprocessIOChunker(
51 output = ''.join(subprocessio.SubprocessIOChunker(
52 args, shell=False, fail_on_return_code=False, env=environ))
52 args, shell=False, fail_on_return_code=False, env=environ))
53
53
54 assert output == ''
54 assert output == ''
55
55
56
56
57 def test_raise_exception_on_stderr(environ):
57 def test_raise_exception_on_stderr(environ):
58 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
58 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
59 with pytest.raises(EnvironmentError) as excinfo:
59 with pytest.raises(EnvironmentError) as excinfo:
60 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
60 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
61
61
62 assert 'exited due to an error:\nX' in str(excinfo.value)
62 assert 'exited due to an error:\nX' in str(excinfo.value)
63
63
64
64
65 def test_does_not_fail_on_stderr(environ):
65 def test_does_not_fail_on_stderr(environ):
66 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
66 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
67 output = ''.join(subprocessio.SubprocessIOChunker(
67 output = ''.join(subprocessio.SubprocessIOChunker(
68 args, shell=False, fail_on_stderr=False, env=environ))
68 args, shell=False, fail_on_stderr=False, env=environ))
69
69
70 assert output == ''
70 assert output == ''
71
71
72
72
73 @pytest.mark.parametrize('size', [1, 10**5])
73 @pytest.mark.parametrize('size', [1, 10**5])
74 def test_output_with_no_input(size, environ):
74 def test_output_with_no_input(size, environ):
75 print type(environ)
75 print type(environ)
76 data = 'X'
76 data = 'X'
77 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
77 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
78 output = ''.join(subprocessio.SubprocessIOChunker(
78 output = ''.join(subprocessio.SubprocessIOChunker(
79 args, shell=False, env=environ))
79 args, shell=False, env=environ))
80
80
81 assert output == data * size
81 assert output == data * size
82
82
83
83
84 @pytest.mark.parametrize('size', [1, 10**5])
84 @pytest.mark.parametrize('size', [1, 10**5])
85 def test_output_with_no_input_does_not_fail(size, environ):
85 def test_output_with_no_input_does_not_fail(size, environ):
86 data = 'X'
86 data = 'X'
87 args = _get_python_args(
87 args = _get_python_args(
88 'sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
88 'sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
89 output = ''.join(subprocessio.SubprocessIOChunker(
89 output = ''.join(subprocessio.SubprocessIOChunker(
90 args, shell=False, fail_on_return_code=False, env=environ))
90 args, shell=False, fail_on_return_code=False, env=environ))
91
91
92 print len(data * size), len(output)
92 print len(data * size), len(output)
93 assert output == data * size
93 assert output == data * size
94
94
95
95
96 @pytest.mark.parametrize('size', [1, 10**5])
96 @pytest.mark.parametrize('size', [1, 10**5])
97 def test_output_with_input(size, environ):
97 def test_output_with_input(size, environ):
98 data = 'X' * size
98 data = 'X' * size
99 inputstream = io.BytesIO(data)
99 inputstream = io.BytesIO(data)
100 # This acts like the cat command.
100 # This acts like the cat command.
101 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
101 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
102 output = ''.join(subprocessio.SubprocessIOChunker(
102 output = ''.join(subprocessio.SubprocessIOChunker(
103 args, shell=False, inputstream=inputstream, env=environ))
103 args, shell=False, inputstream=inputstream, env=environ))
104
104
105 print len(data), len(output)
105 print len(data), len(output)
106 assert output == data
106 assert output == data
107
107
108
108
109 @pytest.mark.parametrize('size', [1, 10**5])
109 @pytest.mark.parametrize('size', [1, 10**5])
110 def test_output_with_input_skipping_iterator(size, environ):
110 def test_output_with_input_skipping_iterator(size, environ):
111 data = 'X' * size
111 data = 'X' * size
112 inputstream = io.BytesIO(data)
112 inputstream = io.BytesIO(data)
113 # This acts like the cat command.
113 # This acts like the cat command.
114 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
114 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
115
115
116 # Note: assigning the chunker makes sure that it is not deleted too early
116 # Note: assigning the chunker makes sure that it is not deleted too early
117 chunker = subprocessio.SubprocessIOChunker(
117 chunker = subprocessio.SubprocessIOChunker(
118 args, shell=False, inputstream=inputstream, env=environ)
118 args, shell=False, inputstream=inputstream, env=environ)
119 output = ''.join(chunker.output)
119 output = ''.join(chunker.output)
120
120
121 print len(data), len(output)
121 print len(data), len(output)
122 assert output == data
122 assert output == data
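The tests above exercise SubprocessIOChunker as an iterator over a child process's stdout. A minimal usage sketch along the same lines (Python 2, as in the tests; only constructor arguments already shown above are used):

import os
import sys

from vcsserver import subprocessio

# Echo a short string from a child Python process and stream it back chunk by chunk.
args = [sys.executable, '-c', 'import sys; sys.stdout.write("hello")']
chunker = subprocessio.SubprocessIOChunker(args, shell=False, env=dict(os.environ))
print ''.join(chunker)  # prints: hello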
@@ -1,67 +1,67 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import mock
19 import mock
20 import pytest
20 import pytest
21 import sys
21 import sys
22
22
23
23
24 class MockPopen(object):
24 class MockPopen(object):
25 def __init__(self, stderr):
25 def __init__(self, stderr):
26 self.stdout = io.BytesIO('')
26 self.stdout = io.BytesIO('')
27 self.stderr = io.BytesIO(stderr)
27 self.stderr = io.BytesIO(stderr)
28 self.returncode = 1
28 self.returncode = 1
29
29
30 def wait(self):
30 def wait(self):
31 pass
31 pass
32
32
33
33
34 INVALID_CERTIFICATE_STDERR = '\n'.join([
34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 'svnrdump: E230001: Unable to connect to a repository at URL url',
35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 ])
37 ])
38
38
39
39
40 @pytest.mark.parametrize('stderr,expected_reason', [
40 @pytest.mark.parametrize('stderr,expected_reason', [
41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 ('svnrdump: E123456', 'UNKNOWN'),
42 ('svnrdump: E123456', 'UNKNOWN'),
43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 @pytest.mark.xfail(sys.platform == "cygwin",
44 @pytest.mark.xfail(sys.platform == "cygwin",
45 reason="SVN not packaged for Cygwin")
45 reason="SVN not packaged for Cygwin")
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 from vcsserver import svn
47 from vcsserver import svn
48
48
49 remote = svn.SvnRemote(None)
49 remote = svn.SvnRemote(None)
50 remote.is_path_valid_repository = lambda wire, path: True
50 remote.is_path_valid_repository = lambda wire, path: True
51
51
52 with mock.patch('subprocess.Popen',
52 with mock.patch('subprocess.Popen',
53 return_value=MockPopen(stderr)):
53 return_value=MockPopen(stderr)):
54 with pytest.raises(Exception) as excinfo:
54 with pytest.raises(Exception) as excinfo:
55 remote.import_remote_repository({'path': 'path'}, 'url')
55 remote.import_remote_repository({'path': 'path'}, 'url')
56
56
57 expected_error_args = (
57 expected_error_args = (
58 'Failed to dump the remote repository from url.',
58 'Failed to dump the remote repository from url.',
59 expected_reason)
59 expected_reason)
60
60
61 assert excinfo.value.args == expected_error_args
61 assert excinfo.value.args == expected_error_args
62
62
63
63
64 def test_svn_libraries_can_be_imported():
64 def test_svn_libraries_can_be_imported():
65 import svn
65 import svn
66 import svn.client
66 import svn.client
67 assert svn.client is not None
67 assert svn.client is not None
@@ -1,96 +1,96 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import wsgiref.simple_server
18 import wsgiref.simple_server
19 import wsgiref.validate
19 import wsgiref.validate
20
20
21 from vcsserver import wsgi_app_caller
21 from vcsserver import wsgi_app_caller
22
22
23
23
24 # pylint: disable=protected-access,too-many-public-methods
24 # pylint: disable=protected-access,too-many-public-methods
25
25
26
26
27 @wsgiref.validate.validator
27 @wsgiref.validate.validator
28 def demo_app(environ, start_response):
28 def demo_app(environ, start_response):
29 """WSGI app used for testing."""
29 """WSGI app used for testing."""
30 data = [
30 data = [
31 'Hello World!\n',
31 'Hello World!\n',
32 'input_data=%s\n' % environ['wsgi.input'].read(),
32 'input_data=%s\n' % environ['wsgi.input'].read(),
33 ]
33 ]
34 for key, value in sorted(environ.items()):
34 for key, value in sorted(environ.items()):
35 data.append('%s=%s\n' % (key, value))
35 data.append('%s=%s\n' % (key, value))
36
36
37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write('Old school write method\n')
38 write('Old school write method\n')
39 write('***********************\n')
39 write('***********************\n')
40 return data
40 return data
41
41
42
42
43 BASE_ENVIRON = {
43 BASE_ENVIRON = {
44 'REQUEST_METHOD': 'GET',
44 'REQUEST_METHOD': 'GET',
45 'SERVER_NAME': 'localhost',
45 'SERVER_NAME': 'localhost',
46 'SERVER_PORT': '80',
46 'SERVER_PORT': '80',
47 'SCRIPT_NAME': '',
47 'SCRIPT_NAME': '',
48 'PATH_INFO': '/',
48 'PATH_INFO': '/',
49 'QUERY_STRING': '',
49 'QUERY_STRING': '',
50 'foo.var': 'bla',
50 'foo.var': 'bla',
51 }
51 }
52
52
53
53
54 def test_complete_environ():
54 def test_complete_environ():
55 environ = dict(BASE_ENVIRON)
55 environ = dict(BASE_ENVIRON)
56 data = "data"
56 data = "data"
57 wsgi_app_caller._complete_environ(environ, data)
57 wsgi_app_caller._complete_environ(environ, data)
58 wsgiref.validate.check_environ(environ)
58 wsgiref.validate.check_environ(environ)
59
59
60 assert data == environ['wsgi.input'].read()
60 assert data == environ['wsgi.input'].read()
61
61
62
62
63 def test_start_response():
63 def test_start_response():
64 start_response = wsgi_app_caller._StartResponse()
64 start_response = wsgi_app_caller._StartResponse()
65 status = '200 OK'
65 status = '200 OK'
66 headers = [('Content-Type', 'text/plain')]
66 headers = [('Content-Type', 'text/plain')]
67 start_response(status, headers)
67 start_response(status, headers)
68
68
69 assert status == start_response.status
69 assert status == start_response.status
70 assert headers == start_response.headers
70 assert headers == start_response.headers
71
71
72
72
73 def test_start_response_with_error():
73 def test_start_response_with_error():
74 start_response = wsgi_app_caller._StartResponse()
74 start_response = wsgi_app_caller._StartResponse()
75 status = '500 Internal Server Error'
75 status = '500 Internal Server Error'
76 headers = [('Content-Type', 'text/plain')]
76 headers = [('Content-Type', 'text/plain')]
77 start_response(status, headers, (None, None, None))
77 start_response(status, headers, (None, None, None))
78
78
79 assert status == start_response.status
79 assert status == start_response.status
80 assert headers == start_response.headers
80 assert headers == start_response.headers
81
81
82
82
83 def test_wsgi_app_caller():
83 def test_wsgi_app_caller():
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 environ = dict(BASE_ENVIRON)
85 environ = dict(BASE_ENVIRON)
86 input_data = 'some text'
86 input_data = 'some text'
87 responses, status, headers = caller.handle(environ, input_data)
87 responses, status, headers = caller.handle(environ, input_data)
88 response = ''.join(responses)
88 response = ''.join(responses)
89
89
90 assert status == '200 OK'
90 assert status == '200 OK'
91 assert headers == [('Content-Type', 'text/plain')]
91 assert headers == [('Content-Type', 'text/plain')]
92 assert response.startswith(
92 assert response.startswith(
93 'Old school write method\n***********************\n')
93 'Old school write method\n***********************\n')
94 assert 'Hello World!\n' in response
94 assert 'Hello World!\n' in response
95 assert 'foo.var=bla\n' in response
95 assert 'foo.var=bla\n' in response
96 assert 'input_data=%s\n' % input_data in response
96 assert 'input_data=%s\n' % input_data in response
@@ -1,60 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19
19
20 import time
20 import time
21 import logging
21 import logging
22
22
23
23
24 from vcsserver.utils import safe_str
24 from vcsserver.utils import safe_str
25
25
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
30 def get_access_path(request):
30 def get_access_path(request):
31 environ = request.environ
31 environ = request.environ
32 return environ.get('PATH_INFO')
32 return environ.get('PATH_INFO')
33
33
34
34
35 class RequestWrapperTween(object):
35 class RequestWrapperTween(object):
36 def __init__(self, handler, registry):
36 def __init__(self, handler, registry):
37 self.handler = handler
37 self.handler = handler
38 self.registry = registry
38 self.registry = registry
39
39
40 # one-time configuration code goes here
40 # one-time configuration code goes here
41
41
42 def __call__(self, request):
42 def __call__(self, request):
43 start = time.time()
43 start = time.time()
44 try:
44 try:
45 response = self.handler(request)
45 response = self.handler(request)
46 finally:
46 finally:
47 end = time.time()
47 end = time.time()
48
48
49 log.info('IP: %s Request to path: `%s` time: %.3fs' % (
49 log.info('IP: %s Request to path: `%s` time: %.3fs' % (
50 '127.0.0.1',
50 '127.0.0.1',
51 safe_str(get_access_path(request)), end - start)
51 safe_str(get_access_path(request)), end - start)
52 )
52 )
53
53
54 return response
54 return response
55
55
56
56
57 def includeme(config):
57 def includeme(config):
58 config.add_tween(
58 config.add_tween(
59 'vcsserver.tweens.RequestWrapperTween',
59 'vcsserver.tweens.RequestWrapperTween',
60 )
60 )
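RequestWrapperTween wraps every request handler and logs the request path and timing; includeme() registers it. A minimal sketch of pulling it into a Pyramid application, assuming a standard Configurator setup:

from pyramid.config import Configurator

def make_app(global_config, **settings):
    config = Configurator(settings=settings)
    # config.include() imports the dotted module and calls its includeme(config),
    # which adds the tween around every request handler.
    config.include('vcsserver.tweens')
    return config.make_wsgi_app()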
@@ -1,72 +1,72 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 def safe_int(val, default=None):
19 def safe_int(val, default=None):
20 """
20 """
21 Returns int() of val; if val is not convertible to int, use default
21 Returns int() of val; if val is not convertible to int, use default
22 instead.
22 instead.
23
23
24 :param val:
24 :param val:
25 :param default:
25 :param default:
26 """
26 """
27
27
28 try:
28 try:
29 val = int(val)
29 val = int(val)
30 except (ValueError, TypeError):
30 except (ValueError, TypeError):
31 val = default
31 val = default
32
32
33 return val
33 return val
34
34
35
35
36 def safe_str(unicode_, to_encoding=['utf8']):
36 def safe_str(unicode_, to_encoding=['utf8']):
37 """
37 """
38 safe str function. Does a few tricks to turn unicode_ into a string.
38 safe str function. Does a few tricks to turn unicode_ into a string.
39
39
40 In case of UnicodeEncodeError, we try to return it with the encoding detected
40 In case of UnicodeEncodeError, we try to return it with the encoding detected
41 by the chardet library; if that fails, fall back to a string with errors replaced.
41 by the chardet library; if that fails, fall back to a string with errors replaced.
42
42
43 :param unicode_: unicode to encode
43 :param unicode_: unicode to encode
44 :rtype: str
44 :rtype: str
45 :returns: str object
45 :returns: str object
46 """
46 """
47
47
48 # if it's not basestr cast to str
48 # if it's not basestr cast to str
49 if not isinstance(unicode_, basestring):
49 if not isinstance(unicode_, basestring):
50 return str(unicode_)
50 return str(unicode_)
51
51
52 if isinstance(unicode_, str):
52 if isinstance(unicode_, str):
53 return unicode_
53 return unicode_
54
54
55 if not isinstance(to_encoding, (list, tuple)):
55 if not isinstance(to_encoding, (list, tuple)):
56 to_encoding = [to_encoding]
56 to_encoding = [to_encoding]
57
57
58 for enc in to_encoding:
58 for enc in to_encoding:
59 try:
59 try:
60 return unicode_.encode(enc)
60 return unicode_.encode(enc)
61 except UnicodeEncodeError:
61 except UnicodeEncodeError:
62 pass
62 pass
63
63
64 try:
64 try:
65 import chardet
65 import chardet
66 encoding = chardet.detect(unicode_)['encoding']
66 encoding = chardet.detect(unicode_)['encoding']
67 if encoding is None:
67 if encoding is None:
68 raise UnicodeEncodeError()
68 raise UnicodeEncodeError()
69
69
70 return unicode_.encode(encoding)
70 return unicode_.encode(encoding)
71 except (ImportError, UnicodeEncodeError):
71 except (ImportError, UnicodeEncodeError):
72 return unicode_.encode(to_encoding[0], 'replace')
72 return unicode_.encode(to_encoding[0], 'replace')
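A short usage sketch of the two helpers above (Python 2 semantics, as the basestring check implies):

from vcsserver.utils import safe_int, safe_str

assert safe_int('5') == 5
assert safe_int('not-a-number', default=0) == 0

assert safe_str('already-a-str') == 'already-a-str'
assert safe_str(u'caf\xe9') == 'caf\xc3\xa9'  # encoded with the default utf8
assert safe_str(42) == '42'                   # non-string input is cast via str()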
@@ -1,116 +1,116 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Extract the responses of a WSGI app."""
18 """Extract the responses of a WSGI app."""
19
19
20 __all__ = ('WSGIAppCaller',)
20 __all__ = ('WSGIAppCaller',)
21
21
22 import io
22 import io
23 import logging
23 import logging
24 import os
24 import os
25
25
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29 DEV_NULL = open(os.devnull)
29 DEV_NULL = open(os.devnull)
30
30
31
31
32 def _complete_environ(environ, input_data):
32 def _complete_environ(environ, input_data):
33 """Update the missing wsgi.* variables of a WSGI environment.
33 """Update the missing wsgi.* variables of a WSGI environment.
34
34
35 :param environ: WSGI environment to update
35 :param environ: WSGI environment to update
36 :type environ: dict
36 :type environ: dict
37 :param input_data: data to be read by the app
37 :param input_data: data to be read by the app
38 :type input_data: str
38 :type input_data: str
39 """
39 """
40 environ.update({
40 environ.update({
41 'wsgi.version': (1, 0),
41 'wsgi.version': (1, 0),
42 'wsgi.url_scheme': 'http',
42 'wsgi.url_scheme': 'http',
43 'wsgi.multithread': True,
43 'wsgi.multithread': True,
44 'wsgi.multiprocess': True,
44 'wsgi.multiprocess': True,
45 'wsgi.run_once': False,
45 'wsgi.run_once': False,
46 'wsgi.input': io.BytesIO(input_data),
46 'wsgi.input': io.BytesIO(input_data),
47 'wsgi.errors': DEV_NULL,
47 'wsgi.errors': DEV_NULL,
48 })
48 })
49
49
50
50
51 # pylint: disable=too-few-public-methods
51 # pylint: disable=too-few-public-methods
52 class _StartResponse(object):
52 class _StartResponse(object):
53 """Save the arguments of a start_response call."""
53 """Save the arguments of a start_response call."""
54
54
55 __slots__ = ['status', 'headers', 'content']
55 __slots__ = ['status', 'headers', 'content']
56
56
57 def __init__(self):
57 def __init__(self):
58 self.status = None
58 self.status = None
59 self.headers = None
59 self.headers = None
60 self.content = []
60 self.content = []
61
61
62 def __call__(self, status, headers, exc_info=None):
62 def __call__(self, status, headers, exc_info=None):
63 # TODO(skreft): do something meaningful with the exc_info
63 # TODO(skreft): do something meaningful with the exc_info
64 exc_info = None # avoid dangling circular reference
64 exc_info = None # avoid dangling circular reference
65 self.status = status
65 self.status = status
66 self.headers = headers
66 self.headers = headers
67
67
68 return self.write
68 return self.write
69
69
70 def write(self, content):
70 def write(self, content):
71 """Write method returning when calling this object.
71 """Write method returning when calling this object.
72
72
73 All the data written is then available in content.
73 All the data written is then available in content.
74 """
74 """
75 self.content.append(content)
75 self.content.append(content)
76
76
77
77
78 class WSGIAppCaller(object):
78 class WSGIAppCaller(object):
79 """Calls a WSGI app."""
79 """Calls a WSGI app."""
80
80
81 def __init__(self, app):
81 def __init__(self, app):
82 """
82 """
83 :param app: WSGI app to call
83 :param app: WSGI app to call
84 """
84 """
85 self.app = app
85 self.app = app
86
86
87 def handle(self, environ, input_data):
87 def handle(self, environ, input_data):
88 """Process a request with the WSGI app.
88 """Process a request with the WSGI app.
89
89
90 The returned data of the app is fully consumed into a list.
90 The returned data of the app is fully consumed into a list.
91
91
92 :param environ: WSGI environment to update
92 :param environ: WSGI environment to update
93 :type environ: dict
93 :type environ: dict
94 :param input_data: data to be read by the app
94 :param input_data: data to be read by the app
95 :type input_data: str
95 :type input_data: str
96
96
97 :returns: a tuple with the contents, status and headers
97 :returns: a tuple with the contents, status and headers
98 :rtype: (list<str>, str, list<(str, str)>)
98 :rtype: (list<str>, str, list<(str, str)>)
99 """
99 """
100 _complete_environ(environ, input_data)
100 _complete_environ(environ, input_data)
101 start_response = _StartResponse()
101 start_response = _StartResponse()
102 log.debug("Calling wrapped WSGI application")
102 log.debug("Calling wrapped WSGI application")
103 responses = self.app(environ, start_response)
103 responses = self.app(environ, start_response)
104 responses_list = list(responses)
104 responses_list = list(responses)
105 existing_responses = start_response.content
105 existing_responses = start_response.content
106 if existing_responses:
106 if existing_responses:
107 log.debug(
107 log.debug(
108 "Adding returned response to response written via write()")
108 "Adding returned response to response written via write()")
109 existing_responses.extend(responses_list)
109 existing_responses.extend(responses_list)
110 responses_list = existing_responses
110 responses_list = existing_responses
111 if hasattr(responses, 'close'):
111 if hasattr(responses, 'close'):
112 log.debug("Closing iterator from WSGI application")
112 log.debug("Closing iterator from WSGI application")
113 responses.close()
113 responses.close()
114
114
115 log.debug("Handling of WSGI request done, returning response")
115 log.debug("Handling of WSGI request done, returning response")
116 return responses_list, start_response.status, start_response.headers
116 return responses_list, start_response.status, start_response.headers
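A minimal usage sketch of WSGIAppCaller with a trivial WSGI app (the environ keys mirror BASE_ENVIRON from the test module earlier in this diff):

from vcsserver.wsgi_app_caller import WSGIAppCaller

def hello_app(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['hello\n']

environ = {
    'REQUEST_METHOD': 'GET', 'SERVER_NAME': 'localhost', 'SERVER_PORT': '80',
    'SCRIPT_NAME': '', 'PATH_INFO': '/', 'QUERY_STRING': '',
}
responses, status, headers = WSGIAppCaller(hello_app).handle(environ, 'input data')
assert status == '200 OK'
assert ''.join(responses) == 'hello\n'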