release: Merge default into stable for release preparation
Author: marcink
Changeset: r156:c4fd9054 (merge into stable)
@@ -0,0 +1,18 b''
1
2 .PHONY: clean test test-clean test-only
3
4
5 clean:
6 make test-clean
7 find . -type f \( -iname '*.c' -o -iname '*.pyc' -o -iname '*.so' \) -exec rm '{}' ';'
8
9 test:
10 make test-clean
11 make test-only
12
13 test-clean:
14 rm -rf coverage.xml htmlcov junit.xml pylint.log result
15 find . -type d -name "__pycache__" -prune -exec rm -rf '{}' ';'
16
17 test-only:
18 PYTHONHASHSEED=random py.test -vv -r xw --cov=vcsserver --cov-report=term-missing --cov-report=html vcsserver
@@ -0,0 +1,3 b''
1 [pytest]
2 testpaths = ./vcsserver
3 addopts = -v
@@ -0,0 +1,15 b''
1 # test related requirements
2 pytest==3.0.5
3 py==1.4.31
4 pytest-cov==2.4.0
5 pytest-sugar==0.7.1
6 pytest-runner==2.9.0
7 pytest-catchlog==1.2.2
8 pytest-profiling==1.2.2
9 gprof2dot==2016.10.13
10 pytest-timeout==1.2.0
11
12 mock==1.0.1
13 WebTest==1.4.3
14 cov-core==1.15.0
15 coverage==3.7.1
@@ -0,0 +1,2 b''
1 [aliases]
2 test = pytest
@@ -0,0 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19
20 import time
21 import logging
22
23
24 from vcsserver.utils import safe_str
25
26
27 log = logging.getLogger(__name__)
28
29
30 def get_access_path(request):
31 environ = request.environ
32 return environ.get('PATH_INFO')
33
34
35 class RequestWrapperTween(object):
36 def __init__(self, handler, registry):
37 self.handler = handler
38 self.registry = registry
39
40 # one-time configuration code goes here
41
42 def __call__(self, request):
43 start = time.time()
44 try:
45 response = self.handler(request)
46 finally:
47 end = time.time()
48
49 log.info('IP: %s Request to %s time: %.3fs' % (
50 '127.0.0.1',
51 safe_str(get_access_path(request)), end - start)
52 )
53
54 return response
55
56
57 def includeme(config):
58 config.add_tween(
59 'vcsserver.tweens.RequestWrapperTween',
60 )
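The tween module above only times the wrapped handler and logs the result; nothing in this changeset activates it. As a rough sketch (assumed wiring, not part of the diff), a Pyramid application would enable it by including the module, which triggers includeme() and registers the tween:

    from pyramid.config import Configurator

    def make_app(global_config, **settings):
        # Hypothetical application factory: config.include() imports
        # 'vcsserver.tweens' and calls its includeme(), which registers
        # RequestWrapperTween via config.add_tween().
        config = Configurator(settings=settings)
        config.include('vcsserver.tweens')
        return config.make_wsgi_app()

Once registered, every request passes through RequestWrapperTween.__call__, which logs "IP: 127.0.0.1 Request to <PATH_INFO> time: <seconds>" even when the handler raises, because the timing and the log call sit in the finally block.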
@@ -1,6 +1,6 b''
 [bumpversion]
-current_version = 4.5.2
+current_version = 4.6.0
 message = release: Bump version {current_version} to {new_version}

 [bumpversion:file:vcsserver/VERSION]

@@ -1,36 +1,38 b''
 syntax: glob
 *.orig
 *.pyc
 *.swp
+*.sqlite
 *.tox
 *.egg-info
 *.egg
+*.eggs
 *.idea
 .DS_Store*


 syntax: regexp

 #.filename
 ^\.settings$
 ^\.project$
 ^\.pydevproject$
 ^\.coverage$
 ^\.cache.*$
 ^\.rhodecode$

 ^_dev
 ^._dev
 ^build/
 ^coverage\.xml$
 ^data$
 ^dev.ini$
 ^acceptance_tests/dev.*\.ini$
 ^dist/
 ^fabfile.py
 ^htmlcov
 ^junit\.xml$
 ^node_modules/
 ^pylint.log$
 ^build$
 ^result$
@@ -1,16 +1,14 b''
 [DEFAULT]
 done = false

 [task:bump_version]
 done = true

 [task:fixes_on_stable]
-done = true

 [task:pip2nix_generated]
-done = true

 [release]
-state = prepared
-version = 4.5.2
+state = in_progress
+version = 4.6.0

@@ -1,15 +1,13 b''
 # top level files
-include test.ini
-include MANIFEST.in
-include README.rst
-include CHANGES.rst
-include LICENSE.txt
+include *.rst
+include *.txt

+# package extras
 include vcsserver/VERSION

 # all config files
 recursive-include configs *

 # skip any tests files
-recursive-exclude tests *
+recursive-exclude vcsserver/tests *

@@ -1,138 +1,153 b''
 # Nix environment for the community edition
 #
 # This shall be as lean as possible, just producing the rhodecode-vcsserver
 # derivation. For advanced tweaks to pimp up the development environment we use
 # "shell.nix" so that it does not have to clutter this file.

 { pkgs ? (import <nixpkgs> {})
 , pythonPackages ? "python27Packages"
 , pythonExternalOverrides ? self: super: {}
 , doCheck ? true
 }:

 let pkgs_ = pkgs; in

 let
   pkgs = pkgs_.overridePackages (self: super: {
+    # bump GIT version
+    git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
+      name = "git-2.9.3";
+      src = pkgs.fetchurl {
+        url = "https://www.kernel.org/pub/software/scm/git/git-2.9.3.tar.xz";
+        sha256 = "0qzs681a64k3shh5p0rg41l1z16fbk5sj0xga45k34hp1hsp654z";
+      };
+
+    });
+
     # Override subversion derivation to
     # - activate python bindings
     subversion = let
       subversionWithPython = super.subversion.override {
         httpSupport = true;
         pythonBindings = true;
         python = self.python27Packages.python;
       };
-      in pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
+
+    in
+
+    pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
       patches = (oldAttrs.patches or []) ++
         pkgs.lib.optionals pkgs.stdenv.isDarwin [
           # johbo: "import svn.client" fails on darwin currently.
           ./pkgs/subversion-1.9.4-darwin.patch
         ];
     });
+
   });

   inherit (pkgs.lib) fix extends;
-
   basePythonPackages = with builtins; if isAttrs pythonPackages
     then pythonPackages
     else getAttr pythonPackages pkgs;

   elem = builtins.elem;
   basename = path: with pkgs.lib; last (splitString "/" path);
   startsWith = prefix: full: let
     actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
   in actualPrefix == prefix;

   src-filter = path: type: with pkgs.lib;
     let
       ext = last (splitString "." path);
     in
-      !elem (basename path) [
-        ".git" ".hg" "__pycache__" ".eggs" "node_modules"
-        "build" "data" "tmp"] &&
+      !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
+        "node_modules" "build" "data" "tmp"] &&
       !elem ext ["egg-info" "pyc"] &&
       !startsWith "result" path;

   rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;

   pythonGeneratedPackages = self: basePythonPackages.override (a: {
     inherit self;
-  })
-  // (scopedImport {
+  }) // (scopedImport {
     self = self;
     super = basePythonPackages;
     inherit pkgs;
     inherit (pkgs) fetchurl fetchgit;
   } ./pkgs/python-packages.nix);

   pythonOverrides = import ./pkgs/python-packages-overrides.nix {
-    inherit
-      basePythonPackages
-      pkgs;
+    inherit basePythonPackages pkgs;
   };

   version = builtins.readFile ./vcsserver/VERSION;

   pythonLocalOverrides = self: super: {
     rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
-      inherit
-        doCheck
-        version;
+      inherit doCheck version;
+
       name = "rhodecode-vcsserver-${version}";
       releaseName = "RhodeCodeVCSServer-${version}";
       src = rhodecode-vcsserver-src;

       propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
         pkgs.git
         pkgs.subversion
       ]);

       # TODO: johbo: Make a nicer way to expose the parts. Maybe
       # pkgs/default.nix?
       passthru = {
         pythonPackages = self;
       };

       # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
       preCheck = ''
         export PATH="$out/bin:$PATH"
       '';

+      # put custom attrs here
+      checkPhase = ''
+        runHook preCheck
+        PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
+        runHook postCheck
+      '';
+
       postInstall = ''
         echo "Writing meta information for rccontrol to nix-support/rccontrol"
         mkdir -p $out/nix-support/rccontrol
         cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
         echo "DONE: Meta information for rccontrol written"

         ln -s ${self.pyramid}/bin/* $out/bin/
         ln -s ${self.gunicorn}/bin/gunicorn $out/bin/

         # Symlink version control utilities
         #
         # We ensure that always the correct version is available as a symlink.
         # So that users calling them via the profile path will always use the
         # correct version.
         ln -s ${pkgs.git}/bin/git $out/bin
         ln -s ${self.mercurial}/bin/hg $out/bin
         ln -s ${pkgs.subversion}/bin/svn* $out/bin

         for file in $out/bin/*; do
           wrapProgram $file \
             --set PATH $PATH \
             --set PYTHONPATH $PYTHONPATH \
             --set PYTHONHASHSEED random
         done
       '';

     });
   };

   # Apply all overrides and fix the final package set
   myPythonPackages =
     (fix
     (extends pythonExternalOverrides
     (extends pythonLocalOverrides
     (extends pythonOverrides
     pythonGeneratedPackages))));

 in myPythonPackages.rhodecode-vcsserver
@@ -1,57 +1,47 b''
 # Overrides for the generated python-packages.nix
 #
 # This function is intended to be used as an extension to the generated file
 # python-packages.nix. The main objective is to add needed dependencies of C
 # libraries and tweak the build instructions where needed.

 { pkgs, basePythonPackages }:

 let
   sed = "sed -i";
 in

 self: super: {

   subvertpy = super.subvertpy.override (attrs: {
     # TODO: johbo: Remove the "or" once we drop 16.03 support
     SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
     propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
       pkgs.aprutil
       pkgs.subversion
     ];
     preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
       ${sed} -e "s/'gcc'/'clang'/" setup.py
     '';
   });

   mercurial = super.mercurial.override (attrs: {
     propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
       self.python.modules.curses
     ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
       pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
   });

   pyramid = super.pyramid.override (attrs: {
     postFixup = ''
       wrapPythonPrograms
       # TODO: johbo: "wrapPython" adds this magic line which
       # confuses pserve.
       ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
     '';
   });

-  Pyro4 = super.Pyro4.override (attrs: {
-    # TODO: Was not able to generate this version, needs further
-    # investigation.
-    name = "Pyro4-4.35";
-    src = pkgs.fetchurl {
-      url = "https://pypi.python.org/packages/source/P/Pyro4/Pyro4-4.35.src.tar.gz";
-      md5 = "cbe6cb855f086a0f092ca075005855f3";
-    };
-  });
-
   # Avoid that setuptools is replaced, this leads to trouble
   # with buildPythonPackage.
   setuptools = basePythonPackages.setuptools;

 }
@@ -1,694 +1,812 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.4.0
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 {
5 Beaker = super.buildPythonPackage {
5 Beaker = super.buildPythonPackage {
6 name = "Beaker-1.7.0";
6 name = "Beaker-1.7.0";
7 buildInputs = with self; [];
7 buildInputs = with self; [];
8 doCheck = false;
8 doCheck = false;
9 propagatedBuildInputs = with self; [];
9 propagatedBuildInputs = with self; [];
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
12 md5 = "386be3f7fe427358881eee4622b428b3";
12 md5 = "386be3f7fe427358881eee4622b428b3";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
16 };
17 };
17 };
18 Jinja2 = super.buildPythonPackage {
18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.8";
19 name = "Jinja2-2.8";
20 buildInputs = with self; [];
20 buildInputs = with self; [];
21 doCheck = false;
21 doCheck = false;
22 propagatedBuildInputs = with self; [MarkupSafe];
22 propagatedBuildInputs = with self; [MarkupSafe];
23 src = fetchurl {
23 src = fetchurl {
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
26 };
26 };
27 meta = {
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
29 };
30 };
30 };
31 Mako = super.buildPythonPackage {
31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.6";
32 name = "Mako-1.0.6";
33 buildInputs = with self; [];
33 buildInputs = with self; [];
34 doCheck = false;
34 doCheck = false;
35 propagatedBuildInputs = with self; [MarkupSafe];
35 propagatedBuildInputs = with self; [MarkupSafe];
36 src = fetchurl {
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
39 };
39 };
40 meta = {
40 meta = {
41 license = [ pkgs.lib.licenses.mit ];
41 license = [ pkgs.lib.licenses.mit ];
42 };
42 };
43 };
43 };
44 MarkupSafe = super.buildPythonPackage {
44 MarkupSafe = super.buildPythonPackage {
45 name = "MarkupSafe-0.23";
45 name = "MarkupSafe-0.23";
46 buildInputs = with self; [];
46 buildInputs = with self; [];
47 doCheck = false;
47 doCheck = false;
48 propagatedBuildInputs = with self; [];
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
49 src = fetchurl {
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
52 };
52 };
53 meta = {
53 meta = {
54 license = [ pkgs.lib.licenses.bsdOriginal ];
54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 };
55 };
56 };
56 };
57 PasteDeploy = super.buildPythonPackage {
57 PasteDeploy = super.buildPythonPackage {
58 name = "PasteDeploy-1.5.2";
58 name = "PasteDeploy-1.5.2";
59 buildInputs = with self; [];
59 buildInputs = with self; [];
60 doCheck = false;
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 md5 = "352b7205c78c8de4987578d19431af3b";
64 md5 = "352b7205c78c8de4987578d19431af3b";
65 };
65 };
66 meta = {
66 meta = {
67 license = [ pkgs.lib.licenses.mit ];
67 license = [ pkgs.lib.licenses.mit ];
68 };
68 };
69 };
69 };
70 Pyro4 = super.buildPythonPackage {
70 Pyro4 = super.buildPythonPackage {
71 name = "Pyro4-4.41";
71 name = "Pyro4-4.41";
72 buildInputs = with self; [];
72 buildInputs = with self; [];
73 doCheck = false;
73 doCheck = false;
74 propagatedBuildInputs = with self; [serpent];
74 propagatedBuildInputs = with self; [serpent];
75 src = fetchurl {
75 src = fetchurl {
76 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
76 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
77 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
77 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
78 };
78 };
79 meta = {
79 meta = {
80 license = [ pkgs.lib.licenses.mit ];
80 license = [ pkgs.lib.licenses.mit ];
81 };
81 };
82 };
82 };
83 WebOb = super.buildPythonPackage {
83 WebOb = super.buildPythonPackage {
84 name = "WebOb-1.3.1";
84 name = "WebOb-1.3.1";
85 buildInputs = with self; [];
85 buildInputs = with self; [];
86 doCheck = false;
86 doCheck = false;
87 propagatedBuildInputs = with self; [];
87 propagatedBuildInputs = with self; [];
88 src = fetchurl {
88 src = fetchurl {
89 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
89 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
90 md5 = "20918251c5726956ba8fef22d1556177";
90 md5 = "20918251c5726956ba8fef22d1556177";
91 };
91 };
92 meta = {
92 meta = {
93 license = [ pkgs.lib.licenses.mit ];
93 license = [ pkgs.lib.licenses.mit ];
94 };
94 };
95 };
95 };
96 WebTest = super.buildPythonPackage {
96 WebTest = super.buildPythonPackage {
97 name = "WebTest-1.4.3";
97 name = "WebTest-1.4.3";
98 buildInputs = with self; [];
98 buildInputs = with self; [];
99 doCheck = false;
99 doCheck = false;
100 propagatedBuildInputs = with self; [WebOb];
100 propagatedBuildInputs = with self; [WebOb];
101 src = fetchurl {
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
102 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
103 md5 = "631ce728bed92c681a4020a36adbc353";
103 md5 = "631ce728bed92c681a4020a36adbc353";
104 };
104 };
105 meta = {
105 meta = {
106 license = [ pkgs.lib.licenses.mit ];
106 license = [ pkgs.lib.licenses.mit ];
107 };
107 };
108 };
108 };
109 backports.shutil-get-terminal-size = super.buildPythonPackage {
109 backports.shutil-get-terminal-size = super.buildPythonPackage {
110 name = "backports.shutil-get-terminal-size-1.0.0";
110 name = "backports.shutil-get-terminal-size-1.0.0";
111 buildInputs = with self; [];
111 buildInputs = with self; [];
112 doCheck = false;
112 doCheck = false;
113 propagatedBuildInputs = with self; [];
113 propagatedBuildInputs = with self; [];
114 src = fetchurl {
114 src = fetchurl {
115 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
115 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
116 md5 = "03267762480bd86b50580dc19dff3c66";
116 md5 = "03267762480bd86b50580dc19dff3c66";
117 };
117 };
118 meta = {
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
119 license = [ pkgs.lib.licenses.mit ];
120 };
120 };
121 };
121 };
122 configobj = super.buildPythonPackage {
122 configobj = super.buildPythonPackage {
123 name = "configobj-5.0.6";
123 name = "configobj-5.0.6";
124 buildInputs = with self; [];
124 buildInputs = with self; [];
125 doCheck = false;
125 doCheck = false;
126 propagatedBuildInputs = with self; [six];
126 propagatedBuildInputs = with self; [six];
127 src = fetchurl {
127 src = fetchurl {
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 };
130 };
131 meta = {
131 meta = {
132 license = [ pkgs.lib.licenses.bsdOriginal ];
132 license = [ pkgs.lib.licenses.bsdOriginal ];
133 };
133 };
134 };
134 };
135 cov-core = super.buildPythonPackage {
136 name = "cov-core-1.15.0";
137 buildInputs = with self; [];
138 doCheck = false;
139 propagatedBuildInputs = with self; [coverage];
140 src = fetchurl {
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
143 };
144 meta = {
145 license = [ pkgs.lib.licenses.mit ];
146 };
147 };
148 coverage = super.buildPythonPackage {
149 name = "coverage-3.7.1";
150 buildInputs = with self; [];
151 doCheck = false;
152 propagatedBuildInputs = with self; [];
153 src = fetchurl {
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
156 };
157 meta = {
158 license = [ pkgs.lib.licenses.bsdOriginal ];
159 };
160 };
135 decorator = super.buildPythonPackage {
161 decorator = super.buildPythonPackage {
136 name = "decorator-4.0.10";
162 name = "decorator-4.0.10";
137 buildInputs = with self; [];
163 buildInputs = with self; [];
138 doCheck = false;
164 doCheck = false;
139 propagatedBuildInputs = with self; [];
165 propagatedBuildInputs = with self; [];
140 src = fetchurl {
166 src = fetchurl {
141 url = "https://pypi.python.org/packages/13/8a/4eed41e338e8dcc13ca41c94b142d4d20c0de684ee5065523fee406ce76f/decorator-4.0.10.tar.gz";
167 url = "https://pypi.python.org/packages/13/8a/4eed41e338e8dcc13ca41c94b142d4d20c0de684ee5065523fee406ce76f/decorator-4.0.10.tar.gz";
142 md5 = "434b57fdc3230c500716c5aff8896100";
168 md5 = "434b57fdc3230c500716c5aff8896100";
143 };
169 };
144 meta = {
170 meta = {
145 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
146 };
172 };
147 };
173 };
148 dulwich = super.buildPythonPackage {
174 dulwich = super.buildPythonPackage {
149 name = "dulwich-0.13.0";
175 name = "dulwich-0.13.0";
150 buildInputs = with self; [];
176 buildInputs = with self; [];
151 doCheck = false;
177 doCheck = false;
152 propagatedBuildInputs = with self; [];
178 propagatedBuildInputs = with self; [];
153 src = fetchurl {
179 src = fetchurl {
154 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
155 md5 = "6dede0626657c2bd08f48ca1221eea91";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
156 };
182 };
157 meta = {
183 meta = {
158 license = [ pkgs.lib.licenses.gpl2Plus ];
184 license = [ pkgs.lib.licenses.gpl2Plus ];
159 };
185 };
160 };
186 };
161 enum34 = super.buildPythonPackage {
187 enum34 = super.buildPythonPackage {
162 name = "enum34-1.1.6";
188 name = "enum34-1.1.6";
163 buildInputs = with self; [];
189 buildInputs = with self; [];
164 doCheck = false;
190 doCheck = false;
165 propagatedBuildInputs = with self; [];
191 propagatedBuildInputs = with self; [];
166 src = fetchurl {
192 src = fetchurl {
167 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
168 md5 = "5f13a0841a61f7fc295c514490d120d0";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
169 };
195 };
170 meta = {
196 meta = {
171 license = [ pkgs.lib.licenses.bsdOriginal ];
197 license = [ pkgs.lib.licenses.bsdOriginal ];
172 };
198 };
173 };
199 };
174 greenlet = super.buildPythonPackage {
200 gevent = super.buildPythonPackage {
175 name = "greenlet-0.4.7";
201 name = "gevent-1.1.2";
202 buildInputs = with self; [];
203 doCheck = false;
204 propagatedBuildInputs = with self; [greenlet];
205 src = fetchurl {
206 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
207 md5 = "bb32a2f852a4997138014d5007215c6e";
208 };
209 meta = {
210 license = [ pkgs.lib.licenses.mit ];
211 };
212 };
213 gprof2dot = super.buildPythonPackage {
214 name = "gprof2dot-2016.10.13";
176 buildInputs = with self; [];
215 buildInputs = with self; [];
177 doCheck = false;
216 doCheck = false;
178 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = with self; [];
179 src = fetchurl {
218 src = fetchurl {
180 url = "https://pypi.python.org/packages/7a/9f/a1a0d9bdf3203ae1502c5a8434fe89d323599d78a106985bc327351a69d4/greenlet-0.4.7.zip";
219 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
181 md5 = "c2333a8ff30fa75c5d5ec0e67b461086";
220 md5 = "0125401f15fd2afe1df686a76c64a4fd";
221 };
222 meta = {
223 license = [ { fullName = "LGPL"; } ];
224 };
225 };
226 greenlet = super.buildPythonPackage {
227 name = "greenlet-0.4.10";
228 buildInputs = with self; [];
229 doCheck = false;
230 propagatedBuildInputs = with self; [];
231 src = fetchurl {
232 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
233 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
182 };
234 };
183 meta = {
235 meta = {
184 license = [ pkgs.lib.licenses.mit ];
236 license = [ pkgs.lib.licenses.mit ];
185 };
237 };
186 };
238 };
187 gunicorn = super.buildPythonPackage {
239 gunicorn = super.buildPythonPackage {
188 name = "gunicorn-19.6.0";
240 name = "gunicorn-19.6.0";
189 buildInputs = with self; [];
241 buildInputs = with self; [];
190 doCheck = false;
242 doCheck = false;
191 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = with self; [];
192 src = fetchurl {
244 src = fetchurl {
193 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
245 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
194 md5 = "338e5e8a83ea0f0625f768dba4597530";
246 md5 = "338e5e8a83ea0f0625f768dba4597530";
195 };
247 };
196 meta = {
248 meta = {
197 license = [ pkgs.lib.licenses.mit ];
249 license = [ pkgs.lib.licenses.mit ];
198 };
250 };
199 };
251 };
200 hgsubversion = super.buildPythonPackage {
252 hgsubversion = super.buildPythonPackage {
201 name = "hgsubversion-1.8.6";
253 name = "hgsubversion-1.8.6";
202 buildInputs = with self; [];
254 buildInputs = with self; [];
203 doCheck = false;
255 doCheck = false;
204 propagatedBuildInputs = with self; [mercurial subvertpy];
256 propagatedBuildInputs = with self; [mercurial subvertpy];
205 src = fetchurl {
257 src = fetchurl {
206 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
207 md5 = "9310cb266031cf8d0779885782a84a5b";
259 md5 = "9310cb266031cf8d0779885782a84a5b";
208 };
260 };
209 meta = {
261 meta = {
210 license = [ pkgs.lib.licenses.gpl1 ];
262 license = [ pkgs.lib.licenses.gpl1 ];
211 };
263 };
212 };
264 };
213 infrae.cache = super.buildPythonPackage {
265 infrae.cache = super.buildPythonPackage {
214 name = "infrae.cache-1.0.1";
266 name = "infrae.cache-1.0.1";
215 buildInputs = with self; [];
267 buildInputs = with self; [];
216 doCheck = false;
268 doCheck = false;
217 propagatedBuildInputs = with self; [Beaker repoze.lru];
269 propagatedBuildInputs = with self; [Beaker repoze.lru];
218 src = fetchurl {
270 src = fetchurl {
219 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
271 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
220 md5 = "b09076a766747e6ed2a755cc62088e32";
272 md5 = "b09076a766747e6ed2a755cc62088e32";
221 };
273 };
222 meta = {
274 meta = {
223 license = [ pkgs.lib.licenses.zpt21 ];
275 license = [ pkgs.lib.licenses.zpt21 ];
224 };
276 };
225 };
277 };
226 ipdb = super.buildPythonPackage {
278 ipdb = super.buildPythonPackage {
227 name = "ipdb-0.10.1";
279 name = "ipdb-0.10.1";
228 buildInputs = with self; [];
280 buildInputs = with self; [];
229 doCheck = false;
281 doCheck = false;
230 propagatedBuildInputs = with self; [ipython setuptools];
282 propagatedBuildInputs = with self; [ipython setuptools];
231 src = fetchurl {
283 src = fetchurl {
232 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
233 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
234 };
286 };
235 meta = {
287 meta = {
236 license = [ pkgs.lib.licenses.bsdOriginal ];
288 license = [ pkgs.lib.licenses.bsdOriginal ];
237 };
289 };
238 };
290 };
239 ipython = super.buildPythonPackage {
291 ipython = super.buildPythonPackage {
240 name = "ipython-5.1.0";
292 name = "ipython-5.1.0";
241 buildInputs = with self; [];
293 buildInputs = with self; [];
242 doCheck = false;
294 doCheck = false;
243 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
295 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
244 src = fetchurl {
296 src = fetchurl {
245 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
297 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
246 md5 = "47c8122420f65b58784cb4b9b4af35e3";
298 md5 = "47c8122420f65b58784cb4b9b4af35e3";
247 };
299 };
248 meta = {
300 meta = {
249 license = [ pkgs.lib.licenses.bsdOriginal ];
301 license = [ pkgs.lib.licenses.bsdOriginal ];
250 };
302 };
251 };
303 };
252 ipython-genutils = super.buildPythonPackage {
304 ipython-genutils = super.buildPythonPackage {
253 name = "ipython-genutils-0.1.0";
305 name = "ipython-genutils-0.1.0";
254 buildInputs = with self; [];
306 buildInputs = with self; [];
255 doCheck = false;
307 doCheck = false;
256 propagatedBuildInputs = with self; [];
308 propagatedBuildInputs = with self; [];
257 src = fetchurl {
309 src = fetchurl {
258 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
310 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
259 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
311 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
260 };
312 };
261 meta = {
313 meta = {
262 license = [ pkgs.lib.licenses.bsdOriginal ];
314 license = [ pkgs.lib.licenses.bsdOriginal ];
263 };
315 };
264 };
316 };
265 mercurial = super.buildPythonPackage {
317 mercurial = super.buildPythonPackage {
266 name = "mercurial-3.8.4";
318 name = "mercurial-4.0.2";
267 buildInputs = with self; [];
319 buildInputs = with self; [];
268 doCheck = false;
320 doCheck = false;
269 propagatedBuildInputs = with self; [];
321 propagatedBuildInputs = with self; [];
270 src = fetchurl {
322 src = fetchurl {
271 url = "https://pypi.python.org/packages/bc/16/b66eef0b70ee2b4ebb8e76622fe21bbed834606dd8c1bd30d6936ebf6f45/mercurial-3.8.4.tar.gz";
323 url = "https://pypi.python.org/packages/85/1b/0296aacd697228974a473d2508f013532f987ed6b1bacfe5abd6d5be6332/mercurial-4.0.2.tar.gz";
272 md5 = "cec2c3db688cb87142809089c6ae13e9";
324 md5 = "fa72a08e2723e4fa2a21c4e66437f3fa";
273 };
325 };
274 meta = {
326 meta = {
275 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
327 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
276 };
328 };
277 };
329 };
278 mock = super.buildPythonPackage {
330 mock = super.buildPythonPackage {
279 name = "mock-1.0.1";
331 name = "mock-1.0.1";
280 buildInputs = with self; [];
332 buildInputs = with self; [];
281 doCheck = false;
333 doCheck = false;
282 propagatedBuildInputs = with self; [];
334 propagatedBuildInputs = with self; [];
283 src = fetchurl {
335 src = fetchurl {
284 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
336 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
285 md5 = "869f08d003c289a97c1a6610faf5e913";
337 md5 = "869f08d003c289a97c1a6610faf5e913";
286 };
338 };
287 meta = {
339 meta = {
288 license = [ pkgs.lib.licenses.bsdOriginal ];
340 license = [ pkgs.lib.licenses.bsdOriginal ];
289 };
341 };
290 };
342 };
291 msgpack-python = super.buildPythonPackage {
343 msgpack-python = super.buildPythonPackage {
292 name = "msgpack-python-0.4.6";
344 name = "msgpack-python-0.4.8";
293 buildInputs = with self; [];
345 buildInputs = with self; [];
294 doCheck = false;
346 doCheck = false;
295 propagatedBuildInputs = with self; [];
347 propagatedBuildInputs = with self; [];
296 src = fetchurl {
348 src = fetchurl {
297 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
349 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
298 md5 = "8b317669314cf1bc881716cccdaccb30";
350 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
299 };
351 };
300 meta = {
352 meta = {
301 license = [ pkgs.lib.licenses.asl20 ];
353 license = [ pkgs.lib.licenses.asl20 ];
302 };
354 };
303 };
355 };
304 pathlib2 = super.buildPythonPackage {
356 pathlib2 = super.buildPythonPackage {
305 name = "pathlib2-2.1.0";
357 name = "pathlib2-2.1.0";
306 buildInputs = with self; [];
358 buildInputs = with self; [];
307 doCheck = false;
359 doCheck = false;
308 propagatedBuildInputs = with self; [six];
360 propagatedBuildInputs = with self; [six];
309 src = fetchurl {
361 src = fetchurl {
310 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
311 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
312 };
364 };
313 meta = {
365 meta = {
314 license = [ pkgs.lib.licenses.mit ];
366 license = [ pkgs.lib.licenses.mit ];
315 };
367 };
316 };
368 };
317 pexpect = super.buildPythonPackage {
369 pexpect = super.buildPythonPackage {
318 name = "pexpect-4.2.1";
370 name = "pexpect-4.2.1";
319 buildInputs = with self; [];
371 buildInputs = with self; [];
320 doCheck = false;
372 doCheck = false;
321 propagatedBuildInputs = with self; [ptyprocess];
373 propagatedBuildInputs = with self; [ptyprocess];
322 src = fetchurl {
374 src = fetchurl {
323 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
375 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
324 md5 = "3694410001a99dff83f0b500a1ca1c95";
376 md5 = "3694410001a99dff83f0b500a1ca1c95";
325 };
377 };
326 meta = {
378 meta = {
327 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
379 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
328 };
380 };
329 };
381 };
330 pickleshare = super.buildPythonPackage {
382 pickleshare = super.buildPythonPackage {
331 name = "pickleshare-0.7.4";
383 name = "pickleshare-0.7.4";
332 buildInputs = with self; [];
384 buildInputs = with self; [];
333 doCheck = false;
385 doCheck = false;
334 propagatedBuildInputs = with self; [pathlib2];
386 propagatedBuildInputs = with self; [pathlib2];
335 src = fetchurl {
387 src = fetchurl {
336 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
388 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
337 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
389 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
338 };
390 };
339 meta = {
391 meta = {
340 license = [ pkgs.lib.licenses.mit ];
392 license = [ pkgs.lib.licenses.mit ];
341 };
393 };
342 };
394 };
343 prompt-toolkit = super.buildPythonPackage {
395 prompt-toolkit = super.buildPythonPackage {
344 name = "prompt-toolkit-1.0.9";
396 name = "prompt-toolkit-1.0.9";
345 buildInputs = with self; [];
397 buildInputs = with self; [];
346 doCheck = false;
398 doCheck = false;
347 propagatedBuildInputs = with self; [six wcwidth];
399 propagatedBuildInputs = with self; [six wcwidth];
348 src = fetchurl {
400 src = fetchurl {
349 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
401 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
350 md5 = "a39f91a54308fb7446b1a421c11f227c";
402 md5 = "a39f91a54308fb7446b1a421c11f227c";
351 };
403 };
352 meta = {
404 meta = {
353 license = [ pkgs.lib.licenses.bsdOriginal ];
405 license = [ pkgs.lib.licenses.bsdOriginal ];
354 };
406 };
355 };
407 };
356 ptyprocess = super.buildPythonPackage {
408 ptyprocess = super.buildPythonPackage {
357 name = "ptyprocess-0.5.1";
409 name = "ptyprocess-0.5.1";
358 buildInputs = with self; [];
410 buildInputs = with self; [];
359 doCheck = false;
411 doCheck = false;
360 propagatedBuildInputs = with self; [];
412 propagatedBuildInputs = with self; [];
361 src = fetchurl {
413 src = fetchurl {
362 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
363 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
364 };
416 };
365 meta = {
417 meta = {
366 license = [ ];
418 license = [ ];
367 };
419 };
368 };
420 };
369 py = super.buildPythonPackage {
421 py = super.buildPythonPackage {
370 name = "py-1.4.29";
422 name = "py-1.4.31";
371 buildInputs = with self; [];
423 buildInputs = with self; [];
372 doCheck = false;
424 doCheck = false;
373 propagatedBuildInputs = with self; [];
425 propagatedBuildInputs = with self; [];
374 src = fetchurl {
426 src = fetchurl {
375 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
376 md5 = "c28e0accba523a29b35a48bb703fb96c";
428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
377 };
429 };
378 meta = {
430 meta = {
379 license = [ pkgs.lib.licenses.mit ];
431 license = [ pkgs.lib.licenses.mit ];
380 };
432 };
381 };
433 };
382 pygments = super.buildPythonPackage {
434 pygments = super.buildPythonPackage {
383 name = "pygments-2.1.3";
435 name = "pygments-2.1.3";
384 buildInputs = with self; [];
436 buildInputs = with self; [];
385 doCheck = false;
437 doCheck = false;
386 propagatedBuildInputs = with self; [];
438 propagatedBuildInputs = with self; [];
387 src = fetchurl {
439 src = fetchurl {
388 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
440 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
389 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
441 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
390 };
442 };
391 meta = {
443 meta = {
392 license = [ pkgs.lib.licenses.bsdOriginal ];
444 license = [ pkgs.lib.licenses.bsdOriginal ];
393 };
445 };
394 };
446 };
395 pyramid = super.buildPythonPackage {
447 pyramid = super.buildPythonPackage {
396 name = "pyramid-1.6.1";
448 name = "pyramid-1.6.1";
397 buildInputs = with self; [];
449 buildInputs = with self; [];
398 doCheck = false;
450 doCheck = false;
399 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
400 src = fetchurl {
452 src = fetchurl {
401 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
453 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
402 md5 = "b18688ff3cc33efdbb098a35b45dd122";
454 md5 = "b18688ff3cc33efdbb098a35b45dd122";
403 };
455 };
404 meta = {
456 meta = {
405 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
406 };
458 };
407 };
459 };
408 pyramid-jinja2 = super.buildPythonPackage {
460 pyramid-jinja2 = super.buildPythonPackage {
409 name = "pyramid-jinja2-2.5";
461 name = "pyramid-jinja2-2.5";
410 buildInputs = with self; [];
462 buildInputs = with self; [];
411 doCheck = false;
463 doCheck = false;
412 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
464 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
413 src = fetchurl {
465 src = fetchurl {
414 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
466 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
415 md5 = "07cb6547204ac5e6f0b22a954ccee928";
467 md5 = "07cb6547204ac5e6f0b22a954ccee928";
416 };
468 };
417 meta = {
469 meta = {
418 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
470 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
419 };
471 };
420 };
472 };
421 pyramid-mako = super.buildPythonPackage {
473 pyramid-mako = super.buildPythonPackage {
422 name = "pyramid-mako-1.0.2";
474 name = "pyramid-mako-1.0.2";
423 buildInputs = with self; [];
475 buildInputs = with self; [];
424 doCheck = false;
476 doCheck = false;
425 propagatedBuildInputs = with self; [pyramid Mako];
477 propagatedBuildInputs = with self; [pyramid Mako];
426 src = fetchurl {
478 src = fetchurl {
427 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
479 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
428 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
480 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
429 };
481 };
430 meta = {
482 meta = {
431 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
483 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
432 };
484 };
433 };
485 };
434 pytest = super.buildPythonPackage {
486 pytest = super.buildPythonPackage {
435 name = "pytest-2.8.5";
487 name = "pytest-3.0.5";
436 buildInputs = with self; [];
488 buildInputs = with self; [];
437 doCheck = false;
489 doCheck = false;
438 propagatedBuildInputs = with self; [py];
490 propagatedBuildInputs = with self; [py];
439 src = fetchurl {
491 src = fetchurl {
440 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
441 md5 = "8493b06f700862f1294298d6c1b715a9";
493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
494 };
495 meta = {
496 license = [ pkgs.lib.licenses.mit ];
497 };
498 };
499 pytest-catchlog = super.buildPythonPackage {
500 name = "pytest-catchlog-1.2.2";
501 buildInputs = with self; [];
502 doCheck = false;
503 propagatedBuildInputs = with self; [py pytest];
504 src = fetchurl {
505 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
506 md5 = "09d890c54c7456c818102b7ff8c182c8";
442 };
507 };
443 meta = {
508 meta = {
444 license = [ pkgs.lib.licenses.mit ];
509 license = [ pkgs.lib.licenses.mit ];
445 };
510 };
446 };
511 };
512 pytest-cov = super.buildPythonPackage {
513 name = "pytest-cov-2.4.0";
514 buildInputs = with self; [];
515 doCheck = false;
516 propagatedBuildInputs = with self; [pytest coverage];
517 src = fetchurl {
518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
520 };
521 meta = {
522 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
523 };
524 };
525 pytest-profiling = super.buildPythonPackage {
526 name = "pytest-profiling-1.2.2";
527 buildInputs = with self; [];
528 doCheck = false;
529 propagatedBuildInputs = with self; [six pytest gprof2dot];
530 src = fetchurl {
531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
533 };
534 meta = {
535 license = [ pkgs.lib.licenses.mit ];
536 };
537 };
538 pytest-runner = super.buildPythonPackage {
539 name = "pytest-runner-2.9";
540 buildInputs = with self; [];
541 doCheck = false;
542 propagatedBuildInputs = with self; [];
543 src = fetchurl {
544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
546 };
547 meta = {
548 license = [ pkgs.lib.licenses.mit ];
549 };
550 };
551 pytest-sugar = super.buildPythonPackage {
552 name = "pytest-sugar-0.7.1";
553 buildInputs = with self; [];
554 doCheck = false;
555 propagatedBuildInputs = with self; [pytest termcolor];
556 src = fetchurl {
557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
559 };
560 meta = {
561 license = [ pkgs.lib.licenses.bsdOriginal ];
562 };
563 };
564 pytest-timeout = super.buildPythonPackage {
565 name = "pytest-timeout-1.2.0";
566 buildInputs = with self; [];
567 doCheck = false;
568 propagatedBuildInputs = with self; [pytest];
569 src = fetchurl {
570 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
571 md5 = "83607d91aa163562c7ee835da57d061d";
572 };
573 meta = {
574 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
575 };
576 };
447 repoze.lru = super.buildPythonPackage {
577 repoze.lru = super.buildPythonPackage {
448 name = "repoze.lru-0.6";
578 name = "repoze.lru-0.6";
449 buildInputs = with self; [];
579 buildInputs = with self; [];
450 doCheck = false;
580 doCheck = false;
451 propagatedBuildInputs = with self; [];
581 propagatedBuildInputs = with self; [];
452 src = fetchurl {
582 src = fetchurl {
453 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
583 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
454 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
584 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
455 };
585 };
456 meta = {
586 meta = {
457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
587 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
458 };
588 };
459 };
589 };
460 rhodecode-vcsserver = super.buildPythonPackage {
590 rhodecode-vcsserver = super.buildPythonPackage {
461 name = "rhodecode-vcsserver-4.5.2";
591 name = "rhodecode-vcsserver-4.6.0";
462 buildInputs = with self; [mock pytest pytest-sugar WebTest];
592 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
463 doCheck = true;
593 doCheck = true;
464 propagatedBuildInputs = with self; [configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid Pyro4 simplejson subprocess32 waitress WebOb];
594 propagatedBuildInputs = with self; [Beaker configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb gevent greenlet gunicorn waitress Pyro4 serpent pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
465 src = ./.;
595 src = ./.;
466 meta = {
596 meta = {
467 license = [ pkgs.lib.licenses.gpl3 { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
597 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
468 };
598 };
469 };
599 };
470 serpent = super.buildPythonPackage {
600 serpent = super.buildPythonPackage {
471 name = "serpent-1.12";
601 name = "serpent-1.15";
472 buildInputs = with self; [];
602 buildInputs = with self; [];
473 doCheck = false;
603 doCheck = false;
474 propagatedBuildInputs = with self; [];
604 propagatedBuildInputs = with self; [];
475 src = fetchurl {
605 src = fetchurl {
476 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
606 url = "https://pypi.python.org/packages/7b/38/b2b27673a882ff2ea5871bb3e3e6b496ebbaafd1612e51990ffb158b9254/serpent-1.15.tar.gz";
477 md5 = "05869ac7b062828b34f8f927f0457b65";
607 md5 = "e27b1aad5c218e16442f52abb7c7053a";
478 };
608 };
479 meta = {
609 meta = {
480 license = [ pkgs.lib.licenses.mit ];
610 license = [ pkgs.lib.licenses.mit ];
481 };
611 };
482 };
612 };
483 setuptools = super.buildPythonPackage {
613 setuptools = super.buildPythonPackage {
484 name = "setuptools-20.8.1";
614 name = "setuptools-30.1.0";
485 buildInputs = with self; [];
615 buildInputs = with self; [];
486 doCheck = false;
616 doCheck = false;
487 propagatedBuildInputs = with self; [];
617 propagatedBuildInputs = with self; [];
488 src = fetchurl {
618 src = fetchurl {
489 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
619 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
490 md5 = "fe58a5cac0df20bb83942b252a4b0543";
620 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
491 };
621 };
492 meta = {
622 meta = {
493 license = [ pkgs.lib.licenses.mit ];
623 license = [ pkgs.lib.licenses.mit ];
494 };
624 };
495 };
625 };
496 simplegeneric = super.buildPythonPackage {
626 simplegeneric = super.buildPythonPackage {
497 name = "simplegeneric-0.8.1";
627 name = "simplegeneric-0.8.1";
498 buildInputs = with self; [];
628 buildInputs = with self; [];
499 doCheck = false;
629 doCheck = false;
500 propagatedBuildInputs = with self; [];
630 propagatedBuildInputs = with self; [];
501 src = fetchurl {
631 src = fetchurl {
502 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
632 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
503 md5 = "f9c1fab00fd981be588fc32759f474e3";
633 md5 = "f9c1fab00fd981be588fc32759f474e3";
504 };
634 };
505 meta = {
635 meta = {
506 license = [ pkgs.lib.licenses.zpt21 ];
636 license = [ pkgs.lib.licenses.zpt21 ];
507 };
637 };
508 };
638 };
509 simplejson = super.buildPythonPackage {
639 simplejson = super.buildPythonPackage {
510 name = "simplejson-3.7.2";
640 name = "simplejson-3.7.2";
511 buildInputs = with self; [];
641 buildInputs = with self; [];
512 doCheck = false;
642 doCheck = false;
513 propagatedBuildInputs = with self; [];
643 propagatedBuildInputs = with self; [];
514 src = fetchurl {
644 src = fetchurl {
515 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
645 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
516 md5 = "a5fc7d05d4cb38492285553def5d4b46";
646 md5 = "a5fc7d05d4cb38492285553def5d4b46";
517 };
647 };
518 meta = {
648 meta = {
519 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
649 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
520 };
650 };
521 };
651 };
522 six = super.buildPythonPackage {
652 six = super.buildPythonPackage {
523 name = "six-1.9.0";
653 name = "six-1.9.0";
524 buildInputs = with self; [];
654 buildInputs = with self; [];
525 doCheck = false;
655 doCheck = false;
526 propagatedBuildInputs = with self; [];
656 propagatedBuildInputs = with self; [];
527 src = fetchurl {
657 src = fetchurl {
528 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
658 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
529 md5 = "476881ef4012262dfc8adc645ee786c4";
659 md5 = "476881ef4012262dfc8adc645ee786c4";
530 };
660 };
531 meta = {
661 meta = {
532 license = [ pkgs.lib.licenses.mit ];
662 license = [ pkgs.lib.licenses.mit ];
533 };
663 };
534 };
664 };
535 subprocess32 = super.buildPythonPackage {
665 subprocess32 = super.buildPythonPackage {
536 name = "subprocess32-3.2.6";
666 name = "subprocess32-3.2.6";
537 buildInputs = with self; [];
667 buildInputs = with self; [];
538 doCheck = false;
668 doCheck = false;
539 propagatedBuildInputs = with self; [];
669 propagatedBuildInputs = with self; [];
540 src = fetchurl {
670 src = fetchurl {
541 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
671 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
542 md5 = "754c5ab9f533e764f931136974b618f1";
672 md5 = "754c5ab9f533e764f931136974b618f1";
543 };
673 };
544 meta = {
674 meta = {
545 license = [ pkgs.lib.licenses.psfl ];
675 license = [ pkgs.lib.licenses.psfl ];
546 };
676 };
547 };
677 };
548 subvertpy = super.buildPythonPackage {
678 subvertpy = super.buildPythonPackage {
549 name = "subvertpy-0.9.3";
679 name = "subvertpy-0.9.3";
550 buildInputs = with self; [];
680 buildInputs = with self; [];
551 doCheck = false;
681 doCheck = false;
552 propagatedBuildInputs = with self; [];
682 propagatedBuildInputs = with self; [];
553 src = fetchurl {
683 src = fetchurl {
554 url = "https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz";
684 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
555 md5 = "7b745a47128050ea5a73efcd913ec1cf";
685 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
556 };
686 };
557 meta = {
687 meta = {
558 license = [ pkgs.lib.licenses.lgpl21Plus ];
688 license = [ pkgs.lib.licenses.lgpl21Plus ];
559 };
689 };
560 };
690 };
691 termcolor = super.buildPythonPackage {
692 name = "termcolor-1.1.0";
693 buildInputs = with self; [];
694 doCheck = false;
695 propagatedBuildInputs = with self; [];
696 src = fetchurl {
697 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
698 md5 = "043e89644f8909d462fbbfa511c768df";
699 };
700 meta = {
701 license = [ pkgs.lib.licenses.mit ];
702 };
703 };
561 traitlets = super.buildPythonPackage {
704 traitlets = super.buildPythonPackage {
562 name = "traitlets-4.3.1";
705 name = "traitlets-4.3.1";
563 buildInputs = with self; [];
706 buildInputs = with self; [];
564 doCheck = false;
707 doCheck = false;
565 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
708 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
566 src = fetchurl {
709 src = fetchurl {
567 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
710 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
568 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
711 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
569 };
712 };
570 meta = {
713 meta = {
571 license = [ pkgs.lib.licenses.bsdOriginal ];
714 license = [ pkgs.lib.licenses.bsdOriginal ];
572 };
715 };
573 };
716 };
574 translationstring = super.buildPythonPackage {
717 translationstring = super.buildPythonPackage {
575 name = "translationstring-1.3";
718 name = "translationstring-1.3";
576 buildInputs = with self; [];
719 buildInputs = with self; [];
577 doCheck = false;
720 doCheck = false;
578 propagatedBuildInputs = with self; [];
721 propagatedBuildInputs = with self; [];
579 src = fetchurl {
722 src = fetchurl {
580 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
723 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
581 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
724 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
582 };
725 };
583 meta = {
726 meta = {
584 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
727 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
585 };
728 };
586 };
729 };
587 venusian = super.buildPythonPackage {
730 venusian = super.buildPythonPackage {
588 name = "venusian-1.0";
731 name = "venusian-1.0";
589 buildInputs = with self; [];
732 buildInputs = with self; [];
590 doCheck = false;
733 doCheck = false;
591 propagatedBuildInputs = with self; [];
734 propagatedBuildInputs = with self; [];
592 src = fetchurl {
735 src = fetchurl {
593 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
736 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
594 md5 = "dccf2eafb7113759d60c86faf5538756";
737 md5 = "dccf2eafb7113759d60c86faf5538756";
595 };
738 };
596 meta = {
739 meta = {
597 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
740 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
598 };
741 };
599 };
742 };
600 waitress = super.buildPythonPackage {
743 waitress = super.buildPythonPackage {
601 name = "waitress-0.8.9";
744 name = "waitress-1.0.1";
602 buildInputs = with self; [];
745 buildInputs = with self; [];
603 doCheck = false;
746 doCheck = false;
604 propagatedBuildInputs = with self; [setuptools];
747 propagatedBuildInputs = with self; [];
605 src = fetchurl {
748 src = fetchurl {
606 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
749 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
607 md5 = "da3f2e62b3676be5dd630703a68e2a04";
750 md5 = "dda92358a7569669086155923a46e57c";
608 };
751 };
609 meta = {
752 meta = {
610 license = [ pkgs.lib.licenses.zpt21 ];
753 license = [ pkgs.lib.licenses.zpt21 ];
611 };
754 };
612 };
755 };
613 wcwidth = super.buildPythonPackage {
756 wcwidth = super.buildPythonPackage {
614 name = "wcwidth-0.1.7";
757 name = "wcwidth-0.1.7";
615 buildInputs = with self; [];
758 buildInputs = with self; [];
616 doCheck = false;
759 doCheck = false;
617 propagatedBuildInputs = with self; [];
760 propagatedBuildInputs = with self; [];
618 src = fetchurl {
761 src = fetchurl {
619 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
762 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
620 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
763 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
621 };
764 };
622 meta = {
765 meta = {
623 license = [ pkgs.lib.licenses.mit ];
766 license = [ pkgs.lib.licenses.mit ];
624 };
767 };
625 };
768 };
626 wheel = super.buildPythonPackage {
769 wheel = super.buildPythonPackage {
627 name = "wheel-0.29.0";
770 name = "wheel-0.29.0";
628 buildInputs = with self; [];
771 buildInputs = with self; [];
629 doCheck = false;
772 doCheck = false;
630 propagatedBuildInputs = with self; [];
773 propagatedBuildInputs = with self; [];
631 src = fetchurl {
774 src = fetchurl {
632 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
775 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
633 md5 = "555a67e4507cedee23a0deb9651e452f";
776 md5 = "555a67e4507cedee23a0deb9651e452f";
634 };
777 };
635 meta = {
778 meta = {
636 license = [ pkgs.lib.licenses.mit ];
779 license = [ pkgs.lib.licenses.mit ];
637 };
780 };
638 };
781 };
639 zope.deprecation = super.buildPythonPackage {
782 zope.deprecation = super.buildPythonPackage {
640 name = "zope.deprecation-4.1.1";
783 name = "zope.deprecation-4.1.2";
641 buildInputs = with self; [];
784 buildInputs = with self; [];
642 doCheck = false;
785 doCheck = false;
643 propagatedBuildInputs = with self; [setuptools];
786 propagatedBuildInputs = with self; [setuptools];
644 src = fetchurl {
787 src = fetchurl {
645 url = "https://pypi.python.org/packages/c5/c9/e760f131fcde817da6c186a3f4952b8f206b7eeb269bb6f0836c715c5f20/zope.deprecation-4.1.1.tar.gz";
788 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
646 md5 = "ce261b9384066f7e13b63525778430cb";
789 md5 = "e9a663ded58f4f9f7881beb56cae2782";
647 };
790 };
648 meta = {
791 meta = {
649 license = [ pkgs.lib.licenses.zpt21 ];
792 license = [ pkgs.lib.licenses.zpt21 ];
650 };
793 };
651 };
794 };
652 zope.interface = super.buildPythonPackage {
795 zope.interface = super.buildPythonPackage {
653 name = "zope.interface-4.1.3";
796 name = "zope.interface-4.1.3";
654 buildInputs = with self; [];
797 buildInputs = with self; [];
655 doCheck = false;
798 doCheck = false;
656 propagatedBuildInputs = with self; [setuptools];
799 propagatedBuildInputs = with self; [setuptools];
657 src = fetchurl {
800 src = fetchurl {
658 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
801 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
659 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
802 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
660 };
803 };
661 meta = {
804 meta = {
662 license = [ pkgs.lib.licenses.zpt21 ];
805 license = [ pkgs.lib.licenses.zpt21 ];
663 };
806 };
664 };
807 };
665
808
666 ### Test requirements
809 ### Test requirements
667
810
668 pytest-sugar = super.buildPythonPackage {
811
669 name = "pytest-sugar-0.7.1";
670 buildInputs = with self; [];
671 doCheck = false;
672 propagatedBuildInputs = with self; [pytest termcolor];
673 src = fetchurl {
674 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
675 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
676 };
677 meta = {
678 license = [ pkgs.lib.licenses.bsdOriginal ];
679 };
680 };
681 termcolor = super.buildPythonPackage {
682 name = "termcolor-1.1.0";
683 buildInputs = with self; [];
684 doCheck = false;
685 propagatedBuildInputs = with self; [];
686 src = fetchurl {
687 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
688 md5 = "043e89644f8909d462fbbfa511c768df";
689 };
690 meta = {
691 license = [ pkgs.lib.licenses.mit ];
692 };
693 };
694 }
812 }
@@ -1,35 +1,43 b''
1 # core
2 setuptools==30.1.0
3
1 Beaker==1.7.0
4 Beaker==1.7.0
2 configobj==5.0.6
5 configobj==5.0.6
3 dulwich==0.13.0
6 dulwich==0.13.0
4 hgsubversion==1.8.6
7 hgsubversion==1.8.6
5 infrae.cache==1.0.1
8 infrae.cache==1.0.1
6 ipdb==0.10.1
9 mercurial==4.0.2
7 mercurial==3.8.4
10 msgpack-python==0.4.8
8 msgpack-python==0.4.6
9 py==1.4.29
10 pyramid==1.6.1
11 pyramid==1.6.1
11 pyramid-jinja2==2.5
12 pyramid-jinja2==2.5
12 pyramid-mako==1.0.2
13 pyramid-mako==1.0.2
13 Pyro4==4.41
14 pytest==2.8.5
15 repoze.lru==0.6
14 repoze.lru==0.6
16 serpent==1.12
17 setuptools==20.8.1
18 simplejson==3.7.2
15 simplejson==3.7.2
19 subprocess32==3.2.6
16 subprocess32==3.2.6
20 # TODO: johbo: This version is not in source on PyPI currently,
17
21 # change back once this or a future version is available
18 # Custom subvertpy that is not available on pypi.
22 https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz#md5=7b745a47128050ea5a73efcd913ec1cf
19 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
20
23 six==1.9.0
21 six==1.9.0
24 translationstring==1.3
22 translationstring==1.3
25 waitress==0.8.9
26 WebOb==1.3.1
23 WebOb==1.3.1
27 wheel==0.29.0
24 wheel==0.29.0
28 zope.deprecation==4.1.1
25 zope.deprecation==4.1.2
29 zope.interface==4.1.3
26 zope.interface==4.1.3
30 greenlet==0.4.7
27
28 ## debug
29 ipdb==0.10.1
30 ipython==5.1.0
31
32 # http servers
33 gevent==1.1.2
34 greenlet==0.4.10
31 gunicorn==19.6.0
35 gunicorn==19.6.0
36 waitress==1.0.1
32
37
33 # Test related requirements
38 # Pyro/Deprecated TODO(Marcink): remove in 4.7 release.
34 mock==1.0.1
39 Pyro4==4.41
35 WebTest==1.4.3
40 serpent==1.15
41
42 ## test related requirements
43 -r requirements_test.txt
@@ -1,103 +1,132 b''
1 # -*- coding: utf-8 -*-
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
3 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
4 #
4 # This program is free software; you can redistribute it and/or modify
5 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
6 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
7 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
8 # (at your option) any later version.
8 #
9 #
9 # This program is distributed in the hope that it will be useful,
10 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 # GNU General Public License for more details.
13 #
14 #
14 # You should have received a copy of the GNU General Public License
15 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
16 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
18
19 # Import early to make sure things are patched up properly
18 from setuptools import setup, find_packages
20 from setuptools import setup, find_packages
19 from setuptools.command.test import test as TestCommand
21
22 import os
23 import sys
24 import pkgutil
25 import platform
26
27 from pip.download import PipSession
28 from pip.req import parse_requirements
29
20 from codecs import open
30 from codecs import open
21 from os import path
22 import pkgutil
23 import sys
24
31
25
32
26 here = path.abspath(path.dirname(__file__))
33 if sys.version_info < (2, 7):
34 raise Exception('VCSServer requires Python 2.7 or later')
35
36 here = os.path.abspath(os.path.dirname(__file__))
37
38 # defines current platform
39 __platform__ = platform.system()
40 __license__ = 'GPL V3'
41 __author__ = 'RhodeCode GmbH'
42 __url__ = 'https://code.rhodecode.com'
43 is_windows = __platform__ in ('Windows',)
44
45
46 def _get_requirements(req_filename, exclude=None, extras=None):
47 extras = extras or []
48 exclude = exclude or []
27
49
28 with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
50 try:
29 long_description = f.read()
51 parsed = parse_requirements(
52 os.path.join(here, req_filename), session=PipSession())
53 except TypeError:
54 # try pip < 6.0.0, that doesn't support session
55 parsed = parse_requirements(os.path.join(here, req_filename))
56
57 requirements = []
58 for ir in parsed:
59 if ir.req and ir.name not in exclude:
60 requirements.append(str(ir.req))
61 return requirements + extras
62
63
64 # requirements extract
65 setup_requirements = ['pytest-runner']
66 install_requirements = _get_requirements(
67 'requirements.txt', exclude=['setuptools'])
68 test_requirements = _get_requirements(
69 'requirements_test.txt', extras=['configobj'])
30
70
31
71
32 def get_version():
72 def get_version():
33 version = pkgutil.get_data('vcsserver', 'VERSION')
73 version = pkgutil.get_data('vcsserver', 'VERSION')
34 return version.strip()
74 return version.strip()
35
75
36
76
37 class PyTest(TestCommand):
77 # additional files that goes into package itself
38 user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
78 package_data = {
79 '': ['*.txt', '*.rst'],
80 'configs': ['*.ini'],
81 'vcsserver': ['VERSION'],
82 }
39
83
40 def initialize_options(self):
84 description = 'Version Control System Server'
41 TestCommand.initialize_options(self)
85 keywords = ' '.join([
42 self.pytest_args = []
86 'CLI', 'RhodeCode', 'RhodeCode Enterprise', 'RhodeCode Tools'])
43
87
44 def finalize_options(self):
88 # README/DESCRIPTION generation
45 TestCommand.finalize_options(self)
89 readme_file = 'README.rst'
46 self.test_args = []
90 changelog_file = 'CHANGES.rst'
47 self.test_suite = True
91 try:
48
92 long_description = open(readme_file).read() + '\n\n' + \
49 def run_tests(self):
93 open(changelog_file).read()
50 # import here, cause outside the eggs aren't loaded
94 except IOError as err:
51 import pytest
95 sys.stderr.write(
52 errno = pytest.main(self.pytest_args)
96 "[WARNING] Cannot find file specified as long_description (%s)\n "
53 sys.exit(errno)
97 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
98 long_description = description
54
99
55
100
56 setup(
101 setup(
57 name='rhodecode-vcsserver',
102 name='rhodecode-vcsserver',
58 version=get_version(),
103 version=get_version(),
59 description='Version Control System Server',
104 description=description,
60 long_description=long_description,
105 long_description=long_description,
61 url='http://www.rhodecode.com',
106 keywords=keywords,
62 author='RhodeCode GmbH',
107 license=__license__,
108 author=__author__,
63 author_email='marcin@rhodecode.com',
109 author_email='marcin@rhodecode.com',
64 cmdclass={'test': PyTest},
110 url=__url__,
65 license='GPLv3',
111 setup_requires=setup_requirements,
112 install_requires=install_requirements,
113 tests_require=test_requirements,
114 zip_safe=False,
115 packages=find_packages(exclude=["docs", "tests*"]),
116 package_data=package_data,
117 include_package_data=True,
66 classifiers=[
118 classifiers=[
67 'Development Status :: 5 - Production/Stable',
119 'Development Status :: 6 - Mature',
68 'Intended Audience :: Developers',
120 'Intended Audience :: Developers',
121 'Operating System :: OS Independent',
69 'Topic :: Software Development :: Version Control',
122 'Topic :: Software Development :: Version Control',
70 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
123 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
71 'Programming Language :: Python :: 2.7',
124 'Programming Language :: Python :: 2.7',
72 ],
125 ],
73 packages=find_packages(),
74 tests_require=[
75 'mock',
76 'pytest',
77 'pytest-sugar',
78 'WebTest',
79 ],
80 install_requires=[
81 'configobj',
82 'dulwich',
83 'hgsubversion',
84 'infrae.cache',
85 'mercurial',
86 'msgpack-python',
87 'pyramid',
88 'Pyro4',
89 'simplejson',
90 'subprocess32',
91 'waitress',
92 'WebOb',
93 ],
94 package_data={
95 'vcsserver': ['VERSION'],
96 },
97 entry_points={
126 entry_points={
98 'console_scripts': [
127 'console_scripts': [
99 'vcsserver=vcsserver.main:main',
128 'vcsserver=vcsserver.main:main',
100 ],
129 ],
101 'paste.app_factory': ['main=vcsserver.http_main:main']
130 'paste.app_factory': ['main=vcsserver.http_main:main']
102 },
131 },
103 )
132 )
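
The rewritten setup.py above feeds install_requires and tests_require from the requirements files through _get_requirements(), dropping excluded names and appending extras. The stand-alone sketch below is not the pip-based helper itself; it only mimics that exclude/extras behaviour on a hypothetical in-memory list of requirement lines (Python 2, matching the project):

    # Stand-in sketch, not the pip-based _get_requirements(): it only shows
    # the exclude/extras handling on hypothetical requirement lines.
    def get_requirements_sketch(lines, exclude=None, extras=None):
        exclude = exclude or []
        extras = extras or []
        requirements = []
        for line in lines:
            line = line.strip()
            # skip comments, "-r" includes and URL requirements in this sketch
            if not line or line.startswith(('#', '-r', 'http')):
                continue
            name = line.split('==')[0]
            if name not in exclude:
                requirements.append(line)
        return requirements + extras

    print get_requirements_sketch(
        ['setuptools==30.1.0', 'Beaker==1.7.0', '# a comment'],
        exclude=['setuptools'],
        extras=['configobj'])
    # -> ['Beaker==1.7.0', 'configobj']
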
@@ -1,1 +1,1 b''
1 4.5.2 No newline at end of file
1 4.6.0 No newline at end of file
@@ -1,21 +1,21 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import pkgutil
18 import pkgutil
19
19
20
20
21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
21 __version__ = pkgutil.get_data('vcsserver', 'VERSION').strip()
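
vcsserver/__init__.py resolves its version at import time from the VERSION file that setup.py ships as package data. A hypothetical interactive check, assuming the package is importable from an install or a source checkout:

    # Assumes vcsserver is importable and VERSION is available as package data.
    import pkgutil
    version = pkgutil.get_data('vcsserver', 'VERSION').strip()
    print version  # '4.6.0' after this bump
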
@@ -1,82 +1,82 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import urlparse
19 import urlparse
20
20
21 log = logging.getLogger(__name__)
21 log = logging.getLogger(__name__)
22
22
23
23
24 class RepoFactory(object):
24 class RepoFactory(object):
25 """
25 """
26 Utility to create instances of repository
26 Utility to create instances of repository
27
27
28 It provides internal caching of the `repo` object based on
28 It provides internal caching of the `repo` object based on
29 the :term:`call context`.
29 the :term:`call context`.
30 """
30 """
31
31
32 def __init__(self, repo_cache):
32 def __init__(self, repo_cache):
33 self._cache = repo_cache
33 self._cache = repo_cache
34
34
35 def _create_config(self, path, config):
35 def _create_config(self, path, config):
36 config = {}
36 config = {}
37 return config
37 return config
38
38
39 def _create_repo(self, wire, create):
39 def _create_repo(self, wire, create):
40 raise NotImplementedError()
40 raise NotImplementedError()
41
41
42 def repo(self, wire, create=False):
42 def repo(self, wire, create=False):
43 """
43 """
44 Get a repository instance for the given path.
44 Get a repository instance for the given path.
45
45
46 Uses internally the low level beaker API since the decorators introduce
46 Uses internally the low level beaker API since the decorators introduce
47 significant overhead.
47 significant overhead.
48 """
48 """
49 def create_new_repo():
49 def create_new_repo():
50 return self._create_repo(wire, create)
50 return self._create_repo(wire, create)
51
51
52 return self._repo(wire, create_new_repo)
52 return self._repo(wire, create_new_repo)
53
53
54 def _repo(self, wire, createfunc):
54 def _repo(self, wire, createfunc):
55 context = wire.get('context', None)
55 context = wire.get('context', None)
56 cache = wire.get('cache', True)
56 cache = wire.get('cache', True)
57
57
58 if context and cache:
58 if context and cache:
59 cache_key = (context, wire['path'])
59 cache_key = (context, wire['path'])
60 log.debug(
60 log.debug(
61 'FETCH %s@%s repo object from cache. Context: %s',
61 'FETCH %s@%s repo object from cache. Context: %s',
62 self.__class__.__name__, wire['path'], context)
62 self.__class__.__name__, wire['path'], context)
63 return self._cache.get(key=cache_key, createfunc=createfunc)
63 return self._cache.get(key=cache_key, createfunc=createfunc)
64 else:
64 else:
65 log.debug(
65 log.debug(
66 'INIT %s@%s repo object based on wire %s. Context: %s',
66 'INIT %s@%s repo object based on wire %s. Context: %s',
67 self.__class__.__name__, wire['path'], wire, context)
67 self.__class__.__name__, wire['path'], wire, context)
68 return createfunc()
68 return createfunc()
69
69
70
70
71 def obfuscate_qs(query_string):
71 def obfuscate_qs(query_string):
72 if query_string is None:
72 if query_string is None:
73 return None
73 return None
74
74
75 parsed = []
75 parsed = []
76 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
76 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
77 if k in ['auth_token', 'api_key']:
77 if k in ['auth_token', 'api_key']:
78 v = "*****"
78 v = "*****"
79 parsed.append((k, v))
79 parsed.append((k, v))
80
80
81 return '&'.join('{}{}'.format(
81 return '&'.join('{}{}'.format(
82 k, '={}'.format(v) if v else '') for k, v in parsed)
82 k, '={}'.format(v) if v else '') for k, v in parsed)
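
obfuscate_qs() above masks credential-bearing query parameters before URLs are logged. The sketch below restates the function from the hunk so it runs stand-alone (Python 2, matching the module's urlparse import) and applies it to a hypothetical query string:

    # Same masking logic as obfuscate_qs() above; the sample query string is
    # hypothetical. Sensitive keys are replaced with '*****', everything else
    # passes through unchanged.
    import urlparse

    def obfuscate_qs(query_string):
        if query_string is None:
            return None
        parsed = []
        for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
            if k in ['auth_token', 'api_key']:
                v = "*****"
            parsed.append((k, v))
        return '&'.join('{}{}'.format(
            k, '={}'.format(v) if v else '') for k, v in parsed)

    print obfuscate_qs('service=git-upload-pack&auth_token=secret123')
    # -> service=git-upload-pack&auth_token=*****
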
@@ -1,70 +1,70 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 import functools
27 import functools
28 from pyramid.httpexceptions import HTTPLocked
28 from pyramid.httpexceptions import HTTPLocked
29
29
30
30
31 def _make_exception(kind, *args):
31 def _make_exception(kind, *args):
32 """
32 """
33 Prepares a base `Exception` instance to be sent over the wire.
33 Prepares a base `Exception` instance to be sent over the wire.
34
34
35 To give our caller a hint what this is about, it will attach an attribute
35 To give our caller a hint what this is about, it will attach an attribute
36 `_vcs_kind` to the exception.
36 `_vcs_kind` to the exception.
37 """
37 """
38 exc = Exception(*args)
38 exc = Exception(*args)
39 exc._vcs_kind = kind
39 exc._vcs_kind = kind
40 return exc
40 return exc
41
41
42
42
43 AbortException = functools.partial(_make_exception, 'abort')
43 AbortException = functools.partial(_make_exception, 'abort')
44
44
45 ArchiveException = functools.partial(_make_exception, 'archive')
45 ArchiveException = functools.partial(_make_exception, 'archive')
46
46
47 LookupException = functools.partial(_make_exception, 'lookup')
47 LookupException = functools.partial(_make_exception, 'lookup')
48
48
49 VcsException = functools.partial(_make_exception, 'error')
49 VcsException = functools.partial(_make_exception, 'error')
50
50
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
52
52
53 RequirementException = functools.partial(_make_exception, 'requirement')
53 RequirementException = functools.partial(_make_exception, 'requirement')
54
54
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
56
56
57 URLError = functools.partial(_make_exception, 'url_error')
57 URLError = functools.partial(_make_exception, 'url_error')
58
58
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
60
60
61
61
62 class HTTPRepoLocked(HTTPLocked):
62 class HTTPRepoLocked(HTTPLocked):
63 """
63 """
64 Subclass of HTTPLocked response that allows to set the title and status
64 Subclass of HTTPLocked response that allows to set the title and status
65 code via constructor arguments.
65 code via constructor arguments.
66 """
66 """
67 def __init__(self, title, status_code=None, **kwargs):
67 def __init__(self, title, status_code=None, **kwargs):
68 self.code = status_code or HTTPLocked.code
68 self.code = status_code or HTTPLocked.code
69 self.title = title
69 self.title = title
70 super(HTTPRepoLocked, self).__init__(**kwargs)
70 super(HTTPRepoLocked, self).__init__(**kwargs)
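
The module docstring above explains that errors cross the wire as plain Exception instances tagged with a _vcs_kind attribute, so the client never has to import server-side exception classes. A minimal round trip, reusing the same factory pattern with a hypothetical commit id and client-side handling:

    # Mirrors the _make_exception()/functools.partial pattern from the hunk;
    # the commit id and the client-side reaction are hypothetical.
    import functools

    def _make_exception(kind, *args):
        exc = Exception(*args)
        exc._vcs_kind = kind
        return exc

    LookupException = functools.partial(_make_exception, 'lookup')

    try:
        raise LookupException('unknown commit id: deadbeef')
    except Exception as exc:
        if getattr(exc, '_vcs_kind', None) == 'lookup':
            print 'client maps this to a "not found" error'
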
@@ -1,581 +1,581 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import urllib
23 import urllib
24 import urllib2
24 import urllib2
25 from functools import wraps
25 from functools import wraps
26
26
27 from dulwich import index, objects
27 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
29 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
31 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
32 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
34 from dulwich.server import update_server_info
35
35
36 from vcsserver import exceptions, settings, subprocessio
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory, obfuscate_qs
38 from vcsserver.base import RepoFactory, obfuscate_qs
39 from vcsserver.hgcompat import (
39 from vcsserver.hgcompat import (
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41
41
42
42
43 DIR_STAT = stat.S_IFDIR
43 DIR_STAT = stat.S_IFDIR
44 FILE_MODE = stat.S_IFMT
44 FILE_MODE = stat.S_IFMT
45 GIT_LINK = objects.S_IFGITLINK
45 GIT_LINK = objects.S_IFGITLINK
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 def reraise_safe_exceptions(func):
50 def reraise_safe_exceptions(func):
51 """Converts Dulwich exceptions to something neutral."""
51 """Converts Dulwich exceptions to something neutral."""
52 @wraps(func)
52 @wraps(func)
53 def wrapper(*args, **kwargs):
53 def wrapper(*args, **kwargs):
54 try:
54 try:
55 return func(*args, **kwargs)
55 return func(*args, **kwargs)
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 ObjectMissing) as e:
57 ObjectMissing) as e:
58 raise exceptions.LookupException(e.message)
58 raise exceptions.LookupException(e.message)
59 except (HangupException, UnexpectedCommandError) as e:
59 except (HangupException, UnexpectedCommandError) as e:
60 raise exceptions.VcsException(e.message)
60 raise exceptions.VcsException(e.message)
61 return wrapper
61 return wrapper
62
62
63
63
64 class Repo(DulwichRepo):
64 class Repo(DulwichRepo):
65 """
65 """
66 A wrapper for dulwich Repo class.
66 A wrapper for dulwich Repo class.
67
67
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 "Too many open files" error. We need to close all opened file descriptors
69 "Too many open files" error. We need to close all opened file descriptors
70 once the repo object is destroyed.
70 once the repo object is destroyed.
71
71
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 to 0.12.0 +
73 to 0.12.0 +
74 """
74 """
75 def __del__(self):
75 def __del__(self):
76 if hasattr(self, 'object_store'):
76 if hasattr(self, 'object_store'):
77 self.close()
77 self.close()
78
78
79
79
80 class GitFactory(RepoFactory):
80 class GitFactory(RepoFactory):
81
81
82 def _create_repo(self, wire, create):
82 def _create_repo(self, wire, create):
83 repo_path = str_to_dulwich(wire['path'])
83 repo_path = str_to_dulwich(wire['path'])
84 return Repo(repo_path)
84 return Repo(repo_path)
85
85
86
86
87 class GitRemote(object):
87 class GitRemote(object):
88
88
89 def __init__(self, factory):
89 def __init__(self, factory):
90 self._factory = factory
90 self._factory = factory
91
91
92 self._bulk_methods = {
92 self._bulk_methods = {
93 "author": self.commit_attribute,
93 "author": self.commit_attribute,
94 "date": self.get_object_attrs,
94 "date": self.get_object_attrs,
95 "message": self.commit_attribute,
95 "message": self.commit_attribute,
96 "parents": self.commit_attribute,
96 "parents": self.commit_attribute,
97 "_commit": self.revision,
97 "_commit": self.revision,
98 }
98 }
99
99
100 def _assign_ref(self, wire, ref, commit_id):
100 def _assign_ref(self, wire, ref, commit_id):
101 repo = self._factory.repo(wire)
101 repo = self._factory.repo(wire)
102 repo[ref] = commit_id
102 repo[ref] = commit_id
103
103
104 @reraise_safe_exceptions
104 @reraise_safe_exceptions
105 def add_object(self, wire, content):
105 def add_object(self, wire, content):
106 repo = self._factory.repo(wire)
106 repo = self._factory.repo(wire)
107 blob = objects.Blob()
107 blob = objects.Blob()
108 blob.set_raw_string(content)
108 blob.set_raw_string(content)
109 repo.object_store.add_object(blob)
109 repo.object_store.add_object(blob)
110 return blob.id
110 return blob.id
111
111
112 @reraise_safe_exceptions
112 @reraise_safe_exceptions
113 def assert_correct_path(self, wire):
113 def assert_correct_path(self, wire):
114 try:
114 try:
115 self._factory.repo(wire)
115 self._factory.repo(wire)
116 except NotGitRepository as e:
116 except NotGitRepository as e:
117 # Exception can contain unicode which we convert
117 # Exception can contain unicode which we convert
118 raise exceptions.AbortException(repr(e))
118 raise exceptions.AbortException(repr(e))
119
119
120 @reraise_safe_exceptions
120 @reraise_safe_exceptions
121 def bare(self, wire):
121 def bare(self, wire):
122 repo = self._factory.repo(wire)
122 repo = self._factory.repo(wire)
123 return repo.bare
123 return repo.bare
124
124
125 @reraise_safe_exceptions
125 @reraise_safe_exceptions
126 def blob_as_pretty_string(self, wire, sha):
126 def blob_as_pretty_string(self, wire, sha):
127 repo = self._factory.repo(wire)
127 repo = self._factory.repo(wire)
128 return repo[sha].as_pretty_string()
128 return repo[sha].as_pretty_string()
129
129
130 @reraise_safe_exceptions
130 @reraise_safe_exceptions
131 def blob_raw_length(self, wire, sha):
131 def blob_raw_length(self, wire, sha):
132 repo = self._factory.repo(wire)
132 repo = self._factory.repo(wire)
133 blob = repo[sha]
133 blob = repo[sha]
134 return blob.raw_length()
134 return blob.raw_length()
135
135
136 @reraise_safe_exceptions
136 @reraise_safe_exceptions
137 def bulk_request(self, wire, rev, pre_load):
137 def bulk_request(self, wire, rev, pre_load):
138 result = {}
138 result = {}
139 for attr in pre_load:
139 for attr in pre_load:
140 try:
140 try:
141 method = self._bulk_methods[attr]
141 method = self._bulk_methods[attr]
142 args = [wire, rev]
142 args = [wire, rev]
143 if attr == "date":
143 if attr == "date":
144 args.extend(["commit_time", "commit_timezone"])
144 args.extend(["commit_time", "commit_timezone"])
145 elif attr in ["author", "message", "parents"]:
145 elif attr in ["author", "message", "parents"]:
146 args.append(attr)
146 args.append(attr)
147 result[attr] = method(*args)
147 result[attr] = method(*args)
148 except KeyError:
148 except KeyError:
149 raise exceptions.VcsException(
149 raise exceptions.VcsException(
150 "Unknown bulk attribute: %s" % attr)
150 "Unknown bulk attribute: %s" % attr)
151 return result
151 return result
152
152
153 def _build_opener(self, url):
153 def _build_opener(self, url):
154 handlers = []
154 handlers = []
155 url_obj = url_parser(url)
155 url_obj = url_parser(url)
156 _, authinfo = url_obj.authinfo()
156 _, authinfo = url_obj.authinfo()
157
157
158 if authinfo:
158 if authinfo:
159 # create a password manager
159 # create a password manager
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 passmgr.add_password(*authinfo)
161 passmgr.add_password(*authinfo)
162
162
163 handlers.extend((httpbasicauthhandler(passmgr),
163 handlers.extend((httpbasicauthhandler(passmgr),
164 httpdigestauthhandler(passmgr)))
164 httpdigestauthhandler(passmgr)))
165
165
166 return urllib2.build_opener(*handlers)
166 return urllib2.build_opener(*handlers)
167
167
168 @reraise_safe_exceptions
168 @reraise_safe_exceptions
169 def check_url(self, url, config):
169 def check_url(self, url, config):
170 url_obj = url_parser(url)
170 url_obj = url_parser(url)
171 test_uri, _ = url_obj.authinfo()
171 test_uri, _ = url_obj.authinfo()
172 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
172 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
173 url_obj.query = obfuscate_qs(url_obj.query)
173 url_obj.query = obfuscate_qs(url_obj.query)
174 cleaned_uri = str(url_obj)
174 cleaned_uri = str(url_obj)
175 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
175 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
176
176
177 if not test_uri.endswith('info/refs'):
177 if not test_uri.endswith('info/refs'):
178 test_uri = test_uri.rstrip('/') + '/info/refs'
178 test_uri = test_uri.rstrip('/') + '/info/refs'
179
179
180 o = self._build_opener(url)
180 o = self._build_opener(url)
181 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
181 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
182
182
183 q = {"service": 'git-upload-pack'}
183 q = {"service": 'git-upload-pack'}
184 qs = '?%s' % urllib.urlencode(q)
184 qs = '?%s' % urllib.urlencode(q)
185 cu = "%s%s" % (test_uri, qs)
185 cu = "%s%s" % (test_uri, qs)
186 req = urllib2.Request(cu, None, {})
186 req = urllib2.Request(cu, None, {})
187
187
188 try:
188 try:
189 log.debug("Trying to open URL %s", cleaned_uri)
189 log.debug("Trying to open URL %s", cleaned_uri)
190 resp = o.open(req)
190 resp = o.open(req)
191 if resp.code != 200:
191 if resp.code != 200:
192 raise exceptions.URLError('Return Code is not 200')
192 raise exceptions.URLError('Return Code is not 200')
193 except Exception as e:
193 except Exception as e:
194 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
194 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
195 # means it cannot be cloned
195 # means it cannot be cloned
196 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
196 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
197
197
198 # now detect if it's proper git repo
198 # now detect if it's proper git repo
199 gitdata = resp.read()
199 gitdata = resp.read()
200 if 'service=git-upload-pack' in gitdata:
200 if 'service=git-upload-pack' in gitdata:
201 pass
201 pass
202 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
202 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
203 # old style git can return some other format !
203 # old style git can return some other format !
204 pass
204 pass
205 else:
205 else:
206 raise exceptions.URLError(
206 raise exceptions.URLError(
207 "url [%s] does not look like an git" % (cleaned_uri,))
207 "url [%s] does not look like an git" % (cleaned_uri,))
208
208
209 return True
209 return True
210
210
211 @reraise_safe_exceptions
211 @reraise_safe_exceptions
212 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
212 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
213 remote_refs = self.fetch(wire, url, apply_refs=False)
213 remote_refs = self.fetch(wire, url, apply_refs=False)
214 repo = self._factory.repo(wire)
214 repo = self._factory.repo(wire)
215 if isinstance(valid_refs, list):
215 if isinstance(valid_refs, list):
216 valid_refs = tuple(valid_refs)
216 valid_refs = tuple(valid_refs)
217
217
218 for k in remote_refs:
218 for k in remote_refs:
219 # only parse heads/tags and skip so called deferred tags
219 # only parse heads/tags and skip so called deferred tags
220 if k.startswith(valid_refs) and not k.endswith(deferred):
220 if k.startswith(valid_refs) and not k.endswith(deferred):
221 repo[k] = remote_refs[k]
221 repo[k] = remote_refs[k]
222
222
223 if update_after_clone:
223 if update_after_clone:
224 # we want to checkout HEAD
224 # we want to checkout HEAD
225 repo["HEAD"] = remote_refs["HEAD"]
225 repo["HEAD"] = remote_refs["HEAD"]
226 index.build_index_from_tree(repo.path, repo.index_path(),
226 index.build_index_from_tree(repo.path, repo.index_path(),
227 repo.object_store, repo["HEAD"].tree)
227 repo.object_store, repo["HEAD"].tree)
228
228
229 # TODO: this is quite complex, check if that can be simplified
229 # TODO: this is quite complex, check if that can be simplified
230 @reraise_safe_exceptions
230 @reraise_safe_exceptions
231 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
231 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
232 repo = self._factory.repo(wire)
232 repo = self._factory.repo(wire)
233 object_store = repo.object_store
233 object_store = repo.object_store
234
234
235 # Create tree and populates it with blobs
235 # Create tree and populates it with blobs
236 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
236 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
237
237
238 for node in updated:
238 for node in updated:
239 # Compute subdirs if needed
239 # Compute subdirs if needed
240 dirpath, nodename = vcspath.split(node['path'])
240 dirpath, nodename = vcspath.split(node['path'])
241 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
241 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
242 parent = commit_tree
242 parent = commit_tree
243 ancestors = [('', parent)]
243 ancestors = [('', parent)]
244
244
245 # Tries to dig for the deepest existing tree
245 # Tries to dig for the deepest existing tree
246 while dirnames:
246 while dirnames:
247 curdir = dirnames.pop(0)
247 curdir = dirnames.pop(0)
248 try:
248 try:
249 dir_id = parent[curdir][1]
249 dir_id = parent[curdir][1]
250 except KeyError:
250 except KeyError:
251 # put curdir back into dirnames and stops
251 # put curdir back into dirnames and stops
252 dirnames.insert(0, curdir)
252 dirnames.insert(0, curdir)
253 break
253 break
254 else:
254 else:
255 # If found, updates parent
255 # If found, updates parent
256 parent = repo[dir_id]
256 parent = repo[dir_id]
257 ancestors.append((curdir, parent))
257 ancestors.append((curdir, parent))
258 # Now parent is deepest existing tree and we need to create
258 # Now parent is deepest existing tree and we need to create
259 # subtrees for dirnames (in reverse order)
259 # subtrees for dirnames (in reverse order)
260 # [this only applies for nodes from added]
260 # [this only applies for nodes from added]
261 new_trees = []
261 new_trees = []
262
262
263 blob = objects.Blob.from_string(node['content'])
263 blob = objects.Blob.from_string(node['content'])
264
264
265 if dirnames:
265 if dirnames:
266 # If there are trees which should be created we need to build
266 # If there are trees which should be created we need to build
267 # them now (in reverse order)
267 # them now (in reverse order)
268 reversed_dirnames = list(reversed(dirnames))
268 reversed_dirnames = list(reversed(dirnames))
269 curtree = objects.Tree()
269 curtree = objects.Tree()
270 curtree[node['node_path']] = node['mode'], blob.id
270 curtree[node['node_path']] = node['mode'], blob.id
271 new_trees.append(curtree)
271 new_trees.append(curtree)
272 for dirname in reversed_dirnames[:-1]:
272 for dirname in reversed_dirnames[:-1]:
273 newtree = objects.Tree()
273 newtree = objects.Tree()
274 newtree[dirname] = (DIR_STAT, curtree.id)
274 newtree[dirname] = (DIR_STAT, curtree.id)
275 new_trees.append(newtree)
275 new_trees.append(newtree)
276 curtree = newtree
276 curtree = newtree
277 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
277 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
278 else:
278 else:
279 parent.add(
279 parent.add(
280 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
280 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
281
281
282 new_trees.append(parent)
282 new_trees.append(parent)
283 # Update ancestors
283 # Update ancestors
284 reversed_ancestors = reversed(
284 reversed_ancestors = reversed(
285 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
285 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
286 for parent, tree, path in reversed_ancestors:
286 for parent, tree, path in reversed_ancestors:
287 parent[path] = (DIR_STAT, tree.id)
287 parent[path] = (DIR_STAT, tree.id)
288 object_store.add_object(tree)
288 object_store.add_object(tree)
289
289
290 object_store.add_object(blob)
290 object_store.add_object(blob)
291 for tree in new_trees:
291 for tree in new_trees:
292 object_store.add_object(tree)
292 object_store.add_object(tree)
293
293
294 for node_path in removed:
294 for node_path in removed:
295 paths = node_path.split('/')
295 paths = node_path.split('/')
296 tree = commit_tree
296 tree = commit_tree
297 trees = [tree]
297 trees = [tree]
298 # Traverse deep into the forest...
298 # Traverse deep into the forest...
299 for path in paths:
299 for path in paths:
300 try:
300 try:
301 obj = repo[tree[path][1]]
301 obj = repo[tree[path][1]]
302 if isinstance(obj, objects.Tree):
302 if isinstance(obj, objects.Tree):
303 trees.append(obj)
303 trees.append(obj)
304 tree = obj
304 tree = obj
305 except KeyError:
305 except KeyError:
306 break
306 break
307 # Cut down the blob and all rotten trees on the way back...
307 # Cut down the blob and all rotten trees on the way back...
308 for path, tree in reversed(zip(paths, trees)):
308 for path, tree in reversed(zip(paths, trees)):
309 del tree[path]
309 del tree[path]
310 if tree:
310 if tree:
311 # This tree still has elements - don't remove it or any
311 # This tree still has elements - don't remove it or any
312 # of it's parents
312 # of it's parents
313 break
313 break
314
314
315 object_store.add_object(commit_tree)
315 object_store.add_object(commit_tree)
316
316
317 # Create commit
317 # Create commit
318 commit = objects.Commit()
318 commit = objects.Commit()
319 commit.tree = commit_tree.id
319 commit.tree = commit_tree.id
320 for k, v in commit_data.iteritems():
320 for k, v in commit_data.iteritems():
321 setattr(commit, k, v)
321 setattr(commit, k, v)
322 object_store.add_object(commit)
322 object_store.add_object(commit)
323
323
324 ref = 'refs/heads/%s' % branch
324 ref = 'refs/heads/%s' % branch
325 repo.refs[ref] = commit.id
325 repo.refs[ref] = commit.id
326
326
327 return commit.id
327 return commit.id
328
328
329 @reraise_safe_exceptions
329 @reraise_safe_exceptions
330 def fetch(self, wire, url, apply_refs=True, refs=None):
330 def fetch(self, wire, url, apply_refs=True, refs=None):
331 if url != 'default' and '://' not in url:
331 if url != 'default' and '://' not in url:
332 client = LocalGitClient(url)
332 client = LocalGitClient(url)
333 else:
333 else:
334 url_obj = url_parser(url)
334 url_obj = url_parser(url)
335 o = self._build_opener(url)
335 o = self._build_opener(url)
336 url, _ = url_obj.authinfo()
336 url, _ = url_obj.authinfo()
337 client = HttpGitClient(base_url=url, opener=o)
337 client = HttpGitClient(base_url=url, opener=o)
338 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
339
339
340 determine_wants = repo.object_store.determine_wants_all
340 determine_wants = repo.object_store.determine_wants_all
341 if refs:
341 if refs:
342 def determine_wants_requested(references):
342 def determine_wants_requested(references):
343 return [references[r] for r in references if r in refs]
343 return [references[r] for r in references if r in refs]
344 determine_wants = determine_wants_requested
344 determine_wants = determine_wants_requested
345
345
346 try:
346 try:
347 remote_refs = client.fetch(
347 remote_refs = client.fetch(
348 path=url, target=repo, determine_wants=determine_wants)
348 path=url, target=repo, determine_wants=determine_wants)
349 except NotGitRepository as e:
349 except NotGitRepository as e:
350 log.warning(
350 log.warning(
351 'Trying to fetch from "%s" failed, not a Git repository.', url)
351 'Trying to fetch from "%s" failed, not a Git repository.', url)
352 # Exception can contain unicode which we convert
352 # Exception can contain unicode which we convert
353 raise exceptions.AbortException(repr(e))
353 raise exceptions.AbortException(repr(e))
354
354
355 # mikhail: client.fetch() returns all the remote refs, but fetches only
355 # mikhail: client.fetch() returns all the remote refs, but fetches only
356 # refs filtered by the `determine_wants` function. We need to filter the result
356 # refs filtered by the `determine_wants` function. We need to filter the result
357 # as well
357 # as well
358 if refs:
358 if refs:
359 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
359 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
360
360
361 if apply_refs:
361 if apply_refs:
362 # TODO: johbo: Needs proper test coverage with a git repository
362 # TODO: johbo: Needs proper test coverage with a git repository
363 # that contains a tag object, so that we would end up with
363 # that contains a tag object, so that we would end up with
364 # a peeled ref at this point.
364 # a peeled ref at this point.
365 PEELED_REF_MARKER = '^{}'
365 PEELED_REF_MARKER = '^{}'
366 for k in remote_refs:
366 for k in remote_refs:
367 if k.endswith(PEELED_REF_MARKER):
367 if k.endswith(PEELED_REF_MARKER):
368 log.info("Skipping peeled reference %s", k)
368 log.info("Skipping peeled reference %s", k)
369 continue
369 continue
370 repo[k] = remote_refs[k]
370 repo[k] = remote_refs[k]
371
371
372 if refs:
372 if refs:
373 # mikhail: explicitly set the head to the last ref.
373 # mikhail: explicitly set the head to the last ref.
374 repo['HEAD'] = remote_refs[refs[-1]]
374 repo['HEAD'] = remote_refs[refs[-1]]
375
375
376 # TODO: mikhail: should we return remote_refs here to be
376 # TODO: mikhail: should we return remote_refs here to be
377 # consistent?
377 # consistent?
378 else:
378 else:
379 return remote_refs
379 return remote_refs
380
380
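# A minimal sketch (ref names and SHAs invented) of how the `determine_wants`
# callback above narrows what client.fetch() actually downloads: only the
# SHAs of the explicitly requested refs are returned as "wants".
remote_refs_example = {
    'refs/heads/master': 'aaa111',
    'refs/heads/feature': 'bbb222',
    'refs/tags/v1.0': 'ccc333',
}
requested = ['refs/heads/master', 'refs/tags/v1.0']

def determine_wants_requested_example(references):
    # keep only the SHAs of refs the caller explicitly asked for
    return [references[r] for r in references if r in requested]

assert sorted(determine_wants_requested_example(remote_refs_example)) == ['aaa111', 'ccc333']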
381 @reraise_safe_exceptions
381 @reraise_safe_exceptions
382 def get_remote_refs(self, wire, url):
382 def get_remote_refs(self, wire, url):
383 repo = Repo(url)
383 repo = Repo(url)
384 return repo.get_refs()
384 return repo.get_refs()
385
385
386 @reraise_safe_exceptions
386 @reraise_safe_exceptions
387 def get_description(self, wire):
387 def get_description(self, wire):
388 repo = self._factory.repo(wire)
388 repo = self._factory.repo(wire)
389 return repo.get_description()
389 return repo.get_description()
390
390
391 @reraise_safe_exceptions
391 @reraise_safe_exceptions
392 def get_file_history(self, wire, file_path, commit_id, limit):
392 def get_file_history(self, wire, file_path, commit_id, limit):
393 repo = self._factory.repo(wire)
393 repo = self._factory.repo(wire)
394 include = [commit_id]
394 include = [commit_id]
395 paths = [file_path]
395 paths = [file_path]
396
396
397 walker = repo.get_walker(include, paths=paths, max_entries=limit)
397 walker = repo.get_walker(include, paths=paths, max_entries=limit)
398 return [x.commit.id for x in walker]
398 return [x.commit.id for x in walker]
399
399
400 @reraise_safe_exceptions
400 @reraise_safe_exceptions
401 def get_missing_revs(self, wire, rev1, rev2, path2):
401 def get_missing_revs(self, wire, rev1, rev2, path2):
402 repo = self._factory.repo(wire)
402 repo = self._factory.repo(wire)
403 LocalGitClient(thin_packs=False).fetch(path2, repo)
403 LocalGitClient(thin_packs=False).fetch(path2, repo)
404
404
405 wire_remote = wire.copy()
405 wire_remote = wire.copy()
406 wire_remote['path'] = path2
406 wire_remote['path'] = path2
407 repo_remote = self._factory.repo(wire_remote)
407 repo_remote = self._factory.repo(wire_remote)
408 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
408 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
409
409
410 revs = [
410 revs = [
411 x.commit.id
411 x.commit.id
412 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
412 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
413 return revs
413 return revs
414
414
415 @reraise_safe_exceptions
415 @reraise_safe_exceptions
416 def get_object(self, wire, sha):
416 def get_object(self, wire, sha):
417 repo = self._factory.repo(wire)
417 repo = self._factory.repo(wire)
418 obj = repo.get_object(sha)
418 obj = repo.get_object(sha)
419 commit_id = obj.id
419 commit_id = obj.id
420
420
421 if isinstance(obj, Tag):
421 if isinstance(obj, Tag):
422 commit_id = obj.object[1]
422 commit_id = obj.object[1]
423
423
424 return {
424 return {
425 'id': obj.id,
425 'id': obj.id,
426 'type': obj.type_name,
426 'type': obj.type_name,
427 'commit_id': commit_id
427 'commit_id': commit_id
428 }
428 }
429
429
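# The dictionaries returned by get_object, sketched with invented SHAs.
# For annotated tags, commit_id is taken from obj.object[1]: the SHA of the
# tagged object, not of the tag object itself.
commit_result_example = {'id': 'f00f' * 10, 'type': 'commit', 'commit_id': 'f00f' * 10}
tag_result_example = {'id': 'dead' * 10, 'type': 'tag', 'commit_id': 'beef' * 10}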
430 @reraise_safe_exceptions
430 @reraise_safe_exceptions
431 def get_object_attrs(self, wire, sha, *attrs):
431 def get_object_attrs(self, wire, sha, *attrs):
432 repo = self._factory.repo(wire)
432 repo = self._factory.repo(wire)
433 obj = repo.get_object(sha)
433 obj = repo.get_object(sha)
434 return list(getattr(obj, a) for a in attrs)
434 return list(getattr(obj, a) for a in attrs)
435
435
436 @reraise_safe_exceptions
436 @reraise_safe_exceptions
437 def get_refs(self, wire):
437 def get_refs(self, wire):
438 repo = self._factory.repo(wire)
438 repo = self._factory.repo(wire)
439 result = {}
439 result = {}
440 for ref, sha in repo.refs.as_dict().items():
440 for ref, sha in repo.refs.as_dict().items():
441 peeled_sha = repo.get_peeled(ref)
441 peeled_sha = repo.get_peeled(ref)
442 result[ref] = peeled_sha
442 result[ref] = peeled_sha
443 return result
443 return result
444
444
445 @reraise_safe_exceptions
445 @reraise_safe_exceptions
446 def get_refs_path(self, wire):
446 def get_refs_path(self, wire):
447 repo = self._factory.repo(wire)
447 repo = self._factory.repo(wire)
448 return repo.refs.path
448 return repo.refs.path
449
449
450 @reraise_safe_exceptions
450 @reraise_safe_exceptions
451 def head(self, wire):
451 def head(self, wire):
452 repo = self._factory.repo(wire)
452 repo = self._factory.repo(wire)
453 return repo.head()
453 return repo.head()
454
454
455 @reraise_safe_exceptions
455 @reraise_safe_exceptions
456 def init(self, wire):
456 def init(self, wire):
457 repo_path = str_to_dulwich(wire['path'])
457 repo_path = str_to_dulwich(wire['path'])
458 self.repo = Repo.init(repo_path)
458 self.repo = Repo.init(repo_path)
459
459
460 @reraise_safe_exceptions
460 @reraise_safe_exceptions
461 def init_bare(self, wire):
461 def init_bare(self, wire):
462 repo_path = str_to_dulwich(wire['path'])
462 repo_path = str_to_dulwich(wire['path'])
463 self.repo = Repo.init_bare(repo_path)
463 self.repo = Repo.init_bare(repo_path)
464
464
465 @reraise_safe_exceptions
465 @reraise_safe_exceptions
466 def revision(self, wire, rev):
466 def revision(self, wire, rev):
467 repo = self._factory.repo(wire)
467 repo = self._factory.repo(wire)
468 obj = repo[rev]
468 obj = repo[rev]
469 obj_data = {
469 obj_data = {
470 'id': obj.id,
470 'id': obj.id,
471 }
471 }
472 try:
472 try:
473 obj_data['tree'] = obj.tree
473 obj_data['tree'] = obj.tree
474 except AttributeError:
474 except AttributeError:
475 pass
475 pass
476 return obj_data
476 return obj_data
477
477
478 @reraise_safe_exceptions
478 @reraise_safe_exceptions
479 def commit_attribute(self, wire, rev, attr):
479 def commit_attribute(self, wire, rev, attr):
480 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
481 obj = repo[rev]
481 obj = repo[rev]
482 return getattr(obj, attr)
482 return getattr(obj, attr)
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def set_refs(self, wire, key, value):
485 def set_refs(self, wire, key, value):
486 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
487 repo.refs[key] = value
487 repo.refs[key] = value
488
488
489 @reraise_safe_exceptions
489 @reraise_safe_exceptions
490 def remove_ref(self, wire, key):
490 def remove_ref(self, wire, key):
491 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
492 del repo.refs[key]
492 del repo.refs[key]
493
493
494 @reraise_safe_exceptions
494 @reraise_safe_exceptions
495 def tree_changes(self, wire, source_id, target_id):
495 def tree_changes(self, wire, source_id, target_id):
496 repo = self._factory.repo(wire)
496 repo = self._factory.repo(wire)
497 source = repo[source_id].tree if source_id else None
497 source = repo[source_id].tree if source_id else None
498 target = repo[target_id].tree
498 target = repo[target_id].tree
499 result = repo.object_store.tree_changes(source, target)
499 result = repo.object_store.tree_changes(source, target)
500 return list(result)
500 return list(result)
501
501
502 @reraise_safe_exceptions
502 @reraise_safe_exceptions
503 def tree_items(self, wire, tree_id):
503 def tree_items(self, wire, tree_id):
504 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
505 tree = repo[tree_id]
505 tree = repo[tree_id]
506
506
507 result = []
507 result = []
508 for item in tree.iteritems():
508 for item in tree.iteritems():
509 item_sha = item.sha
509 item_sha = item.sha
510 item_mode = item.mode
510 item_mode = item.mode
511
511
512 if FILE_MODE(item_mode) == GIT_LINK:
512 if FILE_MODE(item_mode) == GIT_LINK:
513 item_type = "link"
513 item_type = "link"
514 else:
514 else:
515 item_type = repo[item_sha].type_name
515 item_type = repo[item_sha].type_name
516
516
517 result.append((item.path, item_mode, item_sha, item_type))
517 result.append((item.path, item_mode, item_sha, item_type))
518 return result
518 return result
519
519
520 @reraise_safe_exceptions
520 @reraise_safe_exceptions
521 def update_server_info(self, wire):
521 def update_server_info(self, wire):
522 repo = self._factory.repo(wire)
522 repo = self._factory.repo(wire)
523 update_server_info(repo)
523 update_server_info(repo)
524
524
525 @reraise_safe_exceptions
525 @reraise_safe_exceptions
526 def discover_git_version(self):
526 def discover_git_version(self):
527 stdout, _ = self.run_git_command(
527 stdout, _ = self.run_git_command(
528 {}, ['--version'], _bare=True, _safe=True)
528 {}, ['--version'], _bare=True, _safe=True)
529 prefix = 'git version'
529 prefix = 'git version'
530 if stdout.startswith(prefix):
530 if stdout.startswith(prefix):
531 stdout = stdout[len(prefix):]
531 stdout = stdout[len(prefix):]
532 return stdout.strip()
532 return stdout.strip()
533
533
534 @reraise_safe_exceptions
534 @reraise_safe_exceptions
535 def run_git_command(self, wire, cmd, **opts):
535 def run_git_command(self, wire, cmd, **opts):
536 path = wire.get('path', None)
536 path = wire.get('path', None)
537
537
538 if path and os.path.isdir(path):
538 if path and os.path.isdir(path):
539 opts['cwd'] = path
539 opts['cwd'] = path
540
540
541 if '_bare' in opts:
541 if '_bare' in opts:
542 _copts = []
542 _copts = []
543 del opts['_bare']
543 del opts['_bare']
544 else:
544 else:
545 _copts = ['-c', 'core.quotepath=false', ]
545 _copts = ['-c', 'core.quotepath=false', ]
546 safe_call = False
546 safe_call = False
547 if '_safe' in opts:
547 if '_safe' in opts:
548 # no exc on failure
548 # no exc on failure
549 del opts['_safe']
549 del opts['_safe']
550 safe_call = True
550 safe_call = True
551
551
552 gitenv = os.environ.copy()
552 gitenv = os.environ.copy()
553 gitenv.update(opts.pop('extra_env', {}))
553 gitenv.update(opts.pop('extra_env', {}))
554 # GIT_DIR has to be removed from the environment, otherwise it overrides cwd
554 # GIT_DIR has to be removed from the environment, otherwise it overrides cwd
555 if 'GIT_DIR' in gitenv:
555 if 'GIT_DIR' in gitenv:
556 del gitenv['GIT_DIR']
556 del gitenv['GIT_DIR']
557 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
557 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
558
558
559 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
559 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
560
560
561 try:
561 try:
562 _opts = {'env': gitenv, 'shell': False}
562 _opts = {'env': gitenv, 'shell': False}
563 _opts.update(opts)
563 _opts.update(opts)
564 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
564 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
565
565
566 return ''.join(p), ''.join(p.error)
566 return ''.join(p), ''.join(p.error)
567 except (EnvironmentError, OSError) as err:
567 except (EnvironmentError, OSError) as err:
568 tb_err = ("Couldn't run git command (%s).\n"
568 tb_err = ("Couldn't run git command (%s).\n"
569 "Original error was: %s\n" % (cmd, err))
569 "Original error was: %s\n" % (cmd, err))
570 log.exception(tb_err)
570 log.exception(tb_err)
571 if safe_call:
571 if safe_call:
572 return '', err
572 return '', err
573 else:
573 else:
574 raise exceptions.VcsException(tb_err)
574 raise exceptions.VcsException(tb_err)
575
575
576
576
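# A hypothetical usage sketch of run_git_command; `remote` (a GitRemote
# instance) and `wire` (a dict with a valid 'path') are assumed to exist
# and are not defined here, so the calls stay commented out.
#
# stdout, stderr = remote.run_git_command(
#     wire, ['rev-parse', 'HEAD'])        # raises VcsException on failure
#
# With _safe=True a failure returns ('', err) instead of raising, and
# _bare=True skips the default ['-c', 'core.quotepath=false'] options:
#
# stdout, _ = remote.run_git_command(
#     {}, ['--version'], _bare=True, _safe=True)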
577 def str_to_dulwich(value):
577 def str_to_dulwich(value):
578 """
578 """
579 Dulwich 0.10.1a requires `unicode` objects to be passed in.
579 Dulwich 0.10.1a requires `unicode` objects to be passed in.
580 """
580 """
581 return value.decode(settings.WIRE_ENCODING)
581 return value.decode(settings.WIRE_ENCODING)
@@ -1,725 +1,725 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import sys
21 import sys
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs
31 from vcsserver.base import RepoFactory, obfuscate_qs
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 InterventionRequired, RequirementError)
37 InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 # force mercurial to only use 1 thread, otherwise it may try to set a
56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
57 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
58 baseui.setconfig('worker', 'numcpus', 1)
59
59
60 # If there is no config for the largefiles extension, we explicitly disable
60 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from the repository's hgrc file. Recent
61 # it here. This overrides settings from the repository's hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from largefile
62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # repo.
63 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
64 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
65 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
66 baseui.setconfig('extensions', 'largefiles', '!')
67
67
68 return baseui
68 return baseui
69
69
70
70
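# repo_config is an iterable of (section, option, value) triples; a
# hypothetical configuration (values invented) for the function above:
repo_config_example = [
    ('phases', 'publish', 'false'),
    ('extensions', 'largefiles', ''),   # present, so it is not auto-disabled
    ('ui', 'username', 'dev <dev@example.com>'),
]
# baseui = make_ui_from_config(repo_config_example)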
71 def reraise_safe_exceptions(func):
71 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
72 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
73 def wrapper(*args, **kwargs):
74 try:
74 try:
75 return func(*args, **kwargs)
75 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
76 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
77 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
78 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
79 raise_from_original(exceptions.LookupException)
80 except RequirementError:
80 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
81 raise_from_original(exceptions.RequirementException)
82 except RepoError:
82 except RepoError:
83 raise_from_original(exceptions.VcsException)
83 raise_from_original(exceptions.VcsException)
84 except LookupError:
84 except LookupError:
85 raise_from_original(exceptions.LookupException)
85 raise_from_original(exceptions.LookupException)
86 except Exception as e:
86 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
87 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
88 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
89 raise_from_original(exceptions.UnhandledException)
90 raise
90 raise
91 return wrapper
91 return wrapper
92
92
93
93
94 def raise_from_original(new_type):
94 def raise_from_original(new_type):
95 """
95 """
96 Raise a new exception type with original args and traceback.
96 Raise a new exception type with original args and traceback.
97 """
97 """
98 _, original, traceback = sys.exc_info()
98 _, original, traceback = sys.exc_info()
99 try:
99 try:
100 raise new_type(*original.args), None, traceback
100 raise new_type(*original.args), None, traceback
101 finally:
101 finally:
102 del traceback
102 del traceback
103
103
104
104
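# A self-contained sketch of the translation pattern implemented by
# reraise_safe_exceptions / raise_from_original above. TranslatedError
# stands in for the vcsserver.exceptions classes; Python 2 raise syntax,
# as used throughout this module.
import sys

class TranslatedError(Exception):
    pass

def _reraise_as(new_type):
    _, original, traceback = sys.exc_info()
    try:
        # same args, same traceback, different (serializable) type
        raise new_type(*original.args), None, traceback
    finally:
        del traceback

def safe(func):
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except KeyError:
            _reraise_as(TranslatedError)
    return wrapper

@safe
def lookup_example(mapping, key):
    return mapping[key]

# lookup_example({}, 'missing') raises TranslatedError('missing') with the
# original traceback preserved.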
105 class MercurialFactory(RepoFactory):
105 class MercurialFactory(RepoFactory):
106
106
107 def _create_config(self, config, hooks=True):
107 def _create_config(self, config, hooks=True):
108 if not hooks:
108 if not hooks:
109 hooks_to_clean = frozenset((
109 hooks_to_clean = frozenset((
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
112 new_config = []
112 new_config = []
113 for section, option, value in config:
113 for section, option, value in config:
114 if section == 'hooks' and option in hooks_to_clean:
114 if section == 'hooks' and option in hooks_to_clean:
115 continue
115 continue
116 new_config.append((section, option, value))
116 new_config.append((section, option, value))
117 config = new_config
117 config = new_config
118
118
119 baseui = make_ui_from_config(config)
119 baseui = make_ui_from_config(config)
120 return baseui
120 return baseui
121
121
122 def _create_repo(self, wire, create):
122 def _create_repo(self, wire, create):
123 baseui = self._create_config(wire["config"])
123 baseui = self._create_config(wire["config"])
124 return localrepository(baseui, wire["path"], create)
124 return localrepository(baseui, wire["path"], create)
125
125
126
126
127 class HgRemote(object):
127 class HgRemote(object):
128
128
129 def __init__(self, factory):
129 def __init__(self, factory):
130 self._factory = factory
130 self._factory = factory
131
131
132 self._bulk_methods = {
132 self._bulk_methods = {
133 "affected_files": self.ctx_files,
133 "affected_files": self.ctx_files,
134 "author": self.ctx_user,
134 "author": self.ctx_user,
135 "branch": self.ctx_branch,
135 "branch": self.ctx_branch,
136 "children": self.ctx_children,
136 "children": self.ctx_children,
137 "date": self.ctx_date,
137 "date": self.ctx_date,
138 "message": self.ctx_description,
138 "message": self.ctx_description,
139 "parents": self.ctx_parents,
139 "parents": self.ctx_parents,
140 "status": self.ctx_status,
140 "status": self.ctx_status,
141 "_file_paths": self.ctx_list,
141 "_file_paths": self.ctx_list,
142 }
142 }
143
143
144 @reraise_safe_exceptions
144 @reraise_safe_exceptions
145 def discover_hg_version(self):
145 def discover_hg_version(self):
146 from mercurial import util
146 from mercurial import util
147 return util.version()
147 return util.version()
148
148
149 @reraise_safe_exceptions
149 @reraise_safe_exceptions
150 def archive_repo(self, archive_path, mtime, file_info, kind):
150 def archive_repo(self, archive_path, mtime, file_info, kind):
151 if kind == "tgz":
151 if kind == "tgz":
152 archiver = archival.tarit(archive_path, mtime, "gz")
152 archiver = archival.tarit(archive_path, mtime, "gz")
153 elif kind == "tbz2":
153 elif kind == "tbz2":
154 archiver = archival.tarit(archive_path, mtime, "bz2")
154 archiver = archival.tarit(archive_path, mtime, "bz2")
155 elif kind == 'zip':
155 elif kind == 'zip':
156 archiver = archival.zipit(archive_path, mtime)
156 archiver = archival.zipit(archive_path, mtime)
157 else:
157 else:
158 raise exceptions.ArchiveException(
158 raise exceptions.ArchiveException(
159 'Remote does not support: "%s".' % kind)
159 'Remote does not support: "%s".' % kind)
160
160
161 for f_path, f_mode, f_is_link, f_content in file_info:
161 for f_path, f_mode, f_is_link, f_content in file_info:
162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
163 archiver.done()
163 archiver.done()
164
164
165 @reraise_safe_exceptions
165 @reraise_safe_exceptions
166 def bookmarks(self, wire):
166 def bookmarks(self, wire):
167 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
168 return dict(repo._bookmarks)
168 return dict(repo._bookmarks)
169
169
170 @reraise_safe_exceptions
170 @reraise_safe_exceptions
171 def branches(self, wire, normal, closed):
171 def branches(self, wire, normal, closed):
172 repo = self._factory.repo(wire)
172 repo = self._factory.repo(wire)
173 iter_branches = repo.branchmap().iterbranches()
173 iter_branches = repo.branchmap().iterbranches()
174 bt = {}
174 bt = {}
175 for branch_name, _heads, tip, is_closed in iter_branches:
175 for branch_name, _heads, tip, is_closed in iter_branches:
176 if normal and not is_closed:
176 if normal and not is_closed:
177 bt[branch_name] = tip
177 bt[branch_name] = tip
178 if closed and is_closed:
178 if closed and is_closed:
179 bt[branch_name] = tip
179 bt[branch_name] = tip
180
180
181 return bt
181 return bt
182
182
183 @reraise_safe_exceptions
183 @reraise_safe_exceptions
184 def bulk_request(self, wire, rev, pre_load):
184 def bulk_request(self, wire, rev, pre_load):
185 result = {}
185 result = {}
186 for attr in pre_load:
186 for attr in pre_load:
187 try:
187 try:
188 method = self._bulk_methods[attr]
188 method = self._bulk_methods[attr]
189 result[attr] = method(wire, rev)
189 result[attr] = method(wire, rev)
190 except KeyError:
190 except KeyError:
191 raise exceptions.VcsException(
191 raise exceptions.VcsException(
192 'Unknown bulk attribute: "%s"' % attr)
192 'Unknown bulk attribute: "%s"' % attr)
193 return result
193 return result
194
194
195 @reraise_safe_exceptions
195 @reraise_safe_exceptions
196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
198 clone(baseui, source, dest, noupdate=not update_after_clone)
198 clone(baseui, source, dest, noupdate=not update_after_clone)
199
199
200 @reraise_safe_exceptions
200 @reraise_safe_exceptions
201 def commitctx(
201 def commitctx(
202 self, wire, message, parents, commit_time, commit_timezone,
202 self, wire, message, parents, commit_time, commit_timezone,
203 user, files, extra, removed, updated):
203 user, files, extra, removed, updated):
204
204
205 def _filectxfn(_repo, memctx, path):
205 def _filectxfn(_repo, memctx, path):
206 """
206 """
207 Marks given path as added/changed/removed in a given _repo. This is
207 Marks given path as added/changed/removed in a given _repo. This is
208 used by mercurial's internal commit function.
208 used by mercurial's internal commit function.
209 """
209 """
210
210
211 # check if this path is removed
211 # check if this path is removed
212 if path in removed:
212 if path in removed:
213 # returning None is a way to mark node for removal
213 # returning None is a way to mark node for removal
214 return None
214 return None
215
215
216 # check if this path is added
216 # check if this path is added
217 for node in updated:
217 for node in updated:
218 if node['path'] == path:
218 if node['path'] == path:
219 return memfilectx(
219 return memfilectx(
220 _repo,
220 _repo,
221 path=node['path'],
221 path=node['path'],
222 data=node['content'],
222 data=node['content'],
223 islink=False,
223 islink=False,
224 isexec=bool(node['mode'] & stat.S_IXUSR),
224 isexec=bool(node['mode'] & stat.S_IXUSR),
225 copied=False,
225 copied=False,
226 memctx=memctx)
226 memctx=memctx)
227
227
228 raise exceptions.AbortException(
228 raise exceptions.AbortException(
229 "Given path hasn't been marked as added, "
229 "Given path hasn't been marked as added, "
230 "changed or removed (%s)" % path)
230 "changed or removed (%s)" % path)
231
231
232 repo = self._factory.repo(wire)
232 repo = self._factory.repo(wire)
233
233
234 commit_ctx = memctx(
234 commit_ctx = memctx(
235 repo=repo,
235 repo=repo,
236 parents=parents,
236 parents=parents,
237 text=message,
237 text=message,
238 files=files,
238 files=files,
239 filectxfn=_filectxfn,
239 filectxfn=_filectxfn,
240 user=user,
240 user=user,
241 date=(commit_time, commit_timezone),
241 date=(commit_time, commit_timezone),
242 extra=extra)
242 extra=extra)
243
243
244 n = repo.commitctx(commit_ctx)
244 n = repo.commitctx(commit_ctx)
245 new_id = hex(n)
245 new_id = hex(n)
246
246
247 return new_id
247 return new_id
248
248
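# The shape of `updated` and `removed` is implied by _filectxfn above; a
# hypothetical payload (all values invented) would look like this. Only the
# 'path', 'content' and 'mode' keys of each updated entry are read.
removed_example = ['old.txt']
updated_example = [{
    'path': 'docs/index.rst',
    'content': 'hello\n',
    'mode': 0o100644,        # no S_IXUSR bit, so isexec=False in memfilectx
}]
# hg_remote.commitctx(wire, message='Add docs', parents=parents,
#                     commit_time=1483228800, commit_timezone=0,
#                     user='dev <dev@example.com>',
#                     files=['docs/index.rst', 'old.txt'], extra={},
#                     removed=removed_example, updated=updated_example)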
249 @reraise_safe_exceptions
249 @reraise_safe_exceptions
250 def ctx_branch(self, wire, revision):
250 def ctx_branch(self, wire, revision):
251 repo = self._factory.repo(wire)
251 repo = self._factory.repo(wire)
252 ctx = repo[revision]
252 ctx = repo[revision]
253 return ctx.branch()
253 return ctx.branch()
254
254
255 @reraise_safe_exceptions
255 @reraise_safe_exceptions
256 def ctx_children(self, wire, revision):
256 def ctx_children(self, wire, revision):
257 repo = self._factory.repo(wire)
257 repo = self._factory.repo(wire)
258 ctx = repo[revision]
258 ctx = repo[revision]
259 return [child.rev() for child in ctx.children()]
259 return [child.rev() for child in ctx.children()]
260
260
261 @reraise_safe_exceptions
261 @reraise_safe_exceptions
262 def ctx_date(self, wire, revision):
262 def ctx_date(self, wire, revision):
263 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
264 ctx = repo[revision]
264 ctx = repo[revision]
265 return ctx.date()
265 return ctx.date()
266
266
267 @reraise_safe_exceptions
267 @reraise_safe_exceptions
268 def ctx_description(self, wire, revision):
268 def ctx_description(self, wire, revision):
269 repo = self._factory.repo(wire)
269 repo = self._factory.repo(wire)
270 ctx = repo[revision]
270 ctx = repo[revision]
271 return ctx.description()
271 return ctx.description()
272
272
273 @reraise_safe_exceptions
273 @reraise_safe_exceptions
274 def ctx_diff(
274 def ctx_diff(
275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
276 repo = self._factory.repo(wire)
276 repo = self._factory.repo(wire)
277 ctx = repo[revision]
277 ctx = repo[revision]
278 result = ctx.diff(
278 result = ctx.diff(
279 git=git, ignore_whitespace=ignore_whitespace, context=context)
279 git=git, ignore_whitespace=ignore_whitespace, context=context)
280 return list(result)
280 return list(result)
281
281
282 @reraise_safe_exceptions
282 @reraise_safe_exceptions
283 def ctx_files(self, wire, revision):
283 def ctx_files(self, wire, revision):
284 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
285 ctx = repo[revision]
285 ctx = repo[revision]
286 return ctx.files()
286 return ctx.files()
287
287
288 @reraise_safe_exceptions
288 @reraise_safe_exceptions
289 def ctx_list(self, path, revision):
289 def ctx_list(self, path, revision):
290 repo = self._factory.repo(path)
290 repo = self._factory.repo(path)
291 ctx = repo[revision]
291 ctx = repo[revision]
292 return list(ctx)
292 return list(ctx)
293
293
294 @reraise_safe_exceptions
294 @reraise_safe_exceptions
295 def ctx_parents(self, wire, revision):
295 def ctx_parents(self, wire, revision):
296 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
297 ctx = repo[revision]
297 ctx = repo[revision]
298 return [parent.rev() for parent in ctx.parents()]
298 return [parent.rev() for parent in ctx.parents()]
299
299
300 @reraise_safe_exceptions
300 @reraise_safe_exceptions
301 def ctx_substate(self, wire, revision):
301 def ctx_substate(self, wire, revision):
302 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
303 ctx = repo[revision]
303 ctx = repo[revision]
304 return ctx.substate
304 return ctx.substate
305
305
306 @reraise_safe_exceptions
306 @reraise_safe_exceptions
307 def ctx_status(self, wire, revision):
307 def ctx_status(self, wire, revision):
308 repo = self._factory.repo(wire)
308 repo = self._factory.repo(wire)
309 ctx = repo[revision]
309 ctx = repo[revision]
310 status = repo[ctx.p1().node()].status(other=ctx.node())
310 status = repo[ctx.p1().node()].status(other=ctx.node())
311 # object of status (odd, custom named tuple in mercurial) is not
311 # object of status (odd, custom named tuple in mercurial) is not
312 # correctly serializable via Pyro, so we make it a list, as the underlying
312 # correctly serializable via Pyro, so we make it a list, as the underlying
313 # API expects this to be a list
313 # API expects this to be a list
314 return list(status)
314 return list(status)
315
315
316 @reraise_safe_exceptions
316 @reraise_safe_exceptions
317 def ctx_user(self, wire, revision):
317 def ctx_user(self, wire, revision):
318 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
319 ctx = repo[revision]
319 ctx = repo[revision]
320 return ctx.user()
320 return ctx.user()
321
321
322 @reraise_safe_exceptions
322 @reraise_safe_exceptions
323 def check_url(self, url, config):
323 def check_url(self, url, config):
324 _proto = None
324 _proto = None
325 if '+' in url[:url.find('://')]:
325 if '+' in url[:url.find('://')]:
326 _proto = url[0:url.find('+')]
326 _proto = url[0:url.find('+')]
327 url = url[url.find('+') + 1:]
327 url = url[url.find('+') + 1:]
328 handlers = []
328 handlers = []
329 url_obj = url_parser(url)
329 url_obj = url_parser(url)
330 test_uri, authinfo = url_obj.authinfo()
330 test_uri, authinfo = url_obj.authinfo()
331 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
331 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
332 url_obj.query = obfuscate_qs(url_obj.query)
332 url_obj.query = obfuscate_qs(url_obj.query)
333
333
334 cleaned_uri = str(url_obj)
334 cleaned_uri = str(url_obj)
335 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
335 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
336
336
337 if authinfo:
337 if authinfo:
338 # create a password manager
338 # create a password manager
339 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
339 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
340 passmgr.add_password(*authinfo)
340 passmgr.add_password(*authinfo)
341
341
342 handlers.extend((httpbasicauthhandler(passmgr),
342 handlers.extend((httpbasicauthhandler(passmgr),
343 httpdigestauthhandler(passmgr)))
343 httpdigestauthhandler(passmgr)))
344
344
345 o = urllib2.build_opener(*handlers)
345 o = urllib2.build_opener(*handlers)
346 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
346 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
347 ('Accept', 'application/mercurial-0.1')]
347 ('Accept', 'application/mercurial-0.1')]
348
348
349 q = {"cmd": 'between'}
349 q = {"cmd": 'between'}
350 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
350 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
351 qs = '?%s' % urllib.urlencode(q)
351 qs = '?%s' % urllib.urlencode(q)
352 cu = "%s%s" % (test_uri, qs)
352 cu = "%s%s" % (test_uri, qs)
353 req = urllib2.Request(cu, None, {})
353 req = urllib2.Request(cu, None, {})
354
354
355 try:
355 try:
356 log.debug("Trying to open URL %s", cleaned_uri)
356 log.debug("Trying to open URL %s", cleaned_uri)
357 resp = o.open(req)
357 resp = o.open(req)
358 if resp.code != 200:
358 if resp.code != 200:
359 raise exceptions.URLError('Return Code is not 200')
359 raise exceptions.URLError('Return Code is not 200')
360 except Exception as e:
360 except Exception as e:
361 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
361 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
362 # means it cannot be cloned
362 # means it cannot be cloned
363 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
363 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
364
364
365 # now check if it's a proper hg repo, but don't do it for svn
365 # now check if it's a proper hg repo, but don't do it for svn
366 try:
366 try:
367 if _proto == 'svn':
367 if _proto == 'svn':
368 pass
368 pass
369 else:
369 else:
370 # check for pure hg repos
370 # check for pure hg repos
371 log.debug(
371 log.debug(
372 "Verifying if URL is a Mercurial repository: %s",
372 "Verifying if URL is a Mercurial repository: %s",
373 cleaned_uri)
373 cleaned_uri)
374 httppeer(make_ui_from_config(config), url).lookup('tip')
374 httppeer(make_ui_from_config(config), url).lookup('tip')
375 except Exception as e:
375 except Exception as e:
376 log.warning("URL is not a valid Mercurial repository: %s",
376 log.warning("URL is not a valid Mercurial repository: %s",
377 cleaned_uri)
377 cleaned_uri)
378 raise exceptions.URLError(
378 raise exceptions.URLError(
379 "url [%s] does not look like an hg repo org_exc: %s"
379 "url [%s] does not look like an hg repo org_exc: %s"
380 % (cleaned_uri, e))
380 % (cleaned_uri, e))
381
381
382 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
382 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
383 return True
383 return True
384
384
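# A quick illustration of the reachability probe that check_url builds; the
# repository URL below is invented.
import urllib
probe_qs = urllib.urlencode([('cmd', 'between'),
                             ('pairs', '%s-%s' % ('0' * 40, '0' * 40))])
probe_url = 'https://hg.example.com/repo' + '?%s' % probe_qs
# -> .../repo?cmd=between&pairs=000...000-000...000
# check_url treats anything other than HTTP 200 on this request (or a
# network error) as a URL that cannot be pulled from.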
385 @reraise_safe_exceptions
385 @reraise_safe_exceptions
386 def diff(
386 def diff(
387 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
387 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
388 context):
388 context):
389 repo = self._factory.repo(wire)
389 repo = self._factory.repo(wire)
390
390
391 if file_filter:
391 if file_filter:
392 filter = match(file_filter[0], '', [file_filter[1]])
392 match_filter = match(file_filter[0], '', [file_filter[1]])
393 else:
393 else:
394 filter = file_filter
394 match_filter = file_filter
395 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
395 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
396
396
397 try:
397 try:
398 return "".join(patch.diff(
398 return "".join(patch.diff(
399 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
399 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
400 except RepoLookupError:
400 except RepoLookupError:
401 raise exceptions.LookupException()
401 raise exceptions.LookupException()
402
402
403 @reraise_safe_exceptions
403 @reraise_safe_exceptions
404 def file_history(self, wire, revision, path, limit):
404 def file_history(self, wire, revision, path, limit):
405 repo = self._factory.repo(wire)
405 repo = self._factory.repo(wire)
406
406
407 ctx = repo[revision]
407 ctx = repo[revision]
408 fctx = ctx.filectx(path)
408 fctx = ctx.filectx(path)
409
409
410 def history_iter():
410 def history_iter():
411 limit_rev = fctx.rev()
411 limit_rev = fctx.rev()
412 for obj in reversed(list(fctx.filelog())):
412 for obj in reversed(list(fctx.filelog())):
413 obj = fctx.filectx(obj)
413 obj = fctx.filectx(obj)
414 if limit_rev >= obj.rev():
414 if limit_rev >= obj.rev():
415 yield obj
415 yield obj
416
416
417 history = []
417 history = []
418 for cnt, obj in enumerate(history_iter()):
418 for cnt, obj in enumerate(history_iter()):
419 if limit and cnt >= limit:
419 if limit and cnt >= limit:
420 break
420 break
421 history.append(hex(obj.node()))
421 history.append(hex(obj.node()))
422
422
423 return [x for x in history]
423 return [x for x in history]
424
424
425 @reraise_safe_exceptions
425 @reraise_safe_exceptions
426 def file_history_untill(self, wire, revision, path, limit):
426 def file_history_untill(self, wire, revision, path, limit):
427 repo = self._factory.repo(wire)
427 repo = self._factory.repo(wire)
428 ctx = repo[revision]
428 ctx = repo[revision]
429 fctx = ctx.filectx(path)
429 fctx = ctx.filectx(path)
430
430
431 file_log = list(fctx.filelog())
431 file_log = list(fctx.filelog())
432 if limit:
432 if limit:
433 # Limit to the last n items
433 # Limit to the last n items
434 file_log = file_log[-limit:]
434 file_log = file_log[-limit:]
435
435
436 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
436 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
437
437
438 @reraise_safe_exceptions
438 @reraise_safe_exceptions
439 def fctx_annotate(self, wire, revision, path):
439 def fctx_annotate(self, wire, revision, path):
440 repo = self._factory.repo(wire)
440 repo = self._factory.repo(wire)
441 ctx = repo[revision]
441 ctx = repo[revision]
442 fctx = ctx.filectx(path)
442 fctx = ctx.filectx(path)
443
443
444 result = []
444 result = []
445 for i, annotate_data in enumerate(fctx.annotate()):
445 for i, annotate_data in enumerate(fctx.annotate()):
446 ln_no = i + 1
446 ln_no = i + 1
447 node_info, content = annotate_data
447 node_info, content = annotate_data
448 sha = hex(node_info[0].node())
448 sha = hex(node_info[0].node())
449 result.append((ln_no, sha, content))
449 result.append((ln_no, sha, content))
450 return result
450 return result
451
451
452 @reraise_safe_exceptions
452 @reraise_safe_exceptions
453 def fctx_data(self, wire, revision, path):
453 def fctx_data(self, wire, revision, path):
454 repo = self._factory.repo(wire)
454 repo = self._factory.repo(wire)
455 ctx = repo[revision]
455 ctx = repo[revision]
456 fctx = ctx.filectx(path)
456 fctx = ctx.filectx(path)
457 return fctx.data()
457 return fctx.data()
458
458
459 @reraise_safe_exceptions
459 @reraise_safe_exceptions
460 def fctx_flags(self, wire, revision, path):
460 def fctx_flags(self, wire, revision, path):
461 repo = self._factory.repo(wire)
461 repo = self._factory.repo(wire)
462 ctx = repo[revision]
462 ctx = repo[revision]
463 fctx = ctx.filectx(path)
463 fctx = ctx.filectx(path)
464 return fctx.flags()
464 return fctx.flags()
465
465
466 @reraise_safe_exceptions
466 @reraise_safe_exceptions
467 def fctx_size(self, wire, revision, path):
467 def fctx_size(self, wire, revision, path):
468 repo = self._factory.repo(wire)
468 repo = self._factory.repo(wire)
469 ctx = repo[revision]
469 ctx = repo[revision]
470 fctx = ctx.filectx(path)
470 fctx = ctx.filectx(path)
471 return fctx.size()
471 return fctx.size()
472
472
473 @reraise_safe_exceptions
473 @reraise_safe_exceptions
474 def get_all_commit_ids(self, wire, name):
474 def get_all_commit_ids(self, wire, name):
475 repo = self._factory.repo(wire)
475 repo = self._factory.repo(wire)
476 revs = repo.filtered(name).changelog.index
476 revs = repo.filtered(name).changelog.index
477 return map(lambda x: hex(x[7]), revs)[:-1]
477 return map(lambda x: hex(x[7]), revs)[:-1]
478
478
479 @reraise_safe_exceptions
479 @reraise_safe_exceptions
480 def get_config_value(self, wire, section, name, untrusted=False):
480 def get_config_value(self, wire, section, name, untrusted=False):
481 repo = self._factory.repo(wire)
481 repo = self._factory.repo(wire)
482 return repo.ui.config(section, name, untrusted=untrusted)
482 return repo.ui.config(section, name, untrusted=untrusted)
483
483
484 @reraise_safe_exceptions
484 @reraise_safe_exceptions
485 def get_config_bool(self, wire, section, name, untrusted=False):
485 def get_config_bool(self, wire, section, name, untrusted=False):
486 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
487 return repo.ui.configbool(section, name, untrusted=untrusted)
487 return repo.ui.configbool(section, name, untrusted=untrusted)
488
488
489 @reraise_safe_exceptions
489 @reraise_safe_exceptions
490 def get_config_list(self, wire, section, name, untrusted=False):
490 def get_config_list(self, wire, section, name, untrusted=False):
491 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
492 return repo.ui.configlist(section, name, untrusted=untrusted)
492 return repo.ui.configlist(section, name, untrusted=untrusted)
493
493
494 @reraise_safe_exceptions
494 @reraise_safe_exceptions
495 def is_large_file(self, wire, path):
495 def is_large_file(self, wire, path):
496 return largefiles.lfutil.isstandin(path)
496 return largefiles.lfutil.isstandin(path)
497
497
498 @reraise_safe_exceptions
498 @reraise_safe_exceptions
499 def in_store(self, wire, sha):
499 def in_store(self, wire, sha):
500 repo = self._factory.repo(wire)
500 repo = self._factory.repo(wire)
501 return largefiles.lfutil.instore(repo, sha)
501 return largefiles.lfutil.instore(repo, sha)
502
502
503 @reraise_safe_exceptions
503 @reraise_safe_exceptions
504 def in_user_cache(self, wire, sha):
504 def in_user_cache(self, wire, sha):
505 repo = self._factory.repo(wire)
505 repo = self._factory.repo(wire)
506 return largefiles.lfutil.inusercache(repo.ui, sha)
506 return largefiles.lfutil.inusercache(repo.ui, sha)
507
507
508 @reraise_safe_exceptions
508 @reraise_safe_exceptions
509 def store_path(self, wire, sha):
509 def store_path(self, wire, sha):
510 repo = self._factory.repo(wire)
510 repo = self._factory.repo(wire)
511 return largefiles.lfutil.storepath(repo, sha)
511 return largefiles.lfutil.storepath(repo, sha)
512
512
513 @reraise_safe_exceptions
513 @reraise_safe_exceptions
514 def link(self, wire, sha, path):
514 def link(self, wire, sha, path):
515 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
516 largefiles.lfutil.link(
516 largefiles.lfutil.link(
517 largefiles.lfutil.usercachepath(repo.ui, sha), path)
517 largefiles.lfutil.usercachepath(repo.ui, sha), path)
518
518
519 @reraise_safe_exceptions
519 @reraise_safe_exceptions
520 def localrepository(self, wire, create=False):
520 def localrepository(self, wire, create=False):
521 self._factory.repo(wire, create=create)
521 self._factory.repo(wire, create=create)
522
522
523 @reraise_safe_exceptions
523 @reraise_safe_exceptions
524 def lookup(self, wire, revision, both):
524 def lookup(self, wire, revision, both):
525 # TODO Paris: Ugly hack to "deserialize" long for msgpack
525 # TODO Paris: Ugly hack to "deserialize" long for msgpack
526 if isinstance(revision, float):
526 if isinstance(revision, float):
527 revision = long(revision)
527 revision = long(revision)
528 repo = self._factory.repo(wire)
528 repo = self._factory.repo(wire)
529 try:
529 try:
530 ctx = repo[revision]
530 ctx = repo[revision]
531 except RepoLookupError:
531 except RepoLookupError:
532 raise exceptions.LookupException(revision)
532 raise exceptions.LookupException(revision)
533 except LookupError as e:
533 except LookupError as e:
534 raise exceptions.LookupException(e.name)
534 raise exceptions.LookupException(e.name)
535
535
536 if not both:
536 if not both:
537 return ctx.hex()
537 return ctx.hex()
538
538
539 ctx = repo[ctx.hex()]
539 ctx = repo[ctx.hex()]
540 return ctx.hex(), ctx.rev()
540 return ctx.hex(), ctx.rev()
541
541
542 @reraise_safe_exceptions
542 @reraise_safe_exceptions
543 def pull(self, wire, url, commit_ids=None):
543 def pull(self, wire, url, commit_ids=None):
544 repo = self._factory.repo(wire)
544 repo = self._factory.repo(wire)
545 remote = peer(repo, {}, url)
545 remote = peer(repo, {}, url)
546 if commit_ids:
546 if commit_ids:
547 commit_ids = [bin(commit_id) for commit_id in commit_ids]
547 commit_ids = [bin(commit_id) for commit_id in commit_ids]
548
548
549 return exchange.pull(
549 return exchange.pull(
550 repo, remote, heads=commit_ids, force=None).cgresult
550 repo, remote, heads=commit_ids, force=None).cgresult
551
551
552 @reraise_safe_exceptions
552 @reraise_safe_exceptions
553 def revision(self, wire, rev):
553 def revision(self, wire, rev):
554 repo = self._factory.repo(wire)
554 repo = self._factory.repo(wire)
555 ctx = repo[rev]
555 ctx = repo[rev]
556 return ctx.rev()
556 return ctx.rev()
557
557
558 @reraise_safe_exceptions
558 @reraise_safe_exceptions
559 def rev_range(self, wire, filter):
559 def rev_range(self, wire, filter):
560 repo = self._factory.repo(wire)
560 repo = self._factory.repo(wire)
561 revisions = [rev for rev in revrange(repo, filter)]
561 revisions = [rev for rev in revrange(repo, filter)]
562 return revisions
562 return revisions
563
563
564 @reraise_safe_exceptions
564 @reraise_safe_exceptions
565 def rev_range_hash(self, wire, node):
565 def rev_range_hash(self, wire, node):
566 repo = self._factory.repo(wire)
566 repo = self._factory.repo(wire)
567
567
568 def get_revs(repo, rev_opt):
568 def get_revs(repo, rev_opt):
569 if rev_opt:
569 if rev_opt:
570 revs = revrange(repo, rev_opt)
570 revs = revrange(repo, rev_opt)
571 if len(revs) == 0:
571 if len(revs) == 0:
572 return (nullrev, nullrev)
572 return (nullrev, nullrev)
573 return max(revs), min(revs)
573 return max(revs), min(revs)
574 else:
574 else:
575 return len(repo) - 1, 0
575 return len(repo) - 1, 0
576
576
577 stop, start = get_revs(repo, [node + ':'])
577 stop, start = get_revs(repo, [node + ':'])
578 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
578 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
579 return revs
579 return revs
580
580
581 @reraise_safe_exceptions
581 @reraise_safe_exceptions
582 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
582 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
583 other_path = kwargs.pop('other_path', None)
583 other_path = kwargs.pop('other_path', None)
584
584
585 # case when we want to compare two independent repositories
585 # case when we want to compare two independent repositories
586 if other_path and other_path != wire["path"]:
586 if other_path and other_path != wire["path"]:
587 baseui = self._factory._create_config(wire["config"])
587 baseui = self._factory._create_config(wire["config"])
588 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
588 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
589 else:
589 else:
590 repo = self._factory.repo(wire)
590 repo = self._factory.repo(wire)
591 return list(repo.revs(rev_spec, *args))
591 return list(repo.revs(rev_spec, *args))
592
592
593 @reraise_safe_exceptions
593 @reraise_safe_exceptions
594 def strip(self, wire, revision, update, backup):
594 def strip(self, wire, revision, update, backup):
595 repo = self._factory.repo(wire)
595 repo = self._factory.repo(wire)
596 ctx = repo[revision]
596 ctx = repo[revision]
597 hgext_strip(
597 hgext_strip(
598 repo.baseui, repo, ctx.node(), update=update, backup=backup)
598 repo.baseui, repo, ctx.node(), update=update, backup=backup)
599
599
600 @reraise_safe_exceptions
600 @reraise_safe_exceptions
601 def tag(self, wire, name, revision, message, local, user,
601 def tag(self, wire, name, revision, message, local, user,
602 tag_time, tag_timezone):
602 tag_time, tag_timezone):
603 repo = self._factory.repo(wire)
603 repo = self._factory.repo(wire)
604 ctx = repo[revision]
604 ctx = repo[revision]
605 node = ctx.node()
605 node = ctx.node()
606
606
607 date = (tag_time, tag_timezone)
607 date = (tag_time, tag_timezone)
608 try:
608 try:
609 repo.tag(name, node, message, local, user, date)
609 repo.tag(name, node, message, local, user, date)
610 except Abort as e:
610 except Abort as e:
611 log.exception("Tag operation aborted")
611 log.exception("Tag operation aborted")
612 # Exception can contain unicode which we convert
612 # Exception can contain unicode which we convert
613 raise exceptions.AbortException(repr(e))
613 raise exceptions.AbortException(repr(e))
614
614
615 @reraise_safe_exceptions
615 @reraise_safe_exceptions
616 def tags(self, wire):
616 def tags(self, wire):
617 repo = self._factory.repo(wire)
617 repo = self._factory.repo(wire)
618 return repo.tags()
618 return repo.tags()
619
619
620 @reraise_safe_exceptions
620 @reraise_safe_exceptions
621 def update(self, wire, node=None, clean=False):
621 def update(self, wire, node=None, clean=False):
622 repo = self._factory.repo(wire)
622 repo = self._factory.repo(wire)
623 baseui = self._factory._create_config(wire['config'])
623 baseui = self._factory._create_config(wire['config'])
624 commands.update(baseui, repo, node=node, clean=clean)
624 commands.update(baseui, repo, node=node, clean=clean)
625
625
626 @reraise_safe_exceptions
626 @reraise_safe_exceptions
627 def identify(self, wire):
627 def identify(self, wire):
628 repo = self._factory.repo(wire)
628 repo = self._factory.repo(wire)
629 baseui = self._factory._create_config(wire['config'])
629 baseui = self._factory._create_config(wire['config'])
630 output = io.BytesIO()
630 output = io.BytesIO()
631 baseui.write = output.write
631 baseui.write = output.write
632 # This is required to get a full node id
632 # This is required to get a full node id
633 baseui.debugflag = True
633 baseui.debugflag = True
634 commands.identify(baseui, repo, id=True)
634 commands.identify(baseui, repo, id=True)
635
635
636 return output.getvalue()
636 return output.getvalue()
637
637
638 @reraise_safe_exceptions
638 @reraise_safe_exceptions
639 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
639 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
640 hooks=True):
640 hooks=True):
641 repo = self._factory.repo(wire)
641 repo = self._factory.repo(wire)
642 baseui = self._factory._create_config(wire['config'], hooks=hooks)
642 baseui = self._factory._create_config(wire['config'], hooks=hooks)
643
643
644 # Mercurial internally has a lot of logic that checks ONLY if an
644 # Mercurial internally has a lot of logic that checks ONLY if an
645 # option is defined, so we only pass options that are actually set
645 # option is defined, so we only pass options that are actually set
646 opts = {}
646 opts = {}
647 if bookmark:
647 if bookmark:
648 opts['bookmark'] = bookmark
648 opts['bookmark'] = bookmark
649 if branch:
649 if branch:
650 opts['branch'] = branch
650 opts['branch'] = branch
651 if revision:
651 if revision:
652 opts['rev'] = revision
652 opts['rev'] = revision
653
653
654 commands.pull(baseui, repo, source, **opts)
654 commands.pull(baseui, repo, source, **opts)
655
655
656 @reraise_safe_exceptions
656 @reraise_safe_exceptions
657 def heads(self, wire, branch=None):
657 def heads(self, wire, branch=None):
658 repo = self._factory.repo(wire)
658 repo = self._factory.repo(wire)
659 baseui = self._factory._create_config(wire['config'])
659 baseui = self._factory._create_config(wire['config'])
660 output = io.BytesIO()
660 output = io.BytesIO()
661
661
662 def write(data, **unused_kwargs):
662 def write(data, **unused_kwargs):
663 output.write(data)
663 output.write(data)
664
664
665 baseui.write = write
665 baseui.write = write
666 if branch:
666 if branch:
667 args = [branch]
667 args = [branch]
668 else:
668 else:
669 args = []
669 args = []
670 commands.heads(baseui, repo, template='{node} ', *args)
670 commands.heads(baseui, repo, template='{node} ', *args)
671
671
672 return output.getvalue()
672 return output.getvalue()
673
673
674 @reraise_safe_exceptions
674 @reraise_safe_exceptions
675 def ancestor(self, wire, revision1, revision2):
675 def ancestor(self, wire, revision1, revision2):
676 repo = self._factory.repo(wire)
676 repo = self._factory.repo(wire)
677 baseui = self._factory._create_config(wire['config'])
677 baseui = self._factory._create_config(wire['config'])
678 output = io.BytesIO()
678 output = io.BytesIO()
679 baseui.write = output.write
679 baseui.write = output.write
680 commands.debugancestor(baseui, repo, revision1, revision2)
680 commands.debugancestor(baseui, repo, revision1, revision2)
681
681
682 return output.getvalue()
682 return output.getvalue()
683
683
684 @reraise_safe_exceptions
684 @reraise_safe_exceptions
685 def push(self, wire, revisions, dest_path, hooks=True,
685 def push(self, wire, revisions, dest_path, hooks=True,
686 push_branches=False):
686 push_branches=False):
687 repo = self._factory.repo(wire)
687 repo = self._factory.repo(wire)
688 baseui = self._factory._create_config(wire['config'], hooks=hooks)
688 baseui = self._factory._create_config(wire['config'], hooks=hooks)
689 commands.push(baseui, repo, dest=dest_path, rev=revisions,
689 commands.push(baseui, repo, dest=dest_path, rev=revisions,
690 new_branch=push_branches)
690 new_branch=push_branches)
691
691
692 @reraise_safe_exceptions
692 @reraise_safe_exceptions
693 def merge(self, wire, revision):
693 def merge(self, wire, revision):
694 repo = self._factory.repo(wire)
694 repo = self._factory.repo(wire)
695 baseui = self._factory._create_config(wire['config'])
695 baseui = self._factory._create_config(wire['config'])
696 repo.ui.setconfig('ui', 'merge', 'internal:dump')
696 repo.ui.setconfig('ui', 'merge', 'internal:dump')
697
697
698 # When sub repositories are used, Mercurial prompts the user in case
698 # When sub repositories are used, Mercurial prompts the user in case
699 # of merge conflicts or different sub repository sources. By setting
699 # of merge conflicts or different sub repository sources. By setting
700 # the interactive flag to `False`, Mercurial doesn't prompt the user
700 # the interactive flag to `False`, Mercurial doesn't prompt the user
701 # but instead uses a default value.
701 # but instead uses a default value.
702 repo.ui.setconfig('ui', 'interactive', False)
702 repo.ui.setconfig('ui', 'interactive', False)
703
703
704 commands.merge(baseui, repo, rev=revision)
704 commands.merge(baseui, repo, rev=revision)
705
705
706 @reraise_safe_exceptions
706 @reraise_safe_exceptions
707 def commit(self, wire, message, username):
707 def commit(self, wire, message, username):
708 repo = self._factory.repo(wire)
708 repo = self._factory.repo(wire)
709 baseui = self._factory._create_config(wire['config'])
709 baseui = self._factory._create_config(wire['config'])
710 repo.ui.setconfig('ui', 'username', username)
710 repo.ui.setconfig('ui', 'username', username)
711 commands.commit(baseui, repo, message=message)
711 commands.commit(baseui, repo, message=message)
712
712
713 @reraise_safe_exceptions
713 @reraise_safe_exceptions
714 def rebase(self, wire, source=None, dest=None, abort=False):
714 def rebase(self, wire, source=None, dest=None, abort=False):
715 repo = self._factory.repo(wire)
715 repo = self._factory.repo(wire)
716 baseui = self._factory._create_config(wire['config'])
716 baseui = self._factory._create_config(wire['config'])
717 repo.ui.setconfig('ui', 'merge', 'internal:dump')
717 repo.ui.setconfig('ui', 'merge', 'internal:dump')
718 rebase.rebase(
718 rebase.rebase(
719 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
719 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
720
720
721 @reraise_safe_exceptions
721 @reraise_safe_exceptions
722 def bookmark(self, wire, bookmark, revision=None):
722 def bookmark(self, wire, bookmark, revision=None):
723 repo = self._factory.repo(wire)
723 repo = self._factory.repo(wire)
724 baseui = self._factory._create_config(wire['config'])
724 baseui = self._factory._create_config(wire['config'])
725 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
725 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
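The option handling in pull_cmd above forwards a value only when it is set because, as the in-code comment notes, Mercurial internally checks only whether an option is defined. A minimal, illustrative Python sketch of the same pattern (the helper name is made up and not part of this changeset):

    def build_opts(**candidates):
        # keep only the options that were actually provided, so downstream
        # "is this option defined?" checks behave as expected
        return dict((key, value) for key, value in candidates.items() if value)

    opts = build_opts(bookmark=None, branch='default', rev=None)
    assert opts == {'branch': 'default'}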
@@ -1,62 +1,62 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 import mercurial.demandimport
23 import mercurial.demandimport
24 # patch demandimport, due to a bug in mercurial that always triggers
24 # patch demandimport, due to a bug in mercurial that always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39
39
40 from mercurial.commands import clone, nullid, pull
40 from mercurial.commands import clone, nullid, pull
41 from mercurial.context import memctx, memfilectx
41 from mercurial.context import memctx, memfilectx
42 from mercurial.error import (
42 from mercurial.error import (
43 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
43 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
44 RequirementError)
44 RequirementError)
45 from mercurial.hgweb import hgweb_mod
45 from mercurial.hgweb import hgweb_mod
46 from mercurial.localrepo import localrepository
46 from mercurial.localrepo import localrepository
47 from mercurial.match import match
47 from mercurial.match import match
48 from mercurial.mdiff import diffopts
48 from mercurial.mdiff import diffopts
49 from mercurial.node import bin, hex
49 from mercurial.node import bin, hex
50 from mercurial.encoding import tolocal
50 from mercurial.encoding import tolocal
51 from mercurial.discovery import findcommonoutgoing
51 from mercurial.discovery import findcommonoutgoing
52 from mercurial.hg import peer
52 from mercurial.hg import peer
53 from mercurial.httppeer import httppeer
53 from mercurial.httppeer import httppeer
54 from mercurial.util import url as hg_url
54 from mercurial.util import url as hg_url
55 from mercurial.scmutil import revrange
55 from mercurial.scmutil import revrange
56 from mercurial.node import nullrev
56 from mercurial.node import nullrev
57 from mercurial import exchange
57 from mercurial import exchange
58 from hgext import largefiles
58 from hgext import largefiles
59
59
60 # those auth handlers are patched for a python 2.6.5 bug and
60 # those auth handlers are patched for a python 2.6.5 bug and
61 # infinite looping when given invalid resources
61 # infinite looping when given invalid resources
62 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
62 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
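The compatibility module above disables Mercurial's demandimport before anything else from Mercurial is imported, and then re-exports the names the rest of vcsserver needs. A hedged usage sketch (the importing module is hypothetical; the imported names are all re-exported above):

    # pull Mercurial symbols through the compatibility layer so the
    # demandimport no-op patch is guaranteed to already be in effect
    from vcsserver.hgcompat import clone, pull, RepoError, peer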
@@ -1,134 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto.capabilities = wrapper
39 lfproto.capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto.capabilities
44 wrapped_capabilities = lfproto.capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(repo, proto):
47 def _dynamic_capabilities(repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 else:
55 else:
56 logger.debug('Extension largefiles disabled')
56 logger.debug('Extension largefiles disabled')
57 calc_capabilities = lfproto.capabilitiesorig
57 calc_capabilities = lfproto.capabilitiesorig
58 return calc_capabilities(repo, proto)
58 return calc_capabilities(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
61
62
62
63 def patch_subrepo_type_mapping():
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
64 from collections import defaultdict
65 from hgcompat import subrepo
65 from hgcompat import subrepo
66 from exceptions import SubrepoMergeException
66 from exceptions import SubrepoMergeException
67
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
69
70 def __init__(self, ctx, path, *args, **kwargs):
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
71 """Initialize abstractsubrepo part
72
72
73 ``ctx`` is the context referring to this subrepository in the
73 ``ctx`` is the context referring to this subrepository in the
74 parent repository.
74 parent repository.
75
75
76 ``path`` is the path to this subrepository as seen from the
76 ``path`` is the path to this subrepository as seen from the
77 innermost repository.
77 innermost repository.
78 """
78 """
79 self.ui = ctx.repo().ui
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
80 self._ctx = ctx
81 self._path = path
81 self._path = path
82
82
83 def storeclean(self, path):
83 def storeclean(self, path):
84 """
84 """
85 returns true if the repository has not changed since it was last
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
86 cloned from or pushed to a given repository.
87 """
87 """
88 return True
88 return True
89
89
90 def dirty(self, ignoreupdate=False):
90 def dirty(self, ignoreupdate=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
94 """
95 return False
95 return False
96
96
97 def basestate(self):
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
99 state and working directory modifications"""
100 substate = subrepo.state(self._ctx, self.ui)
100 substate = subrepo.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
102 return rev
103
103
104 def remove(self):
104 def remove(self):
105 """remove the subrepo
105 """remove the subrepo
106
106
107 (should verify the dirstate is not dirty first)
107 (should verify the dirstate is not dirty first)
108 """
108 """
109 pass
109 pass
110
110
111 def get(self, state, overwrite=False):
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
112 """run whatever commands are needed to put the subrepo into
113 this state
113 this state
114 """
114 """
115 pass
115 pass
116
116
117 def merge(self, state):
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
119 raise SubrepoMergeException()
120
120
121 def push(self, opts):
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
122 """perform whatever action is analogous to 'hg push'
123
123
124 This may be a no-op on some systems.
124 This may be a no-op on some systems.
125 """
125 """
126 pass
126 pass
127
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
130 subrepo.types = {
131 'hg': NoOpSubrepo,
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
133 'svn': NoOpSubrepo
134 }
134 }
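Both patch functions above are designed to be called once, early during process start-up and before the first repository object is created. A minimal sketch of that wiring, assuming a start-up hook that is not part of this changeset:

    from vcsserver import hgpatches

    # apply the monkey patches before any Mercurial repository is opened
    hgpatches.patch_largefiles_capabilities()
    hgpatches.patch_subrepo_type_mapping()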
@@ -1,372 +1,372 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2016 RodeCode GmbH
4 # Copyright (C) 2014-2017 RodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import collections
20 import collections
21 import importlib
21 import importlib
22 import io
22 import io
23 import json
23 import json
24 import subprocess
24 import subprocess
25 import sys
25 import sys
26 from httplib import HTTPConnection
26 from httplib import HTTPConnection
27
27
28
28
29 import mercurial.scmutil
29 import mercurial.scmutil
30 import mercurial.node
30 import mercurial.node
31 import Pyro4
31 import Pyro4
32 import simplejson as json
32 import simplejson as json
33
33
34 from vcsserver import exceptions
34 from vcsserver import exceptions
35
35
36
36
37 class HooksHttpClient(object):
37 class HooksHttpClient(object):
38 connection = None
38 connection = None
39
39
40 def __init__(self, hooks_uri):
40 def __init__(self, hooks_uri):
41 self.hooks_uri = hooks_uri
41 self.hooks_uri = hooks_uri
42
42
43 def __call__(self, method, extras):
43 def __call__(self, method, extras):
44 connection = HTTPConnection(self.hooks_uri)
44 connection = HTTPConnection(self.hooks_uri)
45 body = self._serialize(method, extras)
45 body = self._serialize(method, extras)
46 connection.request('POST', '/', body)
46 connection.request('POST', '/', body)
47 response = connection.getresponse()
47 response = connection.getresponse()
48 return json.loads(response.read())
48 return json.loads(response.read())
49
49
50 def _serialize(self, hook_name, extras):
50 def _serialize(self, hook_name, extras):
51 data = {
51 data = {
52 'method': hook_name,
52 'method': hook_name,
53 'extras': extras
53 'extras': extras
54 }
54 }
55 return json.dumps(data)
55 return json.dumps(data)
56
56
57
57
58 class HooksDummyClient(object):
58 class HooksDummyClient(object):
59 def __init__(self, hooks_module):
59 def __init__(self, hooks_module):
60 self._hooks_module = importlib.import_module(hooks_module)
60 self._hooks_module = importlib.import_module(hooks_module)
61
61
62 def __call__(self, hook_name, extras):
62 def __call__(self, hook_name, extras):
63 with self._hooks_module.Hooks() as hooks:
63 with self._hooks_module.Hooks() as hooks:
64 return getattr(hooks, hook_name)(extras)
64 return getattr(hooks, hook_name)(extras)
65
65
66
66
67 class HooksPyro4Client(object):
67 class HooksPyro4Client(object):
68 def __init__(self, hooks_uri):
68 def __init__(self, hooks_uri):
69 self.hooks_uri = hooks_uri
69 self.hooks_uri = hooks_uri
70
70
71 def __call__(self, hook_name, extras):
71 def __call__(self, hook_name, extras):
72 with Pyro4.Proxy(self.hooks_uri) as hooks:
72 with Pyro4.Proxy(self.hooks_uri) as hooks:
73 return getattr(hooks, hook_name)(extras)
73 return getattr(hooks, hook_name)(extras)
74
74
75
75
76 class RemoteMessageWriter(object):
76 class RemoteMessageWriter(object):
77 """Writer base class."""
77 """Writer base class."""
78 def write(self, message):
78 def write(self, message):
79 raise NotImplementedError()
79 raise NotImplementedError()
80
80
81
81
82 class HgMessageWriter(RemoteMessageWriter):
82 class HgMessageWriter(RemoteMessageWriter):
83 """Writer that knows how to send messages to mercurial clients."""
83 """Writer that knows how to send messages to mercurial clients."""
84
84
85 def __init__(self, ui):
85 def __init__(self, ui):
86 self.ui = ui
86 self.ui = ui
87
87
88 def write(self, message):
88 def write(self, message):
89 # TODO: Check why the quiet flag is set by default.
89 # TODO: Check why the quiet flag is set by default.
90 old = self.ui.quiet
90 old = self.ui.quiet
91 self.ui.quiet = False
91 self.ui.quiet = False
92 self.ui.status(message.encode('utf-8'))
92 self.ui.status(message.encode('utf-8'))
93 self.ui.quiet = old
93 self.ui.quiet = old
94
94
95
95
96 class GitMessageWriter(RemoteMessageWriter):
96 class GitMessageWriter(RemoteMessageWriter):
97 """Writer that knows how to send messages to git clients."""
97 """Writer that knows how to send messages to git clients."""
98
98
99 def __init__(self, stdout=None):
99 def __init__(self, stdout=None):
100 self.stdout = stdout or sys.stdout
100 self.stdout = stdout or sys.stdout
101
101
102 def write(self, message):
102 def write(self, message):
103 self.stdout.write(message.encode('utf-8'))
103 self.stdout.write(message.encode('utf-8'))
104
104
105
105
106 def _handle_exception(result):
106 def _handle_exception(result):
107 exception_class = result.get('exception')
107 exception_class = result.get('exception')
108 if exception_class == 'HTTPLockedRC':
108 if exception_class == 'HTTPLockedRC':
109 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 raise exceptions.RepositoryLockedException(*result['exception_args'])
110 elif exception_class == 'RepositoryError':
110 elif exception_class == 'RepositoryError':
111 raise exceptions.VcsException(*result['exception_args'])
111 raise exceptions.VcsException(*result['exception_args'])
112 elif exception_class:
112 elif exception_class:
113 raise Exception('Got remote exception "%s" with args "%s"' %
113 raise Exception('Got remote exception "%s" with args "%s"' %
114 (exception_class, result['exception_args']))
114 (exception_class, result['exception_args']))
115
115
116
116
117 def _get_hooks_client(extras):
117 def _get_hooks_client(extras):
118 if 'hooks_uri' in extras:
118 if 'hooks_uri' in extras:
119 protocol = extras.get('hooks_protocol')
119 protocol = extras.get('hooks_protocol')
120 return (
120 return (
121 HooksHttpClient(extras['hooks_uri'])
121 HooksHttpClient(extras['hooks_uri'])
122 if protocol == 'http'
122 if protocol == 'http'
123 else HooksPyro4Client(extras['hooks_uri'])
123 else HooksPyro4Client(extras['hooks_uri'])
124 )
124 )
125 else:
125 else:
126 return HooksDummyClient(extras['hooks_module'])
126 return HooksDummyClient(extras['hooks_module'])
127
127
128
128
129 def _call_hook(hook_name, extras, writer):
129 def _call_hook(hook_name, extras, writer):
130 hooks = _get_hooks_client(extras)
130 hooks = _get_hooks_client(extras)
131 result = hooks(hook_name, extras)
131 result = hooks(hook_name, extras)
132 writer.write(result['output'])
132 writer.write(result['output'])
133 _handle_exception(result)
133 _handle_exception(result)
134
134
135 return result['status']
135 return result['status']
136
136
137
137
138 def _extras_from_ui(ui):
138 def _extras_from_ui(ui):
139 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
139 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
140 return extras
140 return extras
141
141
142
142
143 def repo_size(ui, repo, **kwargs):
143 def repo_size(ui, repo, **kwargs):
144 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
144 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
145
145
146
146
147 def pre_pull(ui, repo, **kwargs):
147 def pre_pull(ui, repo, **kwargs):
148 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
148 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
149
149
150
150
151 def post_pull(ui, repo, **kwargs):
151 def post_pull(ui, repo, **kwargs):
152 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
152 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
153
153
154
154
155 def pre_push(ui, repo, **kwargs):
155 def pre_push(ui, repo, **kwargs):
156 return _call_hook('pre_push', _extras_from_ui(ui), HgMessageWriter(ui))
156 return _call_hook('pre_push', _extras_from_ui(ui), HgMessageWriter(ui))
157
157
158
158
159 # N.B.(skreft): the two functions below were taken and adapted from
159 # N.B.(skreft): the two functions below were taken and adapted from
160 # rhodecode.lib.vcs.remote.handle_git_pre_receive
160 # rhodecode.lib.vcs.remote.handle_git_pre_receive
161 # They are required to compute the commit_ids
161 # They are required to compute the commit_ids
162 def _get_revs(repo, rev_opt):
162 def _get_revs(repo, rev_opt):
163 revs = [rev for rev in mercurial.scmutil.revrange(repo, rev_opt)]
163 revs = [rev for rev in mercurial.scmutil.revrange(repo, rev_opt)]
164 if len(revs) == 0:
164 if len(revs) == 0:
165 return (mercurial.node.nullrev, mercurial.node.nullrev)
165 return (mercurial.node.nullrev, mercurial.node.nullrev)
166
166
167 return max(revs), min(revs)
167 return max(revs), min(revs)
168
168
169
169
170 def _rev_range_hash(repo, node):
170 def _rev_range_hash(repo, node):
171 stop, start = _get_revs(repo, [node + ':'])
171 stop, start = _get_revs(repo, [node + ':'])
172 revs = [mercurial.node.hex(repo[r].node()) for r in xrange(start, stop + 1)]
172 revs = [mercurial.node.hex(repo[r].node()) for r in xrange(start, stop + 1)]
173
173
174 return revs
174 return revs
175
175
176
176
177 def post_push(ui, repo, node, **kwargs):
177 def post_push(ui, repo, node, **kwargs):
178 commit_ids = _rev_range_hash(repo, node)
178 commit_ids = _rev_range_hash(repo, node)
179
179
180 extras = _extras_from_ui(ui)
180 extras = _extras_from_ui(ui)
181 extras['commit_ids'] = commit_ids
181 extras['commit_ids'] = commit_ids
182
182
183 return _call_hook('post_push', extras, HgMessageWriter(ui))
183 return _call_hook('post_push', extras, HgMessageWriter(ui))
184
184
185
185
186 # backward compat
186 # backward compat
187 log_pull_action = post_pull
187 log_pull_action = post_pull
188
188
189 # backward compat
189 # backward compat
190 log_push_action = post_push
190 log_push_action = post_push
191
191
192
192
193 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
193 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
194 """
194 """
195 Old hook name: keep here for backward compatibility.
195 Old hook name: keep here for backward compatibility.
196
196
197 This is only required when the installed git hooks are not upgraded.
197 This is only required when the installed git hooks are not upgraded.
198 """
198 """
199 pass
199 pass
200
200
201
201
202 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
202 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
203 """
203 """
204 Old hook name: keep here for backward compatibility.
204 Old hook name: keep here for backward compatibility.
205
205
206 This is only required when the installed git hooks are not upgraded.
206 This is only required when the installed git hooks are not upgraded.
207 """
207 """
208 pass
208 pass
209
209
210
210
211 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
211 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
212
212
213
213
214 def git_pre_pull(extras):
214 def git_pre_pull(extras):
215 """
215 """
216 Pre pull hook.
216 Pre pull hook.
217
217
218 :param extras: dictionary containing the keys defined in simplevcs
218 :param extras: dictionary containing the keys defined in simplevcs
219 :type extras: dict
219 :type extras: dict
220
220
221 :return: status code of the hook. 0 for success.
221 :return: status code of the hook. 0 for success.
222 :rtype: int
222 :rtype: int
223 """
223 """
224 if 'pull' not in extras['hooks']:
224 if 'pull' not in extras['hooks']:
225 return HookResponse(0, '')
225 return HookResponse(0, '')
226
226
227 stdout = io.BytesIO()
227 stdout = io.BytesIO()
228 try:
228 try:
229 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
229 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
230 except Exception as error:
230 except Exception as error:
231 status = 128
231 status = 128
232 stdout.write('ERROR: %s\n' % str(error))
232 stdout.write('ERROR: %s\n' % str(error))
233
233
234 return HookResponse(status, stdout.getvalue())
234 return HookResponse(status, stdout.getvalue())
235
235
236
236
237 def git_post_pull(extras):
237 def git_post_pull(extras):
238 """
238 """
239 Post pull hook.
239 Post pull hook.
240
240
241 :param extras: dictionary containing the keys defined in simplevcs
241 :param extras: dictionary containing the keys defined in simplevcs
242 :type extras: dict
242 :type extras: dict
243
243
244 :return: status code of the hook. 0 for success.
244 :return: status code of the hook. 0 for success.
245 :rtype: int
245 :rtype: int
246 """
246 """
247 if 'pull' not in extras['hooks']:
247 if 'pull' not in extras['hooks']:
248 return HookResponse(0, '')
248 return HookResponse(0, '')
249
249
250 stdout = io.BytesIO()
250 stdout = io.BytesIO()
251 try:
251 try:
252 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
252 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
253 except Exception as error:
253 except Exception as error:
254 status = 128
254 status = 128
255 stdout.write('ERROR: %s\n' % error)
255 stdout.write('ERROR: %s\n' % error)
256
256
257 return HookResponse(status, stdout.getvalue())
257 return HookResponse(status, stdout.getvalue())
258
258
259
259
260 def git_pre_receive(unused_repo_path, unused_revs, env):
260 def git_pre_receive(unused_repo_path, unused_revs, env):
261 """
261 """
262 Pre push hook.
262 Pre push hook.
263
263
264 :param extras: dictionary containing the keys defined in simplevcs
264 :param extras: dictionary containing the keys defined in simplevcs
265 :type extras: dict
265 :type extras: dict
266
266
267 :return: status code of the hook. 0 for success.
267 :return: status code of the hook. 0 for success.
268 :rtype: int
268 :rtype: int
269 """
269 """
270 extras = json.loads(env['RC_SCM_DATA'])
270 extras = json.loads(env['RC_SCM_DATA'])
271 if 'push' not in extras['hooks']:
271 if 'push' not in extras['hooks']:
272 return 0
272 return 0
273 return _call_hook('pre_push', extras, GitMessageWriter())
273 return _call_hook('pre_push', extras, GitMessageWriter())
274
274
275
275
276 def _run_command(arguments):
276 def _run_command(arguments):
277 """
277 """
278 Run the specified command and return the stdout.
278 Run the specified command and return the stdout.
279
279
280 :param arguments: sequence of program arguments (including the program name)
280 :param arguments: sequence of program arguments (including the program name)
281 :type arguments: list[str]
281 :type arguments: list[str]
282 """
282 """
283 # TODO(skreft): refactor this method and all the other similar ones.
283 # TODO(skreft): refactor this method and all the other similar ones.
284 # Probably this should be using subprocessio.
284 # Probably this should be using subprocessio.
285 process = subprocess.Popen(
285 process = subprocess.Popen(
286 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
286 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
287 stdout, _ = process.communicate()
287 stdout, _ = process.communicate()
288
288
289 if process.returncode != 0:
289 if process.returncode != 0:
290 raise Exception(
290 raise Exception(
291 'Command %s exited with exit code %s' % (arguments,
291 'Command %s exited with exit code %s' % (arguments,
292 process.returncode))
292 process.returncode))
293
293
294 return stdout
294 return stdout
295
295
296
296
297 def git_post_receive(unused_repo_path, revision_lines, env):
297 def git_post_receive(unused_repo_path, revision_lines, env):
298 """
298 """
299 Post push hook.
299 Post push hook.
300
300
301 :param extras: dictionary containing the keys defined in simplevcs
301 :param extras: dictionary containing the keys defined in simplevcs
302 :type extras: dict
302 :type extras: dict
303
303
304 :return: status code of the hook. 0 for success.
304 :return: status code of the hook. 0 for success.
305 :rtype: int
305 :rtype: int
306 """
306 """
307 extras = json.loads(env['RC_SCM_DATA'])
307 extras = json.loads(env['RC_SCM_DATA'])
308 if 'push' not in extras['hooks']:
308 if 'push' not in extras['hooks']:
309 return 0
309 return 0
310
310
311 rev_data = []
311 rev_data = []
312 for revision_line in revision_lines:
312 for revision_line in revision_lines:
313 old_rev, new_rev, ref = revision_line.strip().split(' ')
313 old_rev, new_rev, ref = revision_line.strip().split(' ')
314 ref_data = ref.split('/', 2)
314 ref_data = ref.split('/', 2)
315 if ref_data[1] in ('tags', 'heads'):
315 if ref_data[1] in ('tags', 'heads'):
316 rev_data.append({
316 rev_data.append({
317 'old_rev': old_rev,
317 'old_rev': old_rev,
318 'new_rev': new_rev,
318 'new_rev': new_rev,
319 'ref': ref,
319 'ref': ref,
320 'type': ref_data[1],
320 'type': ref_data[1],
321 'name': ref_data[2],
321 'name': ref_data[2],
322 })
322 })
323
323
324 git_revs = []
324 git_revs = []
325
325
326 # N.B.(skreft): it is ok to just call git, as git before calling a
326 # N.B.(skreft): it is ok to just call git, as git before calling a
327 # subcommand sets the PATH environment variable so that it points to the
327 # subcommand sets the PATH environment variable so that it points to the
328 # correct version of the git executable.
328 # correct version of the git executable.
329 empty_commit_id = '0' * 40
329 empty_commit_id = '0' * 40
330 for push_ref in rev_data:
330 for push_ref in rev_data:
331 type_ = push_ref['type']
331 type_ = push_ref['type']
332 if type_ == 'heads':
332 if type_ == 'heads':
333 if push_ref['old_rev'] == empty_commit_id:
333 if push_ref['old_rev'] == empty_commit_id:
334
334
335 # Fix up head revision if needed
335 # Fix up head revision if needed
336 cmd = ['git', 'show', 'HEAD']
336 cmd = ['git', 'show', 'HEAD']
337 try:
337 try:
338 _run_command(cmd)
338 _run_command(cmd)
339 except Exception:
339 except Exception:
340 cmd = ['git', 'symbolic-ref', 'HEAD',
340 cmd = ['git', 'symbolic-ref', 'HEAD',
341 'refs/heads/%s' % push_ref['name']]
341 'refs/heads/%s' % push_ref['name']]
342 print "Setting default branch to %s" % push_ref['name']
342 print "Setting default branch to %s" % push_ref['name']
343 _run_command(cmd)
343 _run_command(cmd)
344
344
345 cmd = ['git', 'for-each-ref', '--format=%(refname)',
345 cmd = ['git', 'for-each-ref', '--format=%(refname)',
346 'refs/heads/*']
346 'refs/heads/*']
347 heads = _run_command(cmd)
347 heads = _run_command(cmd)
348 heads = heads.replace(push_ref['ref'], '')
348 heads = heads.replace(push_ref['ref'], '')
349 heads = ' '.join(head for head in heads.splitlines() if head)
349 heads = ' '.join(head for head in heads.splitlines() if head)
350 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
350 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
351 '--', push_ref['new_rev'], '--not', heads]
351 '--', push_ref['new_rev'], '--not', heads]
352 git_revs.extend(_run_command(cmd).splitlines())
352 git_revs.extend(_run_command(cmd).splitlines())
353 elif push_ref['new_rev'] == empty_commit_id:
353 elif push_ref['new_rev'] == empty_commit_id:
354 # delete branch case
354 # delete branch case
355 git_revs.append('delete_branch=>%s' % push_ref['name'])
355 git_revs.append('delete_branch=>%s' % push_ref['name'])
356 else:
356 else:
357 cmd = ['git', 'log',
357 cmd = ['git', 'log',
358 '{old_rev}..{new_rev}'.format(**push_ref),
358 '{old_rev}..{new_rev}'.format(**push_ref),
359 '--reverse', '--pretty=format:%H']
359 '--reverse', '--pretty=format:%H']
360 git_revs.extend(_run_command(cmd).splitlines())
360 git_revs.extend(_run_command(cmd).splitlines())
361 elif type_ == 'tags':
361 elif type_ == 'tags':
362 git_revs.append('tag=>%s' % push_ref['name'])
362 git_revs.append('tag=>%s' % push_ref['name'])
363
363
364 extras['commit_ids'] = git_revs
364 extras['commit_ids'] = git_revs
365
365
366 if 'repo_size' in extras['hooks']:
366 if 'repo_size' in extras['hooks']:
367 try:
367 try:
368 _call_hook('repo_size', extras, GitMessageWriter())
368 _call_hook('repo_size', extras, GitMessageWriter())
369 except:
369 except:
370 pass
370 pass
371
371
372 return _call_hook('post_push', extras, GitMessageWriter())
372 return _call_hook('post_push', extras, GitMessageWriter())
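For reference, a call through HooksHttpClient above serializes the hook name and the extras dict as JSON, POSTs it to the configured hooks URI and expects a JSON dict back containing at least 'status' and 'output' (plus the optional 'exception' / 'exception_args' keys that _handle_exception inspects). An illustrative sketch with made-up values:

    client = HooksHttpClient('127.0.0.1:9000')  # host:port of the hooks endpoint
    result = client('pre_push', {'hooks': ['push'], 'username': 'admin'})
    # expected shape, e.g.: {'status': 0, 'output': ''} plus optional exception info
    print(result['status'], result['output'])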
@@ -1,376 +1,408 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import base64
18 import base64
19 import locale
19 import locale
20 import logging
20 import logging
21 import uuid
21 import uuid
22 import wsgiref.util
22 import wsgiref.util
23 import traceback
23 from itertools import chain
24 from itertools import chain
24
25
25 import msgpack
26 import msgpack
26 from beaker.cache import CacheManager
27 from beaker.cache import CacheManager
27 from beaker.util import parse_cache_config_options
28 from beaker.util import parse_cache_config_options
28 from pyramid.config import Configurator
29 from pyramid.config import Configurator
29 from pyramid.wsgi import wsgiapp
30 from pyramid.wsgi import wsgiapp
30
31
31 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
32 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
32 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
33 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
33 from vcsserver.echo_stub.echo_app import EchoApp
34 from vcsserver.echo_stub.echo_app import EchoApp
34 from vcsserver.exceptions import HTTPRepoLocked
35 from vcsserver.exceptions import HTTPRepoLocked
35 from vcsserver.server import VcsServer
36 from vcsserver.server import VcsServer
36
37
37 try:
38 try:
38 from vcsserver.git import GitFactory, GitRemote
39 from vcsserver.git import GitFactory, GitRemote
39 except ImportError:
40 except ImportError:
40 GitFactory = None
41 GitFactory = None
41 GitRemote = None
42 GitRemote = None
42 try:
43 try:
43 from vcsserver.hg import MercurialFactory, HgRemote
44 from vcsserver.hg import MercurialFactory, HgRemote
44 except ImportError:
45 except ImportError:
45 MercurialFactory = None
46 MercurialFactory = None
46 HgRemote = None
47 HgRemote = None
47 try:
48 try:
48 from vcsserver.svn import SubversionFactory, SvnRemote
49 from vcsserver.svn import SubversionFactory, SvnRemote
49 except ImportError:
50 except ImportError:
50 SubversionFactory = None
51 SubversionFactory = None
51 SvnRemote = None
52 SvnRemote = None
52
53
53 log = logging.getLogger(__name__)
54 log = logging.getLogger(__name__)
54
55
55
56
56 class VCS(object):
57 class VCS(object):
57 def __init__(self, locale=None, cache_config=None):
58 def __init__(self, locale=None, cache_config=None):
58 self.locale = locale
59 self.locale = locale
59 self.cache_config = cache_config
60 self.cache_config = cache_config
60 self._configure_locale()
61 self._configure_locale()
61 self._initialize_cache()
62 self._initialize_cache()
62
63
63 if GitFactory and GitRemote:
64 if GitFactory and GitRemote:
64 git_repo_cache = self.cache.get_cache_region(
65 git_repo_cache = self.cache.get_cache_region(
65 'git', region='repo_object')
66 'git', region='repo_object')
66 git_factory = GitFactory(git_repo_cache)
67 git_factory = GitFactory(git_repo_cache)
67 self._git_remote = GitRemote(git_factory)
68 self._git_remote = GitRemote(git_factory)
68 else:
69 else:
69 log.info("Git client import failed")
70 log.info("Git client import failed")
70
71
71 if MercurialFactory and HgRemote:
72 if MercurialFactory and HgRemote:
72 hg_repo_cache = self.cache.get_cache_region(
73 hg_repo_cache = self.cache.get_cache_region(
73 'hg', region='repo_object')
74 'hg', region='repo_object')
74 hg_factory = MercurialFactory(hg_repo_cache)
75 hg_factory = MercurialFactory(hg_repo_cache)
75 self._hg_remote = HgRemote(hg_factory)
76 self._hg_remote = HgRemote(hg_factory)
76 else:
77 else:
77 log.info("Mercurial client import failed")
78 log.info("Mercurial client import failed")
78
79
79 if SubversionFactory and SvnRemote:
80 if SubversionFactory and SvnRemote:
80 svn_repo_cache = self.cache.get_cache_region(
81 svn_repo_cache = self.cache.get_cache_region(
81 'svn', region='repo_object')
82 'svn', region='repo_object')
82 svn_factory = SubversionFactory(svn_repo_cache)
83 svn_factory = SubversionFactory(svn_repo_cache)
83 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
84 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
84 else:
85 else:
85 log.info("Subversion client import failed")
86 log.info("Subversion client import failed")
86
87
87 self._vcsserver = VcsServer()
88 self._vcsserver = VcsServer()
88
89
89 def _initialize_cache(self):
90 def _initialize_cache(self):
90 cache_config = parse_cache_config_options(self.cache_config)
91 cache_config = parse_cache_config_options(self.cache_config)
91 log.info('Initializing beaker cache: %s' % cache_config)
92 log.info('Initializing beaker cache: %s' % cache_config)
92 self.cache = CacheManager(**cache_config)
93 self.cache = CacheManager(**cache_config)
93
94
94 def _configure_locale(self):
95 def _configure_locale(self):
95 if self.locale:
96 if self.locale:
96 log.info('Setting locale: `LC_ALL` to %s' % self.locale)
97 log.info('Setting locale: `LC_ALL` to %s' % self.locale)
97 else:
98 else:
98 log.info(
99 log.info(
99 'Configuring locale subsystem based on environment variables')
100 'Configuring locale subsystem based on environment variables')
100 try:
101 try:
101 # If self.locale is the empty string, then the locale
102 # If self.locale is the empty string, then the locale
102 # module will use the environment variables. See the
103 # module will use the environment variables. See the
103 # documentation of the package `locale`.
104 # documentation of the package `locale`.
104 locale.setlocale(locale.LC_ALL, self.locale)
105 locale.setlocale(locale.LC_ALL, self.locale)
105
106
106 language_code, encoding = locale.getlocale()
107 language_code, encoding = locale.getlocale()
107 log.info(
108 log.info(
108 'Locale set to language code "%s" with encoding "%s".',
109 'Locale set to language code "%s" with encoding "%s".',
109 language_code, encoding)
110 language_code, encoding)
110 except locale.Error:
111 except locale.Error:
111 log.exception(
112 log.exception(
112 'Cannot set locale, not configuring the locale system')
113 'Cannot set locale, not configuring the locale system')
113
114
114
115
115 class WsgiProxy(object):
116 class WsgiProxy(object):
116 def __init__(self, wsgi):
117 def __init__(self, wsgi):
117 self.wsgi = wsgi
118 self.wsgi = wsgi
118
119
119 def __call__(self, environ, start_response):
120 def __call__(self, environ, start_response):
120 input_data = environ['wsgi.input'].read()
121 input_data = environ['wsgi.input'].read()
121 input_data = msgpack.unpackb(input_data)
122 input_data = msgpack.unpackb(input_data)
122
123
123 error = None
124 error = None
124 try:
125 try:
125 data, status, headers = self.wsgi.handle(
126 data, status, headers = self.wsgi.handle(
126 input_data['environment'], input_data['input_data'],
127 input_data['environment'], input_data['input_data'],
127 *input_data['args'], **input_data['kwargs'])
128 *input_data['args'], **input_data['kwargs'])
128 except Exception as e:
129 except Exception as e:
129 data, status, headers = [], None, None
130 data, status, headers = [], None, None
130 error = {
131 error = {
131 'message': str(e),
132 'message': str(e),
132 '_vcs_kind': getattr(e, '_vcs_kind', None)
133 '_vcs_kind': getattr(e, '_vcs_kind', None)
133 }
134 }
134
135
135 start_response(200, {})
136 start_response(200, {})
136 return self._iterator(error, status, headers, data)
137 return self._iterator(error, status, headers, data)
137
138
138 def _iterator(self, error, status, headers, data):
139 def _iterator(self, error, status, headers, data):
139 initial_data = [
140 initial_data = [
140 error,
141 error,
141 status,
142 status,
142 headers,
143 headers,
143 ]
144 ]
144
145
145 for d in chain(initial_data, data):
146 for d in chain(initial_data, data):
146 yield msgpack.packb(d)
147 yield msgpack.packb(d)
147
148
148
149
149 class HTTPApplication(object):
150 class HTTPApplication(object):
150 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
151 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
151
152
152 remote_wsgi = remote_wsgi
153 remote_wsgi = remote_wsgi
153 _use_echo_app = False
154 _use_echo_app = False
154
155
155 def __init__(self, settings=None):
156 def __init__(self, settings=None):
156 self.config = Configurator(settings=settings)
157 self.config = Configurator(settings=settings)
157 locale = settings.get('', 'en_US.UTF-8')
158 locale = settings.get('locale', '') or 'en_US.UTF-8'
158 vcs = VCS(locale=locale, cache_config=settings)
159 vcs = VCS(locale=locale, cache_config=settings)
159 self._remotes = {
160 self._remotes = {
160 'hg': vcs._hg_remote,
161 'hg': vcs._hg_remote,
161 'git': vcs._git_remote,
162 'git': vcs._git_remote,
162 'svn': vcs._svn_remote,
163 'svn': vcs._svn_remote,
163 'server': vcs._vcsserver,
164 'server': vcs._vcsserver,
164 }
165 }
165 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
166 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
166 self._use_echo_app = True
167 self._use_echo_app = True
167 log.warning("Using EchoApp for VCS operations.")
168 log.warning("Using EchoApp for VCS operations.")
168 self.remote_wsgi = remote_wsgi_stub
169 self.remote_wsgi = remote_wsgi_stub
169 self._configure_settings(settings)
170 self._configure_settings(settings)
170 self._configure()
171 self._configure()
171
172
172 def _configure_settings(self, app_settings):
173 def _configure_settings(self, app_settings):
173 """
174 """
174 Configure the settings module.
175 Configure the settings module.
175 """
176 """
176 git_path = app_settings.get('git_path', None)
177 git_path = app_settings.get('git_path', None)
177 if git_path:
178 if git_path:
178 settings.GIT_EXECUTABLE = git_path
179 settings.GIT_EXECUTABLE = git_path
179
180
180 def _configure(self):
181 def _configure(self):
181 self.config.add_renderer(
182 self.config.add_renderer(
182 name='msgpack',
183 name='msgpack',
183 factory=self._msgpack_renderer_factory)
184 factory=self._msgpack_renderer_factory)
184
185
185 self.config.add_route('service', '/_service')
186 self.config.add_route('service', '/_service')
186 self.config.add_route('status', '/status')
187 self.config.add_route('status', '/status')
187 self.config.add_route('hg_proxy', '/proxy/hg')
188 self.config.add_route('hg_proxy', '/proxy/hg')
188 self.config.add_route('git_proxy', '/proxy/git')
189 self.config.add_route('git_proxy', '/proxy/git')
189 self.config.add_route('vcs', '/{backend}')
190 self.config.add_route('vcs', '/{backend}')
190 self.config.add_route('stream_git', '/stream/git/*repo_name')
191 self.config.add_route('stream_git', '/stream/git/*repo_name')
191 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
192 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
192
193
193 self.config.add_view(
194 self.config.add_view(
194 self.status_view, route_name='status', renderer='json')
195 self.status_view, route_name='status', renderer='json')
195 self.config.add_view(
196 self.config.add_view(
196 self.service_view, route_name='service', renderer='msgpack')
197 self.service_view, route_name='service', renderer='msgpack')
197
198
198 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
199 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
199 self.config.add_view(self.git_proxy(), route_name='git_proxy')
200 self.config.add_view(self.git_proxy(), route_name='git_proxy')
200 self.config.add_view(
201 self.config.add_view(
201 self.vcs_view, route_name='vcs', renderer='msgpack')
202 self.vcs_view, route_name='vcs', renderer='msgpack',
203 custom_predicates=[self.is_vcs_view])
202
204
203 self.config.add_view(self.hg_stream(), route_name='stream_hg')
205 self.config.add_view(self.hg_stream(), route_name='stream_hg')
204 self.config.add_view(self.git_stream(), route_name='stream_git')
206 self.config.add_view(self.git_stream(), route_name='stream_git')
207
208 def notfound(request):
209 return {'status': '404 NOT FOUND'}
210 self.config.add_notfound_view(notfound, renderer='json')
211
205 self.config.add_view(
212 self.config.add_view(
206 self.handle_vcs_exception, context=Exception,
213 self.handle_vcs_exception, context=Exception,
207 custom_predicates=[self.is_vcs_exception])
214 custom_predicates=[self.is_vcs_exception])
208
215
216 self.config.add_view(
217 self.general_error_handler, context=Exception)
218
219 self.config.add_tween(
220 'vcsserver.tweens.RequestWrapperTween',
221 )
222
209 def wsgi_app(self):
223 def wsgi_app(self):
210 return self.config.make_wsgi_app()
224 return self.config.make_wsgi_app()
211
225
212 def vcs_view(self, request):
226 def vcs_view(self, request):
213 remote = self._remotes[request.matchdict['backend']]
227 remote = self._remotes[request.matchdict['backend']]
214 payload = msgpack.unpackb(request.body, use_list=True)
228 payload = msgpack.unpackb(request.body, use_list=True)
215 method = payload.get('method')
229 method = payload.get('method')
216 params = payload.get('params')
230 params = payload.get('params')
217 wire = params.get('wire')
231 wire = params.get('wire')
218 args = params.get('args')
232 args = params.get('args')
219 kwargs = params.get('kwargs')
233 kwargs = params.get('kwargs')
220 if wire:
234 if wire:
221 try:
235 try:
222 wire['context'] = uuid.UUID(wire['context'])
236 wire['context'] = uuid.UUID(wire['context'])
223 except KeyError:
237 except KeyError:
224 pass
238 pass
225 args.insert(0, wire)
239 args.insert(0, wire)
226
240
241 log.debug('method called:%s with kwargs:%s', method, kwargs)
227 try:
242 try:
228 resp = getattr(remote, method)(*args, **kwargs)
243 resp = getattr(remote, method)(*args, **kwargs)
229 except Exception as e:
244 except Exception as e:
245 tb_info = traceback.format_exc()
246
230 type_ = e.__class__.__name__
247 type_ = e.__class__.__name__
231 if type_ not in self.ALLOWED_EXCEPTIONS:
248 if type_ not in self.ALLOWED_EXCEPTIONS:
232 type_ = None
249 type_ = None
233
250
234 resp = {
251 resp = {
235 'id': payload.get('id'),
252 'id': payload.get('id'),
236 'error': {
253 'error': {
237 'message': e.message,
254 'message': e.message,
255 'traceback': tb_info,
238 'type': type_
256 'type': type_
239 }
257 }
240 }
258 }
241 try:
259 try:
242 resp['error']['_vcs_kind'] = e._vcs_kind
260 resp['error']['_vcs_kind'] = e._vcs_kind
243 except AttributeError:
261 except AttributeError:
244 pass
262 pass
245 else:
263 else:
246 resp = {
264 resp = {
247 'id': payload.get('id'),
265 'id': payload.get('id'),
248 'result': resp
266 'result': resp
249 }
267 }
250
268
251 return resp
269 return resp
252
270
253 def status_view(self, request):
271 def status_view(self, request):
254 return {'status': 'OK'}
272 return {'status': 'OK'}
255
273
256 def service_view(self, request):
274 def service_view(self, request):
257 import vcsserver
275 import vcsserver
258 payload = msgpack.unpackb(request.body, use_list=True)
276 payload = msgpack.unpackb(request.body, use_list=True)
259 resp = {
277 resp = {
260 'id': payload.get('id'),
278 'id': payload.get('id'),
261 'result': dict(
279 'result': dict(
262 version=vcsserver.__version__,
280 version=vcsserver.__version__,
263 config={},
281 config={},
264 payload=payload,
282 payload=payload,
265 )
283 )
266 }
284 }
267 return resp
285 return resp
268
286
269 def _msgpack_renderer_factory(self, info):
287 def _msgpack_renderer_factory(self, info):
270 def _render(value, system):
288 def _render(value, system):
271 value = msgpack.packb(value)
289 value = msgpack.packb(value)
272 request = system.get('request')
290 request = system.get('request')
273 if request is not None:
291 if request is not None:
274 response = request.response
292 response = request.response
275 ct = response.content_type
293 ct = response.content_type
276 if ct == response.default_content_type:
294 if ct == response.default_content_type:
277 response.content_type = 'application/x-msgpack'
295 response.content_type = 'application/x-msgpack'
278 return value
296 return value
279 return _render
297 return _render
280
298
281 def hg_proxy(self):
299 def hg_proxy(self):
282 @wsgiapp
300 @wsgiapp
283 def _hg_proxy(environ, start_response):
301 def _hg_proxy(environ, start_response):
284 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
302 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
285 return app(environ, start_response)
303 return app(environ, start_response)
286 return _hg_proxy
304 return _hg_proxy
287
305
288 def git_proxy(self):
306 def git_proxy(self):
289 @wsgiapp
307 @wsgiapp
290 def _git_proxy(environ, start_response):
308 def _git_proxy(environ, start_response):
291 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
309 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
292 return app(environ, start_response)
310 return app(environ, start_response)
293 return _git_proxy
311 return _git_proxy
294
312
295 def hg_stream(self):
313 def hg_stream(self):
296 if self._use_echo_app:
314 if self._use_echo_app:
297 @wsgiapp
315 @wsgiapp
298 def _hg_stream(environ, start_response):
316 def _hg_stream(environ, start_response):
299 app = EchoApp('fake_path', 'fake_name', None)
317 app = EchoApp('fake_path', 'fake_name', None)
300 return app(environ, start_response)
318 return app(environ, start_response)
301 return _hg_stream
319 return _hg_stream
302 else:
320 else:
303 @wsgiapp
321 @wsgiapp
304 def _hg_stream(environ, start_response):
322 def _hg_stream(environ, start_response):
305 repo_path = environ['HTTP_X_RC_REPO_PATH']
323 repo_path = environ['HTTP_X_RC_REPO_PATH']
306 repo_name = environ['HTTP_X_RC_REPO_NAME']
324 repo_name = environ['HTTP_X_RC_REPO_NAME']
307 packed_config = base64.b64decode(
325 packed_config = base64.b64decode(
308 environ['HTTP_X_RC_REPO_CONFIG'])
326 environ['HTTP_X_RC_REPO_CONFIG'])
309 config = msgpack.unpackb(packed_config)
327 config = msgpack.unpackb(packed_config)
310 app = scm_app.create_hg_wsgi_app(
328 app = scm_app.create_hg_wsgi_app(
311 repo_path, repo_name, config)
329 repo_path, repo_name, config)
312
330
313 # Consistent path information for hgweb
331 # Consistent path information for hgweb
314 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
332 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
315 environ['REPO_NAME'] = repo_name
333 environ['REPO_NAME'] = repo_name
316 return app(environ, ResponseFilter(start_response))
334 return app(environ, ResponseFilter(start_response))
317 return _hg_stream
335 return _hg_stream
318
336
319 def git_stream(self):
337 def git_stream(self):
320 if self._use_echo_app:
338 if self._use_echo_app:
321 @wsgiapp
339 @wsgiapp
322 def _git_stream(environ, start_response):
340 def _git_stream(environ, start_response):
323 app = EchoApp('fake_path', 'fake_name', None)
341 app = EchoApp('fake_path', 'fake_name', None)
324 return app(environ, start_response)
342 return app(environ, start_response)
325 return _git_stream
343 return _git_stream
326 else:
344 else:
327 @wsgiapp
345 @wsgiapp
328 def _git_stream(environ, start_response):
346 def _git_stream(environ, start_response):
329 repo_path = environ['HTTP_X_RC_REPO_PATH']
347 repo_path = environ['HTTP_X_RC_REPO_PATH']
330 repo_name = environ['HTTP_X_RC_REPO_NAME']
348 repo_name = environ['HTTP_X_RC_REPO_NAME']
331 packed_config = base64.b64decode(
349 packed_config = base64.b64decode(
332 environ['HTTP_X_RC_REPO_CONFIG'])
350 environ['HTTP_X_RC_REPO_CONFIG'])
333 config = msgpack.unpackb(packed_config)
351 config = msgpack.unpackb(packed_config)
334
352
335 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
353 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
336 app = scm_app.create_git_wsgi_app(
354 app = scm_app.create_git_wsgi_app(
337 repo_path, repo_name, config)
355 repo_path, repo_name, config)
338 return app(environ, start_response)
356 return app(environ, start_response)
339 return _git_stream
357 return _git_stream
340
358
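Both stream handlers above rebuild the repository configuration from request headers. A hedged sketch of how a caller could produce those headers; the header names follow from the environ keys read above, while the paths and config contents are invented:

import base64
import msgpack

repo_config = {'extensions': {}}   # hypothetical repository config
headers = {
    'X-RC-REPO-PATH': '/srv/repos/example',
    'X-RC-REPO-NAME': 'example',
    'X-RC-PATH-INFO': '/example',
    'X-RC-REPO-CONFIG': base64.b64encode(msgpack.packb(repo_config)),
}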
359 def is_vcs_view(self, context, request):
360 """
361 View predicate that returns true if the given backend is supported
362 by the defined remotes.
363 """
364 backend = request.matchdict.get('backend')
365 return backend in self._remotes
366
341 def is_vcs_exception(self, context, request):
367 def is_vcs_exception(self, context, request):
342 """
368 """
343 View predicate that returns true if the context object is a VCS
369 View predicate that returns true if the context object is a VCS
344 exception.
370 exception.
345 """
371 """
346 return hasattr(context, '_vcs_kind')
372 return hasattr(context, '_vcs_kind')
347
373
348 def handle_vcs_exception(self, exception, request):
374 def handle_vcs_exception(self, exception, request):
349 if exception._vcs_kind == 'repo_locked':
375 if exception._vcs_kind == 'repo_locked':
350 # Get custom repo-locked status code if present.
376 # Get custom repo-locked status code if present.
351 status_code = request.headers.get('X-RC-Locked-Status-Code')
377 status_code = request.headers.get('X-RC-Locked-Status-Code')
352 return HTTPRepoLocked(
378 return HTTPRepoLocked(
353 title=exception.message, status_code=status_code)
379 title=exception.message, status_code=status_code)
354
380
355 # Re-raise exception if we can not handle it.
381 # Re-raise exception if we can not handle it.
356 raise exception
382 raise exception
357
383
384 def general_error_handler(self, exception, request):
385 log.exception(
386 'error occurred handling this request for path: %s',
387 request.path)
388 raise exception
389
358
390
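The predicates and handlers above are plain callables; one way they could be attached as Pyramid exception views is sketched below. This is an assumption about the wiring, not the project's confirmed configuration, and `config`/`app` are placeholders:

def wire_exception_views(config, app):
    # only exceptions carrying a `_vcs_kind` attribute reach handle_vcs_exception;
    # everything else falls through to the catch-all general_error_handler
    config.add_view(
        app.handle_vcs_exception, context=Exception,
        custom_predicates=[app.is_vcs_exception])
    config.add_view(app.general_error_handler, context=Exception)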
359 class ResponseFilter(object):
391 class ResponseFilter(object):
360
392
361 def __init__(self, start_response):
393 def __init__(self, start_response):
362 self._start_response = start_response
394 self._start_response = start_response
363
395
364 def __call__(self, status, response_headers, exc_info=None):
396 def __call__(self, status, response_headers, exc_info=None):
365 headers = tuple(
397 headers = tuple(
366 (h, v) for h, v in response_headers
398 (h, v) for h, v in response_headers
367 if not wsgiref.util.is_hop_by_hop(h))
399 if not wsgiref.util.is_hop_by_hop(h))
368 return self._start_response(status, headers, exc_info)
400 return self._start_response(status, headers, exc_info)
369
401
370
402
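What ResponseFilter does can be shown in isolation: wsgiref classifies headers such as Connection or Transfer-Encoding as hop-by-hop, and those are dropped before start_response is called. The header values below are examples only:

import wsgiref.util

raw_headers = [
    ('Connection', 'keep-alive'),         # hop-by-hop: dropped
    ('Transfer-Encoding', 'chunked'),     # hop-by-hop: dropped
    ('Content-Type', 'application/x-git-upload-pack-result'),  # kept
]
kept = [(h, v) for h, v in raw_headers if not wsgiref.util.is_hop_by_hop(h)]
assert kept == [('Content-Type', 'application/x-git-upload-pack-result')]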
371 def main(global_config, **settings):
403 def main(global_config, **settings):
372 if MercurialFactory:
404 if MercurialFactory:
373 hgpatches.patch_largefiles_capabilities()
405 hgpatches.patch_largefiles_capabilities()
374 hgpatches.patch_subrepo_type_mapping()
406 hgpatches.patch_subrepo_type_mapping()
375 app = HTTPApplication(settings=settings)
407 app = HTTPApplication(settings=settings)
376 return app.wsgi_app()
408 return app.wsgi_app()
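For local experiments the factory above can be served with wsgiref. This is only a sketch: the settings dict is a placeholder (real deployments supply the values from the ini file) and the port is arbitrary:

from wsgiref.simple_server import make_server

settings = {}   # placeholder; a real run needs the settings from the ini file
application = main({}, **settings)
make_server('127.0.0.1', 9900, application).serve_forever()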
@@ -1,508 +1,508 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import atexit
18 import atexit
19 import locale
19 import locale
20 import logging
20 import logging
21 import optparse
21 import optparse
22 import os
22 import os
23 import textwrap
23 import textwrap
24 import threading
24 import threading
25 import sys
25 import sys
26
26
27 import configobj
27 import configobj
28 import Pyro4
28 import Pyro4
29 from beaker.cache import CacheManager
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
30 from beaker.util import parse_cache_config_options
31
31
32 try:
32 try:
33 from vcsserver.git import GitFactory, GitRemote
33 from vcsserver.git import GitFactory, GitRemote
34 except ImportError:
34 except ImportError:
35 GitFactory = None
35 GitFactory = None
36 GitRemote = None
36 GitRemote = None
37 try:
37 try:
38 from vcsserver.hg import MercurialFactory, HgRemote
38 from vcsserver.hg import MercurialFactory, HgRemote
39 except ImportError:
39 except ImportError:
40 MercurialFactory = None
40 MercurialFactory = None
41 HgRemote = None
41 HgRemote = None
42 try:
42 try:
43 from vcsserver.svn import SubversionFactory, SvnRemote
43 from vcsserver.svn import SubversionFactory, SvnRemote
44 except ImportError:
44 except ImportError:
45 SubversionFactory = None
45 SubversionFactory = None
46 SvnRemote = None
46 SvnRemote = None
47
47
48 from server import VcsServer
48 from server import VcsServer
49 from vcsserver import hgpatches, remote_wsgi, settings
49 from vcsserver import hgpatches, remote_wsgi, settings
50 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
50 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54 HERE = os.path.dirname(os.path.abspath(__file__))
54 HERE = os.path.dirname(os.path.abspath(__file__))
55 SERVER_RUNNING_FILE = None
55 SERVER_RUNNING_FILE = None
56
56
57
57
58 # HOOKS - inspired by gunicorn #
58 # HOOKS - inspired by gunicorn #
59
59
60 def when_ready(server):
60 def when_ready(server):
61 """
61 """
62 Called just after the server is started.
62 Called just after the server is started.
63 """
63 """
64
64
65 def _remove_server_running_file():
65 def _remove_server_running_file():
66 if os.path.isfile(SERVER_RUNNING_FILE):
66 if os.path.isfile(SERVER_RUNNING_FILE):
67 os.remove(SERVER_RUNNING_FILE)
67 os.remove(SERVER_RUNNING_FILE)
68
68
69 # write the server PID into the configured running file
69 # write the server PID into the configured running file
70 if SERVER_RUNNING_FILE:
70 if SERVER_RUNNING_FILE:
71 with open(SERVER_RUNNING_FILE, 'wb') as f:
71 with open(SERVER_RUNNING_FILE, 'wb') as f:
72 f.write(str(os.getpid()))
72 f.write(str(os.getpid()))
73 # register cleanup of that file when server exits
73 # register cleanup of that file when server exits
74 atexit.register(_remove_server_running_file)
74 atexit.register(_remove_server_running_file)
75
75
76
76
77 class LazyWriter(object):
77 class LazyWriter(object):
78 """
78 """
79 File-like object that opens a file lazily when it is first written
79 File-like object that opens a file lazily when it is first written
80 to.
80 to.
81 """
81 """
82
82
83 def __init__(self, filename, mode='w'):
83 def __init__(self, filename, mode='w'):
84 self.filename = filename
84 self.filename = filename
85 self.fileobj = None
85 self.fileobj = None
86 self.lock = threading.Lock()
86 self.lock = threading.Lock()
87 self.mode = mode
87 self.mode = mode
88
88
89 def open(self):
89 def open(self):
90 if self.fileobj is None:
90 if self.fileobj is None:
91 with self.lock:
91 with self.lock:
92 self.fileobj = open(self.filename, self.mode)
92 self.fileobj = open(self.filename, self.mode)
93 return self.fileobj
93 return self.fileobj
94
94
95 def close(self):
95 def close(self):
96 fileobj = self.fileobj
96 fileobj = self.fileobj
97 if fileobj is not None:
97 if fileobj is not None:
98 fileobj.close()
98 fileobj.close()
99
99
100 def __del__(self):
100 def __del__(self):
101 self.close()
101 self.close()
102
102
103 def write(self, text):
103 def write(self, text):
104 fileobj = self.open()
104 fileobj = self.open()
105 fileobj.write(text)
105 fileobj.write(text)
106 fileobj.flush()
106 fileobj.flush()
107
107
108 def writelines(self, text):
108 def writelines(self, text):
109 fileobj = self.open()
109 fileobj = self.open()
110 fileobj.writelines(text)
110 fileobj.writelines(text)
111 fileobj.flush()
111 fileobj.flush()
112
112
113 def flush(self):
113 def flush(self):
114 self.open().flush()
114 self.open().flush()
115
115
116
116
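LazyWriter exists so stdout/stderr can be redirected before it is known whether the log file will ever be written; nothing touches the filesystem until the first write. A small usage sketch with a hypothetical path:

writer = LazyWriter('/tmp/vcsserver-stdout.log', 'a')
writer.write('server starting\n')   # the file is opened here, on first use
writer.flush()
writer.close()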
117 class Application(object):
117 class Application(object):
118 """
118 """
119 Represents the vcs server application.
119 Represents the vcs server application.
120
120
121 This object is responsible for initializing the application and all needed
121 This object is responsible for initializing the application and all needed
122 libraries. After that, it hooks the different objects together and provides
122 libraries. After that, it hooks the different objects together and provides
123 them a way to access things like the configuration.
123 them a way to access things like the configuration.
124 """
124 """
125
125
126 def __init__(
126 def __init__(
127 self, host, port=None, locale='', threadpool_size=None,
127 self, host, port=None, locale='', threadpool_size=None,
128 timeout=None, cache_config=None, remote_wsgi_=None):
128 timeout=None, cache_config=None, remote_wsgi_=None):
129
129
130 self.host = host
130 self.host = host
131 self.port = int(port) or settings.PYRO_PORT
131 self.port = int(port) or settings.PYRO_PORT
132 self.threadpool_size = (
132 self.threadpool_size = (
133 int(threadpool_size) if threadpool_size else None)
133 int(threadpool_size) if threadpool_size else None)
134 self.locale = locale
134 self.locale = locale
135 self.timeout = timeout
135 self.timeout = timeout
136 self.cache_config = cache_config
136 self.cache_config = cache_config
137 self.remote_wsgi = remote_wsgi_ or remote_wsgi
137 self.remote_wsgi = remote_wsgi_ or remote_wsgi
138
138
139 def init(self):
139 def init(self):
140 """
140 """
141 Configure and hook together all relevant objects.
141 Configure and hook together all relevant objects.
142 """
142 """
143 self._configure_locale()
143 self._configure_locale()
144 self._configure_pyro()
144 self._configure_pyro()
145 self._initialize_cache()
145 self._initialize_cache()
146 self._create_daemon_and_remote_objects(host=self.host, port=self.port)
146 self._create_daemon_and_remote_objects(host=self.host, port=self.port)
147
147
148 def run(self):
148 def run(self):
149 """
149 """
150 Start the main loop of the application.
150 Start the main loop of the application.
151 """
151 """
152
152
153 if hasattr(os, 'getpid'):
153 if hasattr(os, 'getpid'):
154 log.info('Starting %s in PID %i.', __name__, os.getpid())
154 log.info('Starting %s in PID %i.', __name__, os.getpid())
155 else:
155 else:
156 log.info('Starting %s.', __name__)
156 log.info('Starting %s.', __name__)
157 if SERVER_RUNNING_FILE:
157 if SERVER_RUNNING_FILE:
158 log.info('PID file written as %s', SERVER_RUNNING_FILE)
158 log.info('PID file written as %s', SERVER_RUNNING_FILE)
159 else:
159 else:
160 log.info('No PID file written by default.')
160 log.info('No PID file written by default.')
161 when_ready(self)
161 when_ready(self)
162 try:
162 try:
163 self._pyrodaemon.requestLoop(
163 self._pyrodaemon.requestLoop(
164 loopCondition=lambda: not self._vcsserver._shutdown)
164 loopCondition=lambda: not self._vcsserver._shutdown)
165 finally:
165 finally:
166 self._pyrodaemon.shutdown()
166 self._pyrodaemon.shutdown()
167
167
168 def _configure_locale(self):
168 def _configure_locale(self):
169 if self.locale:
169 if self.locale:
170 log.info('Setting locale `LC_ALL` to %s' % self.locale)
170 log.info('Setting locale `LC_ALL` to %s' % self.locale)
171 else:
171 else:
172 log.info(
172 log.info(
173 'Configuring locale subsystem based on environment variables')
173 'Configuring locale subsystem based on environment variables')
174
174
175 try:
175 try:
176 # If self.locale is the empty string, then the locale
176 # If self.locale is the empty string, then the locale
177 # module will use the environment variables. See the
177 # module will use the environment variables. See the
178 # documentation of the package `locale`.
178 # documentation of the package `locale`.
179 locale.setlocale(locale.LC_ALL, self.locale)
179 locale.setlocale(locale.LC_ALL, self.locale)
180
180
181 language_code, encoding = locale.getlocale()
181 language_code, encoding = locale.getlocale()
182 log.info(
182 log.info(
183 'Locale set to language code "%s" with encoding "%s".',
183 'Locale set to language code "%s" with encoding "%s".',
184 language_code, encoding)
184 language_code, encoding)
185 except locale.Error:
185 except locale.Error:
186 log.exception(
186 log.exception(
187 'Cannot set locale, not configuring the locale system')
187 'Cannot set locale, not configuring the locale system')
188
188
189 def _configure_pyro(self):
189 def _configure_pyro(self):
190 if self.threadpool_size is not None:
190 if self.threadpool_size is not None:
191 log.info("Threadpool size set to %s", self.threadpool_size)
191 log.info("Threadpool size set to %s", self.threadpool_size)
192 Pyro4.config.THREADPOOL_SIZE = self.threadpool_size
192 Pyro4.config.THREADPOOL_SIZE = self.threadpool_size
193 if self.timeout not in (None, 0, 0.0, '0'):
193 if self.timeout not in (None, 0, 0.0, '0'):
194 log.info("Timeout for RPC calls set to %s seconds", self.timeout)
194 log.info("Timeout for RPC calls set to %s seconds", self.timeout)
195 Pyro4.config.COMMTIMEOUT = float(self.timeout)
195 Pyro4.config.COMMTIMEOUT = float(self.timeout)
196 Pyro4.config.SERIALIZER = 'pickle'
196 Pyro4.config.SERIALIZER = 'pickle'
197 Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
197 Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
198 Pyro4.config.SOCK_REUSE = True
198 Pyro4.config.SOCK_REUSE = True
199 # Uncomment the next line when you need to debug remote errors
199 # Uncomment the next line when you need to debug remote errors
200 # Pyro4.config.DETAILED_TRACEBACK = True
200 # Pyro4.config.DETAILED_TRACEBACK = True
201
201
202 def _initialize_cache(self):
202 def _initialize_cache(self):
203 cache_config = parse_cache_config_options(self.cache_config)
203 cache_config = parse_cache_config_options(self.cache_config)
204 log.info('Initializing beaker cache: %s' % cache_config)
204 log.info('Initializing beaker cache: %s' % cache_config)
205 self.cache = CacheManager(**cache_config)
205 self.cache = CacheManager(**cache_config)
206
206
207 def _create_daemon_and_remote_objects(self, host='localhost',
207 def _create_daemon_and_remote_objects(self, host='localhost',
208 port=settings.PYRO_PORT):
208 port=settings.PYRO_PORT):
209 daemon = Pyro4.Daemon(host=host, port=port)
209 daemon = Pyro4.Daemon(host=host, port=port)
210
210
211 self._vcsserver = VcsServer()
211 self._vcsserver = VcsServer()
212 uri = daemon.register(
212 uri = daemon.register(
213 self._vcsserver, objectId=settings.PYRO_VCSSERVER)
213 self._vcsserver, objectId=settings.PYRO_VCSSERVER)
214 log.info("Object registered = %s", uri)
214 log.info("Object registered = %s", uri)
215
215
216 if GitFactory and GitRemote:
216 if GitFactory and GitRemote:
217 git_repo_cache = self.cache.get_cache_region('git', region='repo_object')
217 git_repo_cache = self.cache.get_cache_region('git', region='repo_object')
218 git_factory = GitFactory(git_repo_cache)
218 git_factory = GitFactory(git_repo_cache)
219 self._git_remote = GitRemote(git_factory)
219 self._git_remote = GitRemote(git_factory)
220 uri = daemon.register(self._git_remote, objectId=settings.PYRO_GIT)
220 uri = daemon.register(self._git_remote, objectId=settings.PYRO_GIT)
221 log.info("Object registered = %s", uri)
221 log.info("Object registered = %s", uri)
222 else:
222 else:
223 log.info("Git client import failed")
223 log.info("Git client import failed")
224
224
225 if MercurialFactory and HgRemote:
225 if MercurialFactory and HgRemote:
226 hg_repo_cache = self.cache.get_cache_region('hg', region='repo_object')
226 hg_repo_cache = self.cache.get_cache_region('hg', region='repo_object')
227 hg_factory = MercurialFactory(hg_repo_cache)
227 hg_factory = MercurialFactory(hg_repo_cache)
228 self._hg_remote = HgRemote(hg_factory)
228 self._hg_remote = HgRemote(hg_factory)
229 uri = daemon.register(self._hg_remote, objectId=settings.PYRO_HG)
229 uri = daemon.register(self._hg_remote, objectId=settings.PYRO_HG)
230 log.info("Object registered = %s", uri)
230 log.info("Object registered = %s", uri)
231 else:
231 else:
232 log.info("Mercurial client import failed")
232 log.info("Mercurial client import failed")
233
233
234 if SubversionFactory and SvnRemote:
234 if SubversionFactory and SvnRemote:
235 svn_repo_cache = self.cache.get_cache_region('svn', region='repo_object')
235 svn_repo_cache = self.cache.get_cache_region('svn', region='repo_object')
236 svn_factory = SubversionFactory(svn_repo_cache)
236 svn_factory = SubversionFactory(svn_repo_cache)
237 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
237 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
238 uri = daemon.register(self._svn_remote, objectId=settings.PYRO_SVN)
238 uri = daemon.register(self._svn_remote, objectId=settings.PYRO_SVN)
239 log.info("Object registered = %s", uri)
239 log.info("Object registered = %s", uri)
240 else:
240 else:
241 log.info("Subversion client import failed")
241 log.info("Subversion client import failed")
242
242
243 self._git_remote_wsgi = self.remote_wsgi.GitRemoteWsgi()
243 self._git_remote_wsgi = self.remote_wsgi.GitRemoteWsgi()
244 uri = daemon.register(self._git_remote_wsgi,
244 uri = daemon.register(self._git_remote_wsgi,
245 objectId=settings.PYRO_GIT_REMOTE_WSGI)
245 objectId=settings.PYRO_GIT_REMOTE_WSGI)
246 log.info("Object registered = %s", uri)
246 log.info("Object registered = %s", uri)
247
247
248 self._hg_remote_wsgi = self.remote_wsgi.HgRemoteWsgi()
248 self._hg_remote_wsgi = self.remote_wsgi.HgRemoteWsgi()
249 uri = daemon.register(self._hg_remote_wsgi,
249 uri = daemon.register(self._hg_remote_wsgi,
250 objectId=settings.PYRO_HG_REMOTE_WSGI)
250 objectId=settings.PYRO_HG_REMOTE_WSGI)
251 log.info("Object registered = %s", uri)
251 log.info("Object registered = %s", uri)
252
252
253 self._pyrodaemon = daemon
253 self._pyrodaemon = daemon
254
254
255
255
256 class VcsServerCommand(object):
256 class VcsServerCommand(object):
257
257
258 usage = '%prog'
258 usage = '%prog'
259 description = """
259 description = """
260 Runs the VCS server
260 Runs the VCS server
261 """
261 """
262 default_verbosity = 1
262 default_verbosity = 1
263
263
264 parser = optparse.OptionParser(
264 parser = optparse.OptionParser(
265 usage,
265 usage,
266 description=textwrap.dedent(description)
266 description=textwrap.dedent(description)
267 )
267 )
268 parser.add_option(
268 parser.add_option(
269 '--host',
269 '--host',
270 type="str",
270 type="str",
271 dest="host",
271 dest="host",
272 )
272 )
273 parser.add_option(
273 parser.add_option(
274 '--port',
274 '--port',
275 type="int",
275 type="int",
276 dest="port"
276 dest="port"
277 )
277 )
278 parser.add_option(
278 parser.add_option(
279 '--running-file',
279 '--running-file',
280 dest='running_file',
280 dest='running_file',
281 metavar='RUNNING_FILE',
281 metavar='RUNNING_FILE',
282 help="Create a running file after the server is initalized with "
282 help="Create a running file after the server is initalized with "
283 "stored PID of process"
283 "stored PID of process"
284 )
284 )
285 parser.add_option(
285 parser.add_option(
286 '--locale',
286 '--locale',
287 dest='locale',
287 dest='locale',
288 help="Allows to set the locale, e.g. en_US.UTF-8",
288 help="Allows to set the locale, e.g. en_US.UTF-8",
289 default=""
289 default=""
290 )
290 )
291 parser.add_option(
291 parser.add_option(
292 '--log-file',
292 '--log-file',
293 dest='log_file',
293 dest='log_file',
294 metavar='LOG_FILE',
294 metavar='LOG_FILE',
295 help="Save output to the given log file (redirects stdout)"
295 help="Save output to the given log file (redirects stdout)"
296 )
296 )
297 parser.add_option(
297 parser.add_option(
298 '--log-level',
298 '--log-level',
299 dest="log_level",
299 dest="log_level",
300 metavar="LOG_LEVEL",
300 metavar="LOG_LEVEL",
301 help="use LOG_LEVEL to set log level "
301 help="use LOG_LEVEL to set log level "
302 "(debug,info,warning,error,critical)"
302 "(debug,info,warning,error,critical)"
303 )
303 )
304 parser.add_option(
304 parser.add_option(
305 '--threadpool',
305 '--threadpool',
306 dest='threadpool_size',
306 dest='threadpool_size',
307 type='int',
307 type='int',
308 help="Set the size of the threadpool used to communicate with the "
308 help="Set the size of the threadpool used to communicate with the "
309 "WSGI workers. This should be at least 6 times the number of "
309 "WSGI workers. This should be at least 6 times the number of "
310 "WSGI worker processes."
310 "WSGI worker processes."
311 )
311 )
312 parser.add_option(
312 parser.add_option(
313 '--timeout',
313 '--timeout',
314 dest='timeout',
314 dest='timeout',
315 type='float',
315 type='float',
316 help="Set the timeout for RPC communication in seconds."
316 help="Set the timeout for RPC communication in seconds."
317 )
317 )
318 parser.add_option(
318 parser.add_option(
319 '--config',
319 '--config',
320 dest='config_file',
320 dest='config_file',
321 type='string',
321 type='string',
322 help="Configuration file for vcsserver."
322 help="Configuration file for vcsserver."
323 )
323 )
324
324
325 def __init__(self, argv, quiet=False):
325 def __init__(self, argv, quiet=False):
326 self.options, self.args = self.parser.parse_args(argv[1:])
326 self.options, self.args = self.parser.parse_args(argv[1:])
327 if quiet:
327 if quiet:
328 self.options.verbose = 0
328 self.options.verbose = 0
329
329
330 def _get_file_config(self):
330 def _get_file_config(self):
331 ini_conf = {}
331 ini_conf = {}
332 conf = configobj.ConfigObj(self.options.config_file)
332 conf = configobj.ConfigObj(self.options.config_file)
333 if 'DEFAULT' in conf:
333 if 'DEFAULT' in conf:
334 ini_conf = conf['DEFAULT']
334 ini_conf = conf['DEFAULT']
335
335
336 return ini_conf
336 return ini_conf
337
337
338 def _show_config(self, vcsserver_config):
338 def _show_config(self, vcsserver_config):
339 order = [
339 order = [
340 'config_file',
340 'config_file',
341 'host',
341 'host',
342 'port',
342 'port',
343 'log_file',
343 'log_file',
344 'log_level',
344 'log_level',
345 'locale',
345 'locale',
346 'threadpool_size',
346 'threadpool_size',
347 'timeout',
347 'timeout',
348 'cache_config',
348 'cache_config',
349 ]
349 ]
350
350
351 def sorter(k):
351 def sorter(k):
352 return dict([(y, x) for x, y in enumerate(order)]).get(k)
352 return dict([(y, x) for x, y in enumerate(order)]).get(k)
353
353
354 _config = []
354 _config = []
355 for k in sorted(vcsserver_config.keys(), key=sorter):
355 for k in sorted(vcsserver_config.keys(), key=sorter):
356 v = vcsserver_config[k]
356 v = vcsserver_config[k]
357 # construct a padded key for display, e.g. '%-20s' % (k + ':') gives aligned 'key:  value' rows
357 # construct a padded key for display, e.g. '%-20s' % (k + ':') gives aligned 'key:  value' rows
358 k_formatted = ('%-'+str(len(max(order, key=len))+1)+'s') % (k+':')
358 k_formatted = ('%-'+str(len(max(order, key=len))+1)+'s') % (k+':')
359 _config.append(' * %s %s' % (k_formatted, v))
359 _config.append(' * %s %s' % (k_formatted, v))
360 log.info('\n[vcsserver configuration]:\n'+'\n'.join(_config))
360 log.info('\n[vcsserver configuration]:\n'+'\n'.join(_config))
361
361
362 def _get_vcsserver_configuration(self):
362 def _get_vcsserver_configuration(self):
363 _defaults = {
363 _defaults = {
364 'config_file': None,
364 'config_file': None,
365 'git_path': 'git',
365 'git_path': 'git',
366 'host': 'localhost',
366 'host': 'localhost',
367 'port': settings.PYRO_PORT,
367 'port': settings.PYRO_PORT,
368 'log_file': None,
368 'log_file': None,
369 'log_level': 'debug',
369 'log_level': 'debug',
370 'locale': None,
370 'locale': None,
371 'threadpool_size': 16,
371 'threadpool_size': 16,
372 'timeout': None,
372 'timeout': None,
373
373
374 # Development support
374 # Development support
375 'dev.use_echo_app': False,
375 'dev.use_echo_app': False,
376
376
377 # caches, beaker-style config
377 # caches, beaker-style config
378 'beaker.cache.regions': 'repo_object',
378 'beaker.cache.regions': 'repo_object',
379 'beaker.cache.repo_object.expire': '10',
379 'beaker.cache.repo_object.expire': '10',
380 'beaker.cache.repo_object.type': 'memory',
380 'beaker.cache.repo_object.type': 'memory',
381 }
381 }
382 config = {}
382 config = {}
383 config.update(_defaults)
383 config.update(_defaults)
384 # overwrite defaults with one loaded from file
384 # overwrite defaults with one loaded from file
385 config.update(self._get_file_config())
385 config.update(self._get_file_config())
386
386
387 # overwrite with self.options, which has the top priority
387 # overwrite with self.options, which has the top priority
388 for k, v in self.options.__dict__.items():
388 for k, v in self.options.__dict__.items():
389 if v or v == 0:
389 if v or v == 0:
390 config[k] = v
390 config[k] = v
391
391
392 # clear all "extra" keys if they are somehow passed,
392 # clear all "extra" keys if they are somehow passed,
393 # we only want defaults, so any extra stuff from self.options is cleared
393 # we only want defaults, so any extra stuff from self.options is cleared
394 # except beaker stuff which needs to be dynamic
394 # except beaker stuff which needs to be dynamic
395 for k in [k for k in config.copy().keys() if not k.startswith('beaker.cache.')]:
395 for k in [k for k in config.copy().keys() if not k.startswith('beaker.cache.')]:
396 if k not in _defaults:
396 if k not in _defaults:
397 del config[k]
397 del config[k]
398
398
399 # group together the cache into one key.
399 # group together the cache into one key.
400 # Needed further for beaker lib configuration
400 # Needed further for beaker lib configuration
401 _k = {}
401 _k = {}
402 for k in [k for k in config.copy() if k.startswith('beaker.cache.')]:
402 for k in [k for k in config.copy() if k.startswith('beaker.cache.')]:
403 _k[k] = config.pop(k)
403 _k[k] = config.pop(k)
404 config['cache_config'] = _k
404 config['cache_config'] = _k
405
405
406 return config
406 return config
407
407
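The beaker.cache.* keys collected into cache_config above are handed to beaker unchanged. A sketch of what the defaults listed earlier produce, mirroring _initialize_cache and the region lookup used when registering the remotes:

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

cache_config = {
    'beaker.cache.regions': 'repo_object',
    'beaker.cache.repo_object.expire': '10',
    'beaker.cache.repo_object.type': 'memory',
}
cache = CacheManager(**parse_cache_config_options(cache_config))
repo_cache = cache.get_cache_region('git', region='repo_object')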
408 def out(self, msg): # pragma: no cover
408 def out(self, msg): # pragma: no cover
409 if self.options.verbose > 0:
409 if self.options.verbose > 0:
410 print(msg)
410 print(msg)
411
411
412 def run(self): # pragma: no cover
412 def run(self): # pragma: no cover
413 vcsserver_config = self._get_vcsserver_configuration()
413 vcsserver_config = self._get_vcsserver_configuration()
414
414
415 # Ensure the log file is writeable
415 # Ensure the log file is writeable
416 if vcsserver_config['log_file']:
416 if vcsserver_config['log_file']:
417 stdout_log = self._configure_logfile()
417 stdout_log = self._configure_logfile()
418 else:
418 else:
419 stdout_log = None
419 stdout_log = None
420
420
421 # set PID file with running lock
421 # set PID file with running lock
422 if self.options.running_file:
422 if self.options.running_file:
423 global SERVER_RUNNING_FILE
423 global SERVER_RUNNING_FILE
424 SERVER_RUNNING_FILE = self.options.running_file
424 SERVER_RUNNING_FILE = self.options.running_file
425
425
426 # configure basic logging, then logging from the configuration file if given
426 # configure basic logging, then logging from the configuration file if given
427 self._configure_logging(level=vcsserver_config['log_level'],
427 self._configure_logging(level=vcsserver_config['log_level'],
428 stream=stdout_log)
428 stream=stdout_log)
429 if self.options.config_file:
429 if self.options.config_file:
430 if not os.path.isfile(self.options.config_file):
430 if not os.path.isfile(self.options.config_file):
431 raise OSError('File %s does not exist' %
431 raise OSError('File %s does not exist' %
432 self.options.config_file)
432 self.options.config_file)
433
433
434 self._configure_file_logging(self.options.config_file)
434 self._configure_file_logging(self.options.config_file)
435
435
436 self._configure_settings(vcsserver_config)
436 self._configure_settings(vcsserver_config)
437
437
438 # display current configuration of vcsserver
438 # display current configuration of vcsserver
439 self._show_config(vcsserver_config)
439 self._show_config(vcsserver_config)
440
440
441 if not vcsserver_config['dev.use_echo_app']:
441 if not vcsserver_config['dev.use_echo_app']:
442 remote_wsgi_mod = remote_wsgi
442 remote_wsgi_mod = remote_wsgi
443 else:
443 else:
444 log.warning("Using EchoApp for VCS endpoints.")
444 log.warning("Using EchoApp for VCS endpoints.")
445 remote_wsgi_mod = remote_wsgi_stub
445 remote_wsgi_mod = remote_wsgi_stub
446
446
447 app = Application(
447 app = Application(
448 host=vcsserver_config['host'],
448 host=vcsserver_config['host'],
449 port=vcsserver_config['port'],
449 port=vcsserver_config['port'],
450 locale=vcsserver_config['locale'],
450 locale=vcsserver_config['locale'],
451 threadpool_size=vcsserver_config['threadpool_size'],
451 threadpool_size=vcsserver_config['threadpool_size'],
452 timeout=vcsserver_config['timeout'],
452 timeout=vcsserver_config['timeout'],
453 cache_config=vcsserver_config['cache_config'],
453 cache_config=vcsserver_config['cache_config'],
454 remote_wsgi_=remote_wsgi_mod)
454 remote_wsgi_=remote_wsgi_mod)
455 app.init()
455 app.init()
456 app.run()
456 app.run()
457
457
458 def _configure_logging(self, level, stream=None):
458 def _configure_logging(self, level, stream=None):
459 _format = (
459 _format = (
460 '%(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s')
460 '%(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s')
461 levels = {
461 levels = {
462 'debug': logging.DEBUG,
462 'debug': logging.DEBUG,
463 'info': logging.INFO,
463 'info': logging.INFO,
464 'warning': logging.WARNING,
464 'warning': logging.WARNING,
465 'error': logging.ERROR,
465 'error': logging.ERROR,
466 'critical': logging.CRITICAL,
466 'critical': logging.CRITICAL,
467 }
467 }
468 try:
468 try:
469 level = levels[level]
469 level = levels[level]
470 except KeyError:
470 except KeyError:
471 raise AttributeError(
471 raise AttributeError(
472 'Invalid log level, please use one of %s' % (levels.keys(),))
472 'Invalid log level, please use one of %s' % (levels.keys(),))
473 logging.basicConfig(format=_format, stream=stream, level=level)
473 logging.basicConfig(format=_format, stream=stream, level=level)
474 logging.getLogger('Pyro4').setLevel(level)
474 logging.getLogger('Pyro4').setLevel(level)
475
475
476 def _configure_file_logging(self, config):
476 def _configure_file_logging(self, config):
477 import logging.config
477 import logging.config
478 try:
478 try:
479 logging.config.fileConfig(config)
479 logging.config.fileConfig(config)
480 except Exception as e:
480 except Exception as e:
481 log.warning('Failed to configure logging based on given '
481 log.warning('Failed to configure logging based on given '
482 'config file. Error: %s' % e)
482 'config file. Error: %s' % e)
483
483
484 def _configure_logfile(self):
484 def _configure_logfile(self):
485 try:
485 try:
486 writeable_log_file = open(self.options.log_file, 'a')
486 writeable_log_file = open(self.options.log_file, 'a')
487 except IOError as ioe:
487 except IOError as ioe:
488 msg = 'Error: Unable to write to log file: %s' % ioe
488 msg = 'Error: Unable to write to log file: %s' % ioe
489 raise ValueError(msg)
489 raise ValueError(msg)
490 writeable_log_file.close()
490 writeable_log_file.close()
491 stdout_log = LazyWriter(self.options.log_file, 'a')
491 stdout_log = LazyWriter(self.options.log_file, 'a')
492 sys.stdout = stdout_log
492 sys.stdout = stdout_log
493 sys.stderr = stdout_log
493 sys.stderr = stdout_log
494 return stdout_log
494 return stdout_log
495
495
496 def _configure_settings(self, config):
496 def _configure_settings(self, config):
497 """
497 """
498 Configure the settings module based on the given `config`.
498 Configure the settings module based on the given `config`.
499 """
499 """
500 settings.GIT_EXECUTABLE = config['git_path']
500 settings.GIT_EXECUTABLE = config['git_path']
501
501
502
502
503 def main(argv=sys.argv, quiet=False):
503 def main(argv=sys.argv, quiet=False):
504 if MercurialFactory:
504 if MercurialFactory:
505 hgpatches.patch_largefiles_capabilities()
505 hgpatches.patch_largefiles_capabilities()
506 hgpatches.patch_subrepo_type_mapping()
506 hgpatches.patch_subrepo_type_mapping()
507 command = VcsServerCommand(argv, quiet=quiet)
507 command = VcsServerCommand(argv, quiet=quiet)
508 return command.run()
508 return command.run()
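_get_file_config only reads the [DEFAULT] section of the configuration file, so a minimal file could look like the sketch below. The values are examples; ConfigObj is fed a list of lines here purely to keep the snippet self-contained:

import configobj

conf = configobj.ConfigObj([
    '[DEFAULT]',
    'host = 127.0.0.1',
    'port = 9900',
    'locale = en_US.UTF-8',
    'beaker.cache.regions = repo_object',
    'beaker.cache.repo_object.type = memory',
])
ini_conf = conf.get('DEFAULT', {})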
@@ -1,375 +1,375 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import simplejson as json
24 import simplejson as json
25 import dulwich.protocol
25 import dulwich.protocol
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver import hooks, subprocessio
28 from vcsserver import hooks, subprocessio
29
29
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class FileWrapper(object):
34 class FileWrapper(object):
35 """File wrapper that ensures how much data is read from it."""
35 """File wrapper that ensures how much data is read from it."""
36
36
37 def __init__(self, fd, content_length):
37 def __init__(self, fd, content_length):
38 self.fd = fd
38 self.fd = fd
39 self.content_length = content_length
39 self.content_length = content_length
40 self.remain = content_length
40 self.remain = content_length
41
41
42 def read(self, size):
42 def read(self, size):
43 if size <= self.remain:
43 if size <= self.remain:
44 try:
44 try:
45 data = self.fd.read(size)
45 data = self.fd.read(size)
46 except socket.error:
46 except socket.error:
47 raise IOError(self)
47 raise IOError(self)
48 self.remain -= size
48 self.remain -= size
49 elif self.remain:
49 elif self.remain:
50 data = self.fd.read(self.remain)
50 data = self.fd.read(self.remain)
51 self.remain = 0
51 self.remain = 0
52 else:
52 else:
53 data = None
53 data = None
54 return data
54 return data
55
55
56 def __repr__(self):
56 def __repr__(self):
57 return '<FileWrapper %s len: %s, read: %s>' % (
57 return '<FileWrapper %s len: %s, read: %s>' % (
58 self.fd, self.content_length, self.content_length - self.remain
58 self.fd, self.content_length, self.content_length - self.remain
59 )
59 )
60
60
61
61
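FileWrapper caps reads at the declared Content-Length, so the subprocess never consumes more of the request body than was advertised. A quick sketch against an in-memory stream:

import io

body = io.BytesIO(b'0123456789')
wrapped = FileWrapper(body, 4)
assert wrapped.read(10) == b'0123'   # only the declared 4 bytes are handed out
assert wrapped.read(10) is None      # nothing remains afterwards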
62 class GitRepository(object):
62 class GitRepository(object):
63 """WSGI app for handling Git smart protocol endpoints."""
63 """WSGI app for handling Git smart protocol endpoints."""
64
64
65 git_folder_signature = frozenset(
65 git_folder_signature = frozenset(
66 ('config', 'head', 'info', 'objects', 'refs'))
66 ('config', 'head', 'info', 'objects', 'refs'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 valid_accepts = frozenset(('application/x-%s-result' %
68 valid_accepts = frozenset(('application/x-%s-result' %
69 c for c in commands))
69 c for c in commands))
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
77
77
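The comment on EMPTY_PACK can be verified directly: the 12-byte header is 'PACK', version 2, zero objects, and the 20 trailing bytes are its SHA-1 trailer.

import hashlib

pack_header = b'PACK\x00\x00\x00\x02\x00\x00\x00\x00'
assert hashlib.sha1(pack_header).hexdigest() == \
    '029d08823bd8a8eab510ad6ac75c823cfd3ed31e'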
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
79 extras):
79 extras):
80 files = frozenset(f.lower() for f in os.listdir(content_path))
80 files = frozenset(f.lower() for f in os.listdir(content_path))
81 valid_dir_signature = self.git_folder_signature.issubset(files)
81 valid_dir_signature = self.git_folder_signature.issubset(files)
82
82
83 if not valid_dir_signature:
83 if not valid_dir_signature:
84 raise OSError('%s missing git signature' % content_path)
84 raise OSError('%s missing git signature' % content_path)
85
85
86 self.content_path = content_path
86 self.content_path = content_path
87 self.repo_name = repo_name
87 self.repo_name = repo_name
88 self.extras = extras
88 self.extras = extras
89 self.git_path = git_path
89 self.git_path = git_path
90 self.update_server_info = update_server_info
90 self.update_server_info = update_server_info
91
91
92 def _get_fixedpath(self, path):
92 def _get_fixedpath(self, path):
93 """
93 """
94 Small fix for repo_path
94 Small fix for repo_path
95
95
96 :param path:
96 :param path:
97 """
97 """
98 return path.split(self.repo_name, 1)[-1].strip('/')
98 return path.split(self.repo_name, 1)[-1].strip('/')
99
99
100 def inforefs(self, request, unused_environ):
100 def inforefs(self, request, unused_environ):
101 """
101 """
102 WSGI Response producer for HTTP GET Git Smart
102 WSGI Response producer for HTTP GET Git Smart
103 HTTP /info/refs request.
103 HTTP /info/refs request.
104 """
104 """
105
105
106 git_command = request.GET.get('service')
106 git_command = request.GET.get('service')
107 if git_command not in self.commands:
107 if git_command not in self.commands:
108 log.debug('command %s not allowed', git_command)
108 log.debug('command %s not allowed', git_command)
109 return exc.HTTPForbidden()
109 return exc.HTTPForbidden()
110
110
111 # please, resist the urge to add '\n' to git capture and increment
111 # please, resist the urge to add '\n' to git capture and increment
112 # line count by 1.
112 # line count by 1.
113 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
113 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
114 # a part of protocol.
114 # a part of protocol.
115 # The code in the Git client not only does NOT need '\n', but actually
115 # The code in the Git client not only does NOT need '\n', but actually
116 # blows up if you sprinkle "flush" (0000) as "0001\n".
116 # blows up if you sprinkle "flush" (0000) as "0001\n".
117 # It reads binary, per number of bytes specified.
117 # It reads binary, per number of bytes specified.
118 # if you do add '\n' as part of data, count it.
118 # if you do add '\n' as part of data, count it.
119 server_advert = '# service=%s\n' % git_command
119 server_advert = '# service=%s\n' % git_command
120 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
120 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
121 try:
121 try:
122 gitenv = dict(os.environ)
122 gitenv = dict(os.environ)
123 # forget all configs
123 # forget all configs
124 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
124 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
125 command = [self.git_path, git_command[4:], '--stateless-rpc',
125 command = [self.git_path, git_command[4:], '--stateless-rpc',
126 '--advertise-refs', self.content_path]
126 '--advertise-refs', self.content_path]
127 out = subprocessio.SubprocessIOChunker(
127 out = subprocessio.SubprocessIOChunker(
128 command,
128 command,
129 env=gitenv,
129 env=gitenv,
130 starting_values=[packet_len + server_advert + '0000'],
130 starting_values=[packet_len + server_advert + '0000'],
131 shell=False
131 shell=False
132 )
132 )
133 except EnvironmentError:
133 except EnvironmentError:
134 log.exception('Error processing command')
134 log.exception('Error processing command')
135 raise exc.HTTPExpectationFailed()
135 raise exc.HTTPExpectationFailed()
136
136
137 resp = Response()
137 resp = Response()
138 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
138 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
139 resp.charset = None
139 resp.charset = None
140 resp.app_iter = out
140 resp.app_iter = out
141
141
142 return resp
142 return resp
143
143
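A worked example of the length prefix computed in inforefs: for git-upload-pack the advertisement line is 26 bytes, the pkt-line length covers those bytes plus the four hex digits themselves, and it is rendered as four lowercase hex characters.

service = 'git-upload-pack'
server_advert = '# service=%s\n' % service                        # 26 bytes
packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
assert packet_len == '001e'                                       # 26 + 4 == 0x1e
# the client therefore sees '001e# service=git-upload-pack\n' followed by '0000'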
144 def _get_want_capabilities(self, request):
144 def _get_want_capabilities(self, request):
145 """Read the capabilities found in the first want line of the request."""
145 """Read the capabilities found in the first want line of the request."""
146 pos = request.body_file_seekable.tell()
146 pos = request.body_file_seekable.tell()
147 first_line = request.body_file_seekable.readline()
147 first_line = request.body_file_seekable.readline()
148 request.body_file_seekable.seek(pos)
148 request.body_file_seekable.seek(pos)
149
149
150 return frozenset(
150 return frozenset(
151 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
151 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
152
152
153 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
153 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
154 """
154 """
155 Construct a response with an empty PACK file.
155 Construct a response with an empty PACK file.
156
156
157 We use an empty PACK file, as that would trigger the failure of the pull
157 We use an empty PACK file, as that would trigger the failure of the pull
158 or clone command.
158 or clone command.
159
159
160 We also print in the error output a message explaining why the command
160 We also print in the error output a message explaining why the command
161 was aborted.
161 was aborted.
162
162
163 If, additionally, the user accepts messages, we send them the output
163 If, additionally, the user accepts messages, we send them the output
164 of the pre-pull hook.
164 of the pre-pull hook.
165
165
166 Note that for clients not supporting side-band we just send them the
166 Note that for clients not supporting side-band we just send them the
167 empty PACK file.
167 empty PACK file.
168 """
168 """
169 if self.SIDE_BAND_CAPS.intersection(capabilities):
169 if self.SIDE_BAND_CAPS.intersection(capabilities):
170 response = []
170 response = []
171 proto = dulwich.protocol.Protocol(None, response.append)
171 proto = dulwich.protocol.Protocol(None, response.append)
172 proto.write_pkt_line('NAK\n')
172 proto.write_pkt_line('NAK\n')
173 self._write_sideband_to_proto(pre_pull_messages, proto,
173 self._write_sideband_to_proto(pre_pull_messages, proto,
174 capabilities)
174 capabilities)
175 # N.B.(skreft): Do not change the sideband channel to 3, as that
175 # N.B.(skreft): Do not change the sideband channel to 3, as that
176 # produces a fatal error in the client:
176 # produces a fatal error in the client:
177 # fatal: error in sideband demultiplexer
177 # fatal: error in sideband demultiplexer
178 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
178 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
179 proto.write_sideband(1, self.EMPTY_PACK)
179 proto.write_sideband(1, self.EMPTY_PACK)
180
180
181 # writes 0000
181 # writes 0000
182 proto.write_pkt_line(None)
182 proto.write_pkt_line(None)
183
183
184 return response
184 return response
185 else:
185 else:
186 return [self.EMPTY_PACK]
186 return [self.EMPTY_PACK]
187
187
188 def _write_sideband_to_proto(self, data, proto, capabilities):
188 def _write_sideband_to_proto(self, data, proto, capabilities):
189 """
189 """
190 Write the data to the proto's sideband number 2.
190 Write the data to the proto's sideband number 2.
191
191
192 We do not use dulwich's write_sideband directly as it only supports
192 We do not use dulwich's write_sideband directly as it only supports
193 side-band-64k.
193 side-band-64k.
194 """
194 """
195 if not data:
195 if not data:
196 return
196 return
197
197
198 # N.B.(skreft): The values below are explained in the pack protocol
198 # N.B.(skreft): The values below are explained in the pack protocol
199 # documentation, section Packfile Data.
199 # documentation, section Packfile Data.
200 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
200 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
201 if 'side-band-64k' in capabilities:
201 if 'side-band-64k' in capabilities:
202 chunk_size = 65515
202 chunk_size = 65515
203 elif 'side-band' in capabilities:
203 elif 'side-band' in capabilities:
204 chunk_size = 995
204 chunk_size = 995
205 else:
205 else:
206 return
206 return
207
207
208 chunker = (
208 chunker = (
209 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
209 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
210
210
211 for chunk in chunker:
211 for chunk in chunker:
212 proto.write_sideband(2, chunk)
212 proto.write_sideband(2, chunk)
213
213
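The two chunk sizes above come straight from the pkt-line limits: a packet may be at most 1000 bytes with plain side-band and 65520 bytes with side-band-64k, and every packet spends 4 bytes on the length prefix plus 1 byte on the band number.

PKT_LIMIT = {'side-band': 1000, 'side-band-64k': 65520}
assert PKT_LIMIT['side-band'] - 4 - 1 == 995
assert PKT_LIMIT['side-band-64k'] - 4 - 1 == 65515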
214 def _get_messages(self, data, capabilities):
214 def _get_messages(self, data, capabilities):
215 """Return a list with packets for sending data in sideband number 2."""
215 """Return a list with packets for sending data in sideband number 2."""
216 response = []
216 response = []
217 proto = dulwich.protocol.Protocol(None, response.append)
217 proto = dulwich.protocol.Protocol(None, response.append)
218
218
219 self._write_sideband_to_proto(data, proto, capabilities)
219 self._write_sideband_to_proto(data, proto, capabilities)
220
220
221 return response
221 return response
222
222
223 def _inject_messages_to_response(self, response, capabilities,
223 def _inject_messages_to_response(self, response, capabilities,
224 start_messages, end_messages):
224 start_messages, end_messages):
225 """
225 """
226 Given a list response, we inject the pre/post-pull messages.
226 Given a list response, we inject the pre/post-pull messages.
227
227
228 We only inject the messages if the client supports sideband, and the
228 We only inject the messages if the client supports sideband, and the
229 response has the format:
229 response has the format:
230 0008NAK\n...0000
230 0008NAK\n...0000
231
231
232 Note that we do not check the no-progress capability as, by default, git
232 Note that we do not check the no-progress capability as, by default, git
233 sends it, which effectively would block all messages.
233 sends it, which effectively would block all messages.
234 """
234 """
235 if not self.SIDE_BAND_CAPS.intersection(capabilities):
235 if not self.SIDE_BAND_CAPS.intersection(capabilities):
236 return response
236 return response
237
237
238 if (not response[0].startswith('0008NAK\n') or
238 if (not response[0].startswith('0008NAK\n') or
239 not response[-1].endswith('0000')):
239 not response[-1].endswith('0000')):
240 return response
240 return response
241
241
242 if not start_messages and not end_messages:
242 if not start_messages and not end_messages:
243 return response
243 return response
244
244
245 new_response = ['0008NAK\n']
245 new_response = ['0008NAK\n']
246 new_response.extend(self._get_messages(start_messages, capabilities))
246 new_response.extend(self._get_messages(start_messages, capabilities))
247 if len(response) == 1:
247 if len(response) == 1:
248 new_response.append(response[0][8:-4])
248 new_response.append(response[0][8:-4])
249 else:
249 else:
250 new_response.append(response[0][8:])
250 new_response.append(response[0][8:])
251 new_response.extend(response[1:-1])
251 new_response.extend(response[1:-1])
252 new_response.append(response[-1][:-4])
252 new_response.append(response[-1][:-4])
253 new_response.extend(self._get_messages(end_messages, capabilities))
253 new_response.extend(self._get_messages(end_messages, capabilities))
254 new_response.append('0000')
254 new_response.append('0000')
255
255
256 return new_response
256 return new_response
257
257
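A sketch of the slicing performed above on the smallest possible upload-pack response; '<pack-bytes>' stands in for real pack data and the injected sideband packets are omitted for brevity.

response = ['0008NAK\n<pack-bytes>0000']
body = response[0][8:-4]       # strip the leading '0008NAK\n' and the trailing '0000'
assert body == '<pack-bytes>'
# the rebuilt stream is then:
#   ['0008NAK\n', <pre-pull sideband packets>, body, <post-pull sideband packets>, '0000']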
258 def backend(self, request, environ):
258 def backend(self, request, environ):
259 """
259 """
260 WSGI Response producer for HTTP POST Git Smart HTTP requests.
260 WSGI Response producer for HTTP POST Git Smart HTTP requests.
261 Reads commands and data from HTTP POST's body.
261 Reads commands and data from HTTP POST's body.
262 returns an iterator obj with contents of git command's
262 returns an iterator obj with contents of git command's
263 response to stdout
263 response to stdout
264 """
264 """
265 # TODO(skreft): think how we could detect an HTTPLockedException, as
265 # TODO(skreft): think how we could detect an HTTPLockedException, as
266 # we probably want to have the same mechanism used by mercurial and
266 # we probably want to have the same mechanism used by mercurial and
267 # simplevcs.
267 # simplevcs.
268 # For that we would need to parse the output of the command looking for
268 # For that we would need to parse the output of the command looking for
269 # some signs of the HTTPLockedError, parse the data and reraise it in
269 # some signs of the HTTPLockedError, parse the data and reraise it in
270 # pygrack. However, that would interfere with the streaming.
270 # pygrack. However, that would interfere with the streaming.
271 #
271 #
272 # Now the output of a blocked push is:
272 # Now the output of a blocked push is:
273 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
273 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
274 # POST git-receive-pack (1047 bytes)
274 # POST git-receive-pack (1047 bytes)
275 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
275 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
276 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
276 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
277 # ! [remote rejected] master -> master (pre-receive hook declined)
277 # ! [remote rejected] master -> master (pre-receive hook declined)
278 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
278 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
279
279
280 git_command = self._get_fixedpath(request.path_info)
280 git_command = self._get_fixedpath(request.path_info)
281 if git_command not in self.commands:
281 if git_command not in self.commands:
282 log.debug('command %s not allowed', git_command)
282 log.debug('command %s not allowed', git_command)
283 return exc.HTTPForbidden()
283 return exc.HTTPForbidden()
284
284
285 capabilities = None
285 capabilities = None
286 if git_command == 'git-upload-pack':
286 if git_command == 'git-upload-pack':
287 capabilities = self._get_want_capabilities(request)
287 capabilities = self._get_want_capabilities(request)
288
288
289 if 'CONTENT_LENGTH' in environ:
289 if 'CONTENT_LENGTH' in environ:
290 inputstream = FileWrapper(request.body_file_seekable,
290 inputstream = FileWrapper(request.body_file_seekable,
291 request.content_length)
291 request.content_length)
292 else:
292 else:
293 inputstream = request.body_file_seekable
293 inputstream = request.body_file_seekable
294
294
295 resp = Response()
295 resp = Response()
296 resp.content_type = ('application/x-%s-result' %
296 resp.content_type = ('application/x-%s-result' %
297 git_command.encode('utf8'))
297 git_command.encode('utf8'))
298 resp.charset = None
298 resp.charset = None
299
299
300 if git_command == 'git-upload-pack':
300 if git_command == 'git-upload-pack':
301 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
301 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
302 if status != 0:
302 if status != 0:
303 resp.app_iter = self._build_failed_pre_pull_response(
303 resp.app_iter = self._build_failed_pre_pull_response(
304 capabilities, pre_pull_messages)
304 capabilities, pre_pull_messages)
305 return resp
305 return resp
306
306
307 gitenv = dict(os.environ)
307 gitenv = dict(os.environ)
308 # forget all configs
308 # forget all configs
309 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
309 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
310 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
310 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
311 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
311 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
312 self.content_path]
312 self.content_path]
313 log.debug('handling cmd %s', cmd)
313 log.debug('handling cmd %s', cmd)
314
314
315 out = subprocessio.SubprocessIOChunker(
315 out = subprocessio.SubprocessIOChunker(
316 cmd,
316 cmd,
317 inputstream=inputstream,
317 inputstream=inputstream,
318 env=gitenv,
318 env=gitenv,
319 cwd=self.content_path,
319 cwd=self.content_path,
320 shell=False,
320 shell=False,
321 fail_on_stderr=False,
321 fail_on_stderr=False,
322 fail_on_return_code=False
322 fail_on_return_code=False
323 )
323 )
324
324
325 if self.update_server_info and git_command == 'git-receive-pack':
325 if self.update_server_info and git_command == 'git-receive-pack':
326 # We need to fully consume the iterator here, as the
326 # We need to fully consume the iterator here, as the
327 # update-server-info command needs to be run after the push.
327 # update-server-info command needs to be run after the push.
328 out = list(out)
328 out = list(out)
329
329
330 # Updating refs manually after each push.
330 # Updating refs manually after each push.
331 # This is required as some clients are exposing Git repos internally
331 # This is required as some clients are exposing Git repos internally
332 # with the dumb protocol.
332 # with the dumb protocol.
333 cmd = [self.git_path, 'update-server-info']
333 cmd = [self.git_path, 'update-server-info']
334 log.debug('handling cmd %s', cmd)
334 log.debug('handling cmd %s', cmd)
335 output = subprocessio.SubprocessIOChunker(
335 output = subprocessio.SubprocessIOChunker(
336 cmd,
336 cmd,
337 inputstream=inputstream,
337 inputstream=inputstream,
338 env=gitenv,
338 env=gitenv,
339 cwd=self.content_path,
339 cwd=self.content_path,
340 shell=False,
340 shell=False,
341 fail_on_stderr=False,
341 fail_on_stderr=False,
342 fail_on_return_code=False
342 fail_on_return_code=False
343 )
343 )
344 # Consume all the output so the subprocess finishes
344 # Consume all the output so the subprocess finishes
345 for _ in output:
345 for _ in output:
346 pass
346 pass
347
347
348 if git_command == 'git-upload-pack':
348 if git_command == 'git-upload-pack':
349 out = list(out)
349 out = list(out)
350 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
350 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
351 resp.app_iter = self._inject_messages_to_response(
351 resp.app_iter = self._inject_messages_to_response(
352 out, capabilities, pre_pull_messages, post_pull_messages)
352 out, capabilities, pre_pull_messages, post_pull_messages)
353 else:
353 else:
354 resp.app_iter = out
354 resp.app_iter = out
355
355
356 return resp
356 return resp
357
357
358 def __call__(self, environ, start_response):
358 def __call__(self, environ, start_response):
359 request = Request(environ)
359 request = Request(environ)
360 _path = self._get_fixedpath(request.path_info)
360 _path = self._get_fixedpath(request.path_info)
361 if _path.startswith('info/refs'):
361 if _path.startswith('info/refs'):
362 app = self.inforefs
362 app = self.inforefs
363 else:
363 else:
364 app = self.backend
364 app = self.backend
365
365
366 try:
366 try:
367 resp = app(request, environ)
367 resp = app(request, environ)
368 except exc.HTTPException as error:
368 except exc.HTTPException as error:
369 log.exception('HTTP Error')
369 log.exception('HTTP Error')
370 resp = error
370 resp = error
371 except Exception:
371 except Exception:
372 log.exception('Unknown error')
372 log.exception('Unknown error')
373 resp = exc.HTTPInternalServerError()
373 resp = exc.HTTPInternalServerError()
374
374
375 return resp(environ, start_response)
375 return resp(environ, start_response)
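A minimal sketch of serving the smart-HTTP application defined above with the stdlib WSGI server. The repository path, port and the empty extras dict are placeholders, not values from this changeset; a real deployment passes the RC_SCM extras expected by the hooks.

from wsgiref.simple_server import make_server

# GitRepository(repo_name, content_path, git_path, update_server_info, extras)
app = GitRepository(
    'vcs_test_git', '/srv/repos/vcs_test_git', 'git', True, {})
make_server('127.0.0.1', 5001, app).serve_forever()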
@@ -1,34 +1,34 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from vcsserver import scm_app, wsgi_app_caller
18 from vcsserver import scm_app, wsgi_app_caller
19
19
20
20
21 class GitRemoteWsgi(object):
21 class GitRemoteWsgi(object):
22 def handle(self, environ, input_data, *args, **kwargs):
22 def handle(self, environ, input_data, *args, **kwargs):
23 app = wsgi_app_caller.WSGIAppCaller(
23 app = wsgi_app_caller.WSGIAppCaller(
24 scm_app.create_git_wsgi_app(*args, **kwargs))
24 scm_app.create_git_wsgi_app(*args, **kwargs))
25
25
26 return app.handle(environ, input_data)
26 return app.handle(environ, input_data)
27
27
28
28
29 class HgRemoteWsgi(object):
29 class HgRemoteWsgi(object):
30 def handle(self, environ, input_data, *args, **kwargs):
30 def handle(self, environ, input_data, *args, **kwargs):
31 app = wsgi_app_caller.WSGIAppCaller(
31 app = wsgi_app_caller.WSGIAppCaller(
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
32 scm_app.create_hg_wsgi_app(*args, **kwargs))
33
33
34 return app.handle(environ, input_data)
34 return app.handle(environ, input_data)
@@ -1,174 +1,174 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20
20
21 import mercurial
21 import mercurial
22 import mercurial.error
22 import mercurial.error
23 import mercurial.hgweb.common
23 import mercurial.hgweb.common
24 import mercurial.hgweb.hgweb_mod
24 import mercurial.hgweb.hgweb_mod
25 import mercurial.hgweb.protocol
25 import mercurial.hgweb.protocol
26 import webob.exc
26 import webob.exc
27
27
28 from vcsserver import pygrack, exceptions, settings
28 from vcsserver import pygrack, exceptions, settings
29
29
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 # propagated from mercurial documentation
34 # propagated from mercurial documentation
35 HG_UI_SECTIONS = [
35 HG_UI_SECTIONS = [
36 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
36 'alias', 'auth', 'decode/encode', 'defaults', 'diff', 'email', 'extensions',
37 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
37 'format', 'merge-patterns', 'merge-tools', 'hooks', 'http_proxy', 'smtp',
38 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
38 'patch', 'paths', 'profiling', 'server', 'trusted', 'ui', 'web',
39 ]
39 ]
40
40
41
41
42 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
42 class HgWeb(mercurial.hgweb.hgweb_mod.hgweb):
43 """Extension of hgweb that simplifies some functions."""
43 """Extension of hgweb that simplifies some functions."""
44
44
45 def _get_view(self, repo):
45 def _get_view(self, repo):
46 """Views are not supported."""
46 """Views are not supported."""
47 return repo
47 return repo
48
48
49 def loadsubweb(self):
49 def loadsubweb(self):
50 """The result is only used in the templater method, which is itself unused."""
50 """The result is only used in the templater method, which is itself unused."""
51 return None
51 return None
52
52
53 def run(self):
53 def run(self):
54 """Unused function so raise an exception if accidentally called."""
54 """Unused function so raise an exception if accidentally called."""
55 raise NotImplementedError
55 raise NotImplementedError
56
56
57 def templater(self, req):
57 def templater(self, req):
58 """Function used in an unreachable code path.
58 """Function used in an unreachable code path.
59
59
60 This code is unreachable because we guarantee that the HTTP request
60 This code is unreachable because we guarantee that the HTTP request
61 corresponds to a Mercurial command. See the is_hg method. So, we are
61 corresponds to a Mercurial command. See the is_hg method. So, we are
62 never going to get a user-visible url.
62 never going to get a user-visible url.
63 """
63 """
64 raise NotImplementedError
64 raise NotImplementedError
65
65
66 def archivelist(self, nodeid):
66 def archivelist(self, nodeid):
67 """Unused function so raise an exception if accidentally called."""
67 """Unused function so raise an exception if accidentally called."""
68 raise NotImplementedError
68 raise NotImplementedError
69
69
70 def run_wsgi(self, req):
70 def run_wsgi(self, req):
71 """Check the request has a valid command, failing fast otherwise."""
71 """Check the request has a valid command, failing fast otherwise."""
72 cmd = req.form.get('cmd', [''])[0]
72 cmd = req.form.get('cmd', [''])[0]
73 if not mercurial.hgweb.protocol.iscmd(cmd):
73 if not mercurial.hgweb.protocol.iscmd(cmd):
74 req.respond(
74 req.respond(
75 mercurial.hgweb.common.ErrorResponse(
75 mercurial.hgweb.common.ErrorResponse(
76 mercurial.hgweb.common.HTTP_BAD_REQUEST),
76 mercurial.hgweb.common.HTTP_BAD_REQUEST),
77 mercurial.hgweb.protocol.HGTYPE
77 mercurial.hgweb.protocol.HGTYPE
78 )
78 )
79 return ['']
79 return ['']
80
80
81 return super(HgWeb, self).run_wsgi(req)
81 return super(HgWeb, self).run_wsgi(req)
82
82
83
83
84 def make_hg_ui_from_config(repo_config):
84 def make_hg_ui_from_config(repo_config):
85 baseui = mercurial.ui.ui()
85 baseui = mercurial.ui.ui()
86
86
87 # clean the baseui object
87 # clean the baseui object
88 baseui._ocfg = mercurial.config.config()
88 baseui._ocfg = mercurial.config.config()
89 baseui._ucfg = mercurial.config.config()
89 baseui._ucfg = mercurial.config.config()
90 baseui._tcfg = mercurial.config.config()
90 baseui._tcfg = mercurial.config.config()
91
91
92 for section, option, value in repo_config:
92 for section, option, value in repo_config:
93 baseui.setconfig(section, option, value)
93 baseui.setconfig(section, option, value)
94
94
95 # make our hgweb quiet so it doesn't print output
95 # make our hgweb quiet so it doesn't print output
96 baseui.setconfig('ui', 'quiet', 'true')
96 baseui.setconfig('ui', 'quiet', 'true')
97
97
98 return baseui
98 return baseui
99
99
100
100
101 def update_hg_ui_from_hgrc(baseui, repo_path):
101 def update_hg_ui_from_hgrc(baseui, repo_path):
102 path = os.path.join(repo_path, '.hg', 'hgrc')
102 path = os.path.join(repo_path, '.hg', 'hgrc')
103
103
104 if not os.path.isfile(path):
104 if not os.path.isfile(path):
105 log.debug('hgrc file is not present at %s, skipping...', path)
105 log.debug('hgrc file is not present at %s, skipping...', path)
106 return
106 return
107 log.debug('reading hgrc from %s', path)
107 log.debug('reading hgrc from %s', path)
108 cfg = mercurial.config.config()
108 cfg = mercurial.config.config()
109 cfg.read(path)
109 cfg.read(path)
110 for section in HG_UI_SECTIONS:
110 for section in HG_UI_SECTIONS:
111 for k, v in cfg.items(section):
111 for k, v in cfg.items(section):
112 log.debug('setting ui from file: [%s] %s=%s', section, k, v)
112 log.debug('setting ui from file: [%s] %s=%s', section, k, v)
113 baseui.setconfig(section, k, v)
113 baseui.setconfig(section, k, v)
114
114
115
115
116 def create_hg_wsgi_app(repo_path, repo_name, config):
116 def create_hg_wsgi_app(repo_path, repo_name, config):
117 """
117 """
118 Prepares a WSGI application to handle Mercurial requests.
118 Prepares a WSGI application to handle Mercurial requests.
119
119
120 :param config: is a list of 3-item tuples representing a ConfigObject
120 :param config: is a list of 3-item tuples representing a ConfigObject
121 (it is the serialized version of the config object).
121 (it is the serialized version of the config object).
122 """
122 """
123 log.debug("Creating Mercurial WSGI application")
123 log.debug("Creating Mercurial WSGI application")
124
124
125 baseui = make_hg_ui_from_config(config)
125 baseui = make_hg_ui_from_config(config)
126 update_hg_ui_from_hgrc(baseui, repo_path)
126 update_hg_ui_from_hgrc(baseui, repo_path)
127
127
128 try:
128 try:
129 return HgWeb(repo_path, name=repo_name, baseui=baseui)
129 return HgWeb(repo_path, name=repo_name, baseui=baseui)
130 except mercurial.error.RequirementError as exc:
130 except mercurial.error.RequirementError as exc:
131 raise exceptions.RequirementException(exc)
131 raise exceptions.RequirementException(exc)
132
132
133
133
134 class GitHandler(object):
134 class GitHandler(object):
135 def __init__(self, repo_location, repo_name, git_path, update_server_info,
135 def __init__(self, repo_location, repo_name, git_path, update_server_info,
136 extras):
136 extras):
137 if not os.path.isdir(repo_location):
137 if not os.path.isdir(repo_location):
138 raise OSError(repo_location)
138 raise OSError(repo_location)
139 self.content_path = repo_location
139 self.content_path = repo_location
140 self.repo_name = repo_name
140 self.repo_name = repo_name
141 self.repo_location = repo_location
141 self.repo_location = repo_location
142 self.extras = extras
142 self.extras = extras
143 self.git_path = git_path
143 self.git_path = git_path
144 self.update_server_info = update_server_info
144 self.update_server_info = update_server_info
145
145
146 def __call__(self, environ, start_response):
146 def __call__(self, environ, start_response):
147 app = webob.exc.HTTPNotFound()
147 app = webob.exc.HTTPNotFound()
148 candidate_paths = (
148 candidate_paths = (
149 self.content_path, os.path.join(self.content_path, '.git'))
149 self.content_path, os.path.join(self.content_path, '.git'))
150
150
151 for content_path in candidate_paths:
151 for content_path in candidate_paths:
152 try:
152 try:
153 app = pygrack.GitRepository(
153 app = pygrack.GitRepository(
154 self.repo_name, content_path, self.git_path,
154 self.repo_name, content_path, self.git_path,
155 self.update_server_info, self.extras)
155 self.update_server_info, self.extras)
156 break
156 break
157 except OSError:
157 except OSError:
158 continue
158 continue
159
159
160 return app(environ, start_response)
160 return app(environ, start_response)
161
161
162
162
163 def create_git_wsgi_app(repo_path, repo_name, config):
163 def create_git_wsgi_app(repo_path, repo_name, config):
164 """
164 """
165 Creates a WSGI application to handle Git requests.
165 Creates a WSGI application to handle Git requests.
166
166
167 :param config: is a dictionary holding the extras.
167 :param config: is a dictionary holding the extras.
168 """
168 """
169 git_path = settings.GIT_EXECUTABLE
169 git_path = settings.GIT_EXECUTABLE
170 update_server_info = config.pop('git_update_server_info')
170 update_server_info = config.pop('git_update_server_info')
171 app = GitHandler(
171 app = GitHandler(
172 repo_path, repo_name, git_path, update_server_info, config)
172 repo_path, repo_name, git_path, update_server_info, config)
173
173
174 return app
174 return app
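A rough sketch of how the two factory functions above are typically called; the repository paths and config values are invented for illustration. For Mercurial the config is a list of (section, option, value) tuples, for Git it is a dict of extras that must contain the 'git_update_server_info' flag.

hg_app = create_hg_wsgi_app(
    '/srv/repos/hg_repo', 'hg_repo', [('web', 'push_ssl', 'false')])

git_app = create_git_wsgi_app(
    '/srv/repos/git_repo', 'git_repo', {'git_update_server_info': True})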
@@ -1,78 +1,78 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import gc
18 import gc
19 import logging
19 import logging
20 import os
20 import os
21 import time
21 import time
22
22
23
23
24 log = logging.getLogger(__name__)
24 log = logging.getLogger(__name__)
25
25
26
26
27 class VcsServer(object):
27 class VcsServer(object):
28 """
28 """
29 Exposed remote interface of the vcsserver itself.
29 Exposed remote interface of the vcsserver itself.
30
30
31 This object can be used to manage the server remotely. Right now the main
31 This object can be used to manage the server remotely. Right now the main
32 use case is shutting down the server.
32 use case is shutting down the server.
33 """
33 """
34
34
35 _shutdown = False
35 _shutdown = False
36
36
37 def shutdown(self):
37 def shutdown(self):
38 self._shutdown = True
38 self._shutdown = True
39
39
40 def ping(self):
40 def ping(self):
41 """
41 """
42 Utility to probe a server connection.
42 Utility to probe a server connection.
43 """
43 """
44 log.debug("Received server ping.")
44 log.debug("Received server ping.")
45
45
46 def echo(self, data):
46 def echo(self, data):
47 """
47 """
48 Utility for performance testing.
48 Utility for performance testing.
49
49
50 Accepts arbitrary data and returns it unchanged.
50 Accepts arbitrary data and returns it unchanged.
51 """
51 """
52 log.debug("Received server echo.")
52 log.debug("Received server echo.")
53 return data
53 return data
54
54
55 def sleep(self, seconds):
55 def sleep(self, seconds):
56 """
56 """
57 Utility to simulate long running server interaction.
57 Utility to simulate long running server interaction.
58 """
58 """
59 log.debug("Sleeping %s seconds", seconds)
59 log.debug("Sleeping %s seconds", seconds)
60 time.sleep(seconds)
60 time.sleep(seconds)
61
61
62 def get_pid(self):
62 def get_pid(self):
63 """
63 """
64 Allows discovering the server's PID through a proxy object.
64 Allows discovering the server's PID through a proxy object.
65 """
65 """
66 return os.getpid()
66 return os.getpid()
67
67
68 def run_gc(self):
68 def run_gc(self):
69 """
69 """
70 Allows triggering the garbage collector.
70 Allows triggering the garbage collector.
71
71
72 Main intention is to support statistics gathering during test runs.
72 Main intention is to support statistics gathering during test runs.
73 """
73 """
74 freed_objects = gc.collect()
74 freed_objects = gc.collect()
75 return {
75 return {
76 'freed_objects': freed_objects,
76 'freed_objects': freed_objects,
77 'garbage': len(gc.garbage),
77 'garbage': len(gc.garbage),
78 }
78 }
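The utility methods above can also be exercised directly, without going through a remote proxy; a quick local sketch:

server = VcsServer()
server.ping()
assert server.echo({'payload': 1}) == {'payload': 1}
print(server.get_pid())
print(server.run_gc())  # e.g. {'freed_objects': 0, 'garbage': 0}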
@@ -1,30 +1,30 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19 PYRO_PORT = 9900
19 PYRO_PORT = 9900
20
20
21 PYRO_GIT = 'git_remote'
21 PYRO_GIT = 'git_remote'
22 PYRO_HG = 'hg_remote'
22 PYRO_HG = 'hg_remote'
23 PYRO_SVN = 'svn_remote'
23 PYRO_SVN = 'svn_remote'
24 PYRO_VCSSERVER = 'vcs_server'
24 PYRO_VCSSERVER = 'vcs_server'
25 PYRO_GIT_REMOTE_WSGI = 'git_remote_wsgi'
25 PYRO_GIT_REMOTE_WSGI = 'git_remote_wsgi'
26 PYRO_HG_REMOTE_WSGI = 'hg_remote_wsgi'
26 PYRO_HG_REMOTE_WSGI = 'hg_remote_wsgi'
27
27
28 WIRE_ENCODING = 'UTF-8'
28 WIRE_ENCODING = 'UTF-8'
29
29
30 GIT_EXECUTABLE = 'git'
30 GIT_EXECUTABLE = 'git'
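These names are meant to be combined into Pyro URIs on the client side. A hypothetical client sketch, assuming a Pyro4 daemon on localhost with the server object registered under PYRO_VCSSERVER (Pyro4 itself is not imported by this module):

import Pyro4

from vcsserver import settings

uri = 'PYRO:%s@localhost:%d' % (settings.PYRO_VCSSERVER, settings.PYRO_PORT)
proxy = Pyro4.Proxy(uri)
proxy.ping()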
@@ -1,627 +1,651 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from urllib2 import URLError
20 from urllib2 import URLError
21 import logging
21 import logging
22 import posixpath as vcspath
22 import posixpath as vcspath
23 import StringIO
23 import StringIO
24 import subprocess
24 import subprocess
25 import urllib
25 import urllib
26
26
27 import svn.client
27 import svn.client
28 import svn.core
28 import svn.core
29 import svn.delta
29 import svn.delta
30 import svn.diff
30 import svn.diff
31 import svn.fs
31 import svn.fs
32 import svn.repos
32 import svn.repos
33
33
34 from vcsserver import svn_diff
34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 from vcsserver import exceptions
36 from vcsserver.base import RepoFactory
36 from vcsserver.base import RepoFactory
37
37
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 # Set of svn compatible version flags.
42 # Set of svn compatible version flags.
43 # Compare with subversion/svnadmin/svnadmin.c
43 # Compare with subversion/svnadmin/svnadmin.c
44 svn_compatible_versions = set([
44 svn_compatible_versions = set([
45 'pre-1.4-compatible',
45 'pre-1.4-compatible',
46 'pre-1.5-compatible',
46 'pre-1.5-compatible',
47 'pre-1.6-compatible',
47 'pre-1.6-compatible',
48 'pre-1.8-compatible',
48 'pre-1.8-compatible',
49 ])
49 ])
50
50
51
51
52 def reraise_safe_exceptions(func):
52 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
53 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except Exception as e:
57 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
58 if not hasattr(e, '_vcs_kind'):
59 log.exception("Unhandled exception in svn remote call")
59 log.exception("Unhandled exception in svn remote call")
60 raise_from_original(exceptions.UnhandledException)
60 raise_from_original(exceptions.UnhandledException)
61 raise
61 raise
62 return wrapper
62 return wrapper
63
63
64
64
65 def raise_from_original(new_type):
65 def raise_from_original(new_type):
66 """
66 """
67 Raise a new exception type with original args and traceback.
67 Raise a new exception type with original args and traceback.
68 """
68 """
69 _, original, traceback = sys.exc_info()
69 _, original, traceback = sys.exc_info()
70 try:
70 try:
71 raise new_type(*original.args), None, traceback
71 raise new_type(*original.args), None, traceback
72 finally:
72 finally:
73 del traceback
73 del traceback
74
74
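Note that raise_from_original() calls sys.exc_info(), which assumes `import sys` alongside the imports at the top of this file. A small sketch of how the decorator is meant to wrap remote methods; the class and method below are hypothetical, not part of this changeset:

class ExampleRemote(object):

    @reraise_safe_exceptions
    def youngest_revision(self, repo):
        # a SubversionException raised here carries no `_vcs_kind` attribute,
        # so it is logged and re-raised as exceptions.UnhandledException
        # with the original traceback preserved
        fsobj = svn.repos.fs(repo)
        return svn.fs.youngest_rev(fsobj)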
75
75
76 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
77
77
78 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
79 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
80 if create:
80 if create:
81 fs_config = {}
81 fs_config = {}
82 if compatible_version:
82 if compatible_version:
83 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
84 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
85 .format(compatible_version))
85 .format(compatible_version))
86 log.debug('Create SVN repo with compatible version "%s"',
86 log.debug('Create SVN repo with compatible version "%s"',
87 compatible_version)
87 compatible_version)
88 fs_config[compatible_version] = '1'
88 fs_config[compatible_version] = '1'
89 repo = svn.repos.create(path, "", "", None, fs_config)
89 repo = svn.repos.create(path, "", "", None, fs_config)
90 else:
90 else:
91 repo = svn.repos.open(path)
91 repo = svn.repos.open(path)
92 return repo
92 return repo
93
93
94 def repo(self, wire, create=False, compatible_version=None):
94 def repo(self, wire, create=False, compatible_version=None):
95 def create_new_repo():
95 def create_new_repo():
96 return self._create_repo(wire, create, compatible_version)
96 return self._create_repo(wire, create, compatible_version)
97
97
98 return self._repo(wire, create_new_repo)
98 return self._repo(wire, create_new_repo)
99
99
100
100
101
101
102 NODE_TYPE_MAPPING = {
102 NODE_TYPE_MAPPING = {
103 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_file: 'file',
104 svn.core.svn_node_dir: 'dir',
104 svn.core.svn_node_dir: 'dir',
105 }
105 }
106
106
107
107
108 class SvnRemote(object):
108 class SvnRemote(object):
109
109
110 def __init__(self, factory, hg_factory=None):
110 def __init__(self, factory, hg_factory=None):
111 self._factory = factory
111 self._factory = factory
112 # TODO: Remove once we do not use internal Mercurial objects anymore
112 # TODO: Remove once we do not use internal Mercurial objects anymore
113 # for subversion
113 # for subversion
114 self._hg_factory = hg_factory
114 self._hg_factory = hg_factory
115
115
116 @reraise_safe_exceptions
116 @reraise_safe_exceptions
117 def discover_svn_version(self):
117 def discover_svn_version(self):
118 try:
118 try:
119 import svn.core
119 import svn.core
120 svn_ver = svn.core.SVN_VERSION
120 svn_ver = svn.core.SVN_VERSION
121 except ImportError:
121 except ImportError:
122 svn_ver = None
122 svn_ver = None
123 return svn_ver
123 return svn_ver
124
124
125 def check_url(self, url, config_items):
125 def check_url(self, url, config_items):
126 # this can throw exception if not installed, but we detect this
126 # this can throw exception if not installed, but we detect this
127 from hgsubversion import svnrepo
127 from hgsubversion import svnrepo
128
128
129 baseui = self._hg_factory._create_config(config_items)
129 baseui = self._hg_factory._create_config(config_items)
130 # the uuid function gets a valid UUID only from a proper repo, otherwise
130 # the uuid function gets a valid UUID only from a proper repo, otherwise
131 # it throws an exception
131 # it throws an exception
132 try:
132 try:
133 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 svnrepo.svnremoterepo(baseui, url).svn.uuid
134 except:
134 except:
135 log.debug("Invalid svn url: %s", url)
135 log.debug("Invalid svn url: %s", url)
136 raise URLError(
136 raise URLError(
137 '"%s" is not a valid Subversion source url.' % (url, ))
137 '"%s" is not a valid Subversion source url.' % (url, ))
138 return True
138 return True
139
139
140 def is_path_valid_repository(self, wire, path):
140 def is_path_valid_repository(self, wire, path):
141 try:
141 try:
142 svn.repos.open(path)
142 svn.repos.open(path)
143 except svn.core.SubversionException:
143 except svn.core.SubversionException:
144 log.debug("Invalid Subversion path %s", path)
144 log.debug("Invalid Subversion path %s", path)
145 return False
145 return False
146 return True
146 return True
147
147
148 def lookup(self, wire, revision):
148 def lookup(self, wire, revision):
149 if revision not in [-1, None, 'HEAD']:
149 if revision not in [-1, None, 'HEAD']:
150 raise NotImplementedError
150 raise NotImplementedError
151 repo = self._factory.repo(wire)
151 repo = self._factory.repo(wire)
152 fs_ptr = svn.repos.fs(repo)
152 fs_ptr = svn.repos.fs(repo)
153 head = svn.fs.youngest_rev(fs_ptr)
153 head = svn.fs.youngest_rev(fs_ptr)
154 return head
154 return head
155
155
156 def lookup_interval(self, wire, start_ts, end_ts):
156 def lookup_interval(self, wire, start_ts, end_ts):
157 repo = self._factory.repo(wire)
157 repo = self._factory.repo(wire)
158 fsobj = svn.repos.fs(repo)
158 fsobj = svn.repos.fs(repo)
159 start_rev = None
159 start_rev = None
160 end_rev = None
160 end_rev = None
161 if start_ts:
161 if start_ts:
162 start_ts_svn = apr_time_t(start_ts)
162 start_ts_svn = apr_time_t(start_ts)
163 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
163 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
164 else:
164 else:
165 start_rev = 1
165 start_rev = 1
166 if end_ts:
166 if end_ts:
167 end_ts_svn = apr_time_t(end_ts)
167 end_ts_svn = apr_time_t(end_ts)
168 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
168 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
169 else:
169 else:
170 end_rev = svn.fs.youngest_rev(fsobj)
170 end_rev = svn.fs.youngest_rev(fsobj)
171 return start_rev, end_rev
171 return start_rev, end_rev
172
172
173 def revision_properties(self, wire, revision):
173 def revision_properties(self, wire, revision):
174 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
175 fs_ptr = svn.repos.fs(repo)
175 fs_ptr = svn.repos.fs(repo)
176 return svn.fs.revision_proplist(fs_ptr, revision)
176 return svn.fs.revision_proplist(fs_ptr, revision)
177
177
178 def revision_changes(self, wire, revision):
178 def revision_changes(self, wire, revision):
179
179
180 repo = self._factory.repo(wire)
180 repo = self._factory.repo(wire)
181 fsobj = svn.repos.fs(repo)
181 fsobj = svn.repos.fs(repo)
182 rev_root = svn.fs.revision_root(fsobj, revision)
182 rev_root = svn.fs.revision_root(fsobj, revision)
183
183
184 editor = svn.repos.ChangeCollector(fsobj, rev_root)
184 editor = svn.repos.ChangeCollector(fsobj, rev_root)
185 editor_ptr, editor_baton = svn.delta.make_editor(editor)
185 editor_ptr, editor_baton = svn.delta.make_editor(editor)
186 base_dir = ""
186 base_dir = ""
187 send_deltas = False
187 send_deltas = False
188 svn.repos.replay2(
188 svn.repos.replay2(
189 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
189 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
190 editor_ptr, editor_baton, None)
190 editor_ptr, editor_baton, None)
191
191
192 added = []
192 added = []
193 changed = []
193 changed = []
194 removed = []
194 removed = []
195
195
196 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
196 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
197 for path, change in editor.changes.iteritems():
197 for path, change in editor.changes.iteritems():
198 # TODO: Decide what to do with directory nodes. Subversion can add
198 # TODO: Decide what to do with directory nodes. Subversion can add
199 # empty directories.
199 # empty directories.
200
200
201 if change.item_kind == svn.core.svn_node_dir:
201 if change.item_kind == svn.core.svn_node_dir:
202 continue
202 continue
203 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
203 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
204 added.append(path)
204 added.append(path)
205 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
205 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
206 svn.repos.CHANGE_ACTION_REPLACE]:
206 svn.repos.CHANGE_ACTION_REPLACE]:
207 changed.append(path)
207 changed.append(path)
208 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
208 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
209 removed.append(path)
209 removed.append(path)
210 else:
210 else:
211 raise NotImplementedError(
211 raise NotImplementedError(
212 "Action %s not supported on path %s" % (
212 "Action %s not supported on path %s" % (
213 change.action, path))
213 change.action, path))
214
214
215 changes = {
215 changes = {
216 'added': added,
216 'added': added,
217 'changed': changed,
217 'changed': changed,
218 'removed': removed,
218 'removed': removed,
219 }
219 }
220 return changes
220 return changes
221
221
222 def node_history(self, wire, path, revision, limit):
222 def node_history(self, wire, path, revision, limit):
223 cross_copies = False
223 cross_copies = False
224 repo = self._factory.repo(wire)
224 repo = self._factory.repo(wire)
225 fsobj = svn.repos.fs(repo)
225 fsobj = svn.repos.fs(repo)
226 rev_root = svn.fs.revision_root(fsobj, revision)
226 rev_root = svn.fs.revision_root(fsobj, revision)
227
227
228 history_revisions = []
228 history_revisions = []
229 history = svn.fs.node_history(rev_root, path)
229 history = svn.fs.node_history(rev_root, path)
230 history = svn.fs.history_prev(history, cross_copies)
230 history = svn.fs.history_prev(history, cross_copies)
231 while history:
231 while history:
232 __, node_revision = svn.fs.history_location(history)
232 __, node_revision = svn.fs.history_location(history)
233 history_revisions.append(node_revision)
233 history_revisions.append(node_revision)
234 if limit and len(history_revisions) >= limit:
234 if limit and len(history_revisions) >= limit:
235 break
235 break
236 history = svn.fs.history_prev(history, cross_copies)
236 history = svn.fs.history_prev(history, cross_copies)
237 return history_revisions
237 return history_revisions
238
238
239 def node_properties(self, wire, path, revision):
239 def node_properties(self, wire, path, revision):
240 repo = self._factory.repo(wire)
240 repo = self._factory.repo(wire)
241 fsobj = svn.repos.fs(repo)
241 fsobj = svn.repos.fs(repo)
242 rev_root = svn.fs.revision_root(fsobj, revision)
242 rev_root = svn.fs.revision_root(fsobj, revision)
243 return svn.fs.node_proplist(rev_root, path)
243 return svn.fs.node_proplist(rev_root, path)
244
244
245 def file_annotate(self, wire, path, revision):
245 def file_annotate(self, wire, path, revision):
246 abs_path = 'file://' + urllib.pathname2url(
246 abs_path = 'file://' + urllib.pathname2url(
247 vcspath.join(wire['path'], path))
247 vcspath.join(wire['path'], path))
248 file_uri = svn.core.svn_path_canonicalize(abs_path)
248 file_uri = svn.core.svn_path_canonicalize(abs_path)
249
249
250 start_rev = svn_opt_revision_value_t(0)
250 start_rev = svn_opt_revision_value_t(0)
251 peg_rev = svn_opt_revision_value_t(revision)
251 peg_rev = svn_opt_revision_value_t(revision)
252 end_rev = peg_rev
252 end_rev = peg_rev
253
253
254 annotations = []
254 annotations = []
255
255
256 def receiver(line_no, revision, author, date, line, pool):
256 def receiver(line_no, revision, author, date, line, pool):
257 annotations.append((line_no, revision, line))
257 annotations.append((line_no, revision, line))
258
258
259 # TODO: Cannot use blame5, missing typemap function in the swig code
259 # TODO: Cannot use blame5, missing typemap function in the swig code
260 try:
260 try:
261 svn.client.blame2(
261 svn.client.blame2(
262 file_uri, peg_rev, start_rev, end_rev,
262 file_uri, peg_rev, start_rev, end_rev,
263 receiver, svn.client.create_context())
263 receiver, svn.client.create_context())
264 except svn.core.SubversionException as exc:
264 except svn.core.SubversionException as exc:
265 log.exception("Error during blame operation.")
265 log.exception("Error during blame operation.")
266 raise Exception(
266 raise Exception(
267 "Blame not supported or file does not exist at path %s. "
267 "Blame not supported or file does not exist at path %s. "
268 "Error %s." % (path, exc))
268 "Error %s." % (path, exc))
269
269
270 return annotations
270 return annotations
271
271
272 def get_node_type(self, wire, path, rev=None):
272 def get_node_type(self, wire, path, rev=None):
273 repo = self._factory.repo(wire)
273 repo = self._factory.repo(wire)
274 fs_ptr = svn.repos.fs(repo)
274 fs_ptr = svn.repos.fs(repo)
275 if rev is None:
275 if rev is None:
276 rev = svn.fs.youngest_rev(fs_ptr)
276 rev = svn.fs.youngest_rev(fs_ptr)
277 root = svn.fs.revision_root(fs_ptr, rev)
277 root = svn.fs.revision_root(fs_ptr, rev)
278 node = svn.fs.check_path(root, path)
278 node = svn.fs.check_path(root, path)
279 return NODE_TYPE_MAPPING.get(node, None)
279 return NODE_TYPE_MAPPING.get(node, None)
280
280
281 def get_nodes(self, wire, path, revision=None):
281 def get_nodes(self, wire, path, revision=None):
282 repo = self._factory.repo(wire)
282 repo = self._factory.repo(wire)
283 fsobj = svn.repos.fs(repo)
283 fsobj = svn.repos.fs(repo)
284 if revision is None:
284 if revision is None:
285 revision = svn.fs.youngest_rev(fsobj)
285 revision = svn.fs.youngest_rev(fsobj)
286 root = svn.fs.revision_root(fsobj, revision)
286 root = svn.fs.revision_root(fsobj, revision)
287 entries = svn.fs.dir_entries(root, path)
287 entries = svn.fs.dir_entries(root, path)
288 result = []
288 result = []
289 for entry_path, entry_info in entries.iteritems():
289 for entry_path, entry_info in entries.iteritems():
290 result.append(
290 result.append(
291 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
291 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
292 return result
292 return result
293
293
294 def get_file_content(self, wire, path, rev=None):
294 def get_file_content(self, wire, path, rev=None):
295 repo = self._factory.repo(wire)
295 repo = self._factory.repo(wire)
296 fsobj = svn.repos.fs(repo)
296 fsobj = svn.repos.fs(repo)
297 if rev is None:
297 if rev is None:
298 rev = svn.fs.youngest_revision(fsobj)
298 rev = svn.fs.youngest_revision(fsobj)
299 root = svn.fs.revision_root(fsobj, rev)
299 root = svn.fs.revision_root(fsobj, rev)
300 content = svn.core.Stream(svn.fs.file_contents(root, path))
300 content = svn.core.Stream(svn.fs.file_contents(root, path))
301 return content.read()
301 return content.read()
302
302
303 def get_file_size(self, wire, path, revision=None):
303 def get_file_size(self, wire, path, revision=None):
304 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
305 fsobj = svn.repos.fs(repo)
305 fsobj = svn.repos.fs(repo)
306 if revision is None:
306 if revision is None:
307 revision = svn.fs.youngest_revision(fsobj)
307 revision = svn.fs.youngest_revision(fsobj)
308 root = svn.fs.revision_root(fsobj, revision)
308 root = svn.fs.revision_root(fsobj, revision)
309 size = svn.fs.file_length(root, path)
309 size = svn.fs.file_length(root, path)
310 return size
310 return size
311
311
312 def create_repository(self, wire, compatible_version=None):
312 def create_repository(self, wire, compatible_version=None):
313 log.info('Creating Subversion repository in path "%s"', wire['path'])
313 log.info('Creating Subversion repository in path "%s"', wire['path'])
314 self._factory.repo(wire, create=True,
314 self._factory.repo(wire, create=True,
315 compatible_version=compatible_version)
315 compatible_version=compatible_version)
316
316
317 def import_remote_repository(self, wire, src_url):
317 def import_remote_repository(self, wire, src_url):
318 repo_path = wire['path']
318 repo_path = wire['path']
319 if not self.is_path_valid_repository(wire, repo_path):
319 if not self.is_path_valid_repository(wire, repo_path):
320 raise Exception(
320 raise Exception(
321 "Path %s is not a valid Subversion repository." % repo_path)
321 "Path %s is not a valid Subversion repository." % repo_path)
322 # TODO: johbo: URL checks ?
322 # TODO: johbo: URL checks ?
323 rdump = subprocess.Popen(
323 rdump = subprocess.Popen(
324 ['svnrdump', 'dump', '--non-interactive', src_url],
324 ['svnrdump', 'dump', '--non-interactive', src_url],
325 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
325 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
326 load = subprocess.Popen(
326 load = subprocess.Popen(
327 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
327 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
328
328
329 # TODO: johbo: This can be a very long operation, might be better
329 # TODO: johbo: This can be a very long operation, might be better
330 # to track some kind of status and provide an api to check if the
330 # to track some kind of status and provide an api to check if the
331 # import is done.
331 # import is done.
332 rdump.wait()
332 rdump.wait()
333 load.wait()
333 load.wait()
334
334
335 if rdump.returncode != 0:
335 if rdump.returncode != 0:
336 errors = rdump.stderr.read()
336 errors = rdump.stderr.read()
337 log.error('svnrdump dump failed: statuscode %s: message: %s',
337 log.error('svnrdump dump failed: statuscode %s: message: %s',
338 rdump.returncode, errors)
338 rdump.returncode, errors)
339 reason = 'UNKNOWN'
339 reason = 'UNKNOWN'
340 if 'svnrdump: E230001:' in errors:
340 if 'svnrdump: E230001:' in errors:
341 reason = 'INVALID_CERTIFICATE'
341 reason = 'INVALID_CERTIFICATE'
342 raise Exception(
342 raise Exception(
343 'Failed to dump the remote repository from %s.' % src_url,
343 'Failed to dump the remote repository from %s.' % src_url,
344 reason)
344 reason)
345 if load.returncode != 0:
345 if load.returncode != 0:
346 raise Exception(
346 raise Exception(
347 'Failed to load the dump of remote repository from %s.' %
347 'Failed to load the dump of remote repository from %s.' %
348 (src_url, ))
348 (src_url, ))
349
349
350 def commit(self, wire, message, author, timestamp, updated, removed):
350 def commit(self, wire, message, author, timestamp, updated, removed):
351 assert isinstance(message, str)
351 assert isinstance(message, str)
352 assert isinstance(author, str)
352 assert isinstance(author, str)
353
353
354 repo = self._factory.repo(wire)
354 repo = self._factory.repo(wire)
355 fsobj = svn.repos.fs(repo)
355 fsobj = svn.repos.fs(repo)
356
356
357 rev = svn.fs.youngest_rev(fsobj)
357 rev = svn.fs.youngest_rev(fsobj)
358 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
358 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
359 txn_root = svn.fs.txn_root(txn)
359 txn_root = svn.fs.txn_root(txn)
360
360
361 for node in updated:
361 for node in updated:
362 TxnNodeProcessor(node, txn_root).update()
362 TxnNodeProcessor(node, txn_root).update()
363 for node in removed:
363 for node in removed:
364 TxnNodeProcessor(node, txn_root).remove()
364 TxnNodeProcessor(node, txn_root).remove()
365
365
366 commit_id = svn.repos.fs_commit_txn(repo, txn)
366 commit_id = svn.repos.fs_commit_txn(repo, txn)
367
367
368 if timestamp:
368 if timestamp:
369 apr_time = apr_time_t(timestamp)
369 apr_time = apr_time_t(timestamp)
370 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
370 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
371 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
371 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
372
372
373 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
373 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
374 return commit_id
374 return commit_id
375
375
376 def diff(self, wire, rev1, rev2, path1=None, path2=None,
376 def diff(self, wire, rev1, rev2, path1=None, path2=None,
377 ignore_whitespace=False, context=3):
377 ignore_whitespace=False, context=3):
378
378 wire.update(cache=False)
379 wire.update(cache=False)
379 repo = self._factory.repo(wire)
380 repo = self._factory.repo(wire)
380 diff_creator = SvnDiffer(
381 diff_creator = SvnDiffer(
381 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
382 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
383 try:
382 return diff_creator.generate_diff()
384 return diff_creator.generate_diff()
385 except svn.core.SubversionException as e:
386 log.exception(
387 "Error during diff operation. "
388 "Path might not exist %s, %s" % (path1, path2))
389 return ""
383
390
384
391
385 class SvnDiffer(object):
392 class SvnDiffer(object):
386 """
393 """
387 Utility to create diffs based on difflib and the Subversion api
394 Utility to create diffs based on difflib and the Subversion api
388 """
395 """
389
396
390 binary_content = False
397 binary_content = False
391
398
392 def __init__(
399 def __init__(
393 self, repo, src_rev, src_path, tgt_rev, tgt_path,
400 self, repo, src_rev, src_path, tgt_rev, tgt_path,
394 ignore_whitespace, context):
401 ignore_whitespace, context):
395 self.repo = repo
402 self.repo = repo
396 self.ignore_whitespace = ignore_whitespace
403 self.ignore_whitespace = ignore_whitespace
397 self.context = context
404 self.context = context
398
405
399 fsobj = svn.repos.fs(repo)
406 fsobj = svn.repos.fs(repo)
400
407
401 self.tgt_rev = tgt_rev
408 self.tgt_rev = tgt_rev
402 self.tgt_path = tgt_path or ''
409 self.tgt_path = tgt_path or ''
403 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
410 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
404 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
411 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
405
412
406 self.src_rev = src_rev
413 self.src_rev = src_rev
407 self.src_path = src_path or self.tgt_path
414 self.src_path = src_path or self.tgt_path
408 self.src_root = svn.fs.revision_root(fsobj, src_rev)
415 self.src_root = svn.fs.revision_root(fsobj, src_rev)
409 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
416 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
410
417
411 self._validate()
418 self._validate()
412
419
413 def _validate(self):
420 def _validate(self):
414 if (self.tgt_kind != svn.core.svn_node_none and
421 if (self.tgt_kind != svn.core.svn_node_none and
415 self.src_kind != svn.core.svn_node_none and
422 self.src_kind != svn.core.svn_node_none and
416 self.src_kind != self.tgt_kind):
423 self.src_kind != self.tgt_kind):
417 # TODO: johbo: proper error handling
424 # TODO: johbo: proper error handling
418 raise Exception(
425 raise Exception(
419 "Source and target are not compatible for diff generation. "
426 "Source and target are not compatible for diff generation. "
420 "Source type: %s, target type: %s" %
427 "Source type: %s, target type: %s" %
421 (self.src_kind, self.tgt_kind))
428 (self.src_kind, self.tgt_kind))
422
429
423 def generate_diff(self):
430 def generate_diff(self):
424 buf = StringIO.StringIO()
431 buf = StringIO.StringIO()
425 if self.tgt_kind == svn.core.svn_node_dir:
432 if self.tgt_kind == svn.core.svn_node_dir:
426 self._generate_dir_diff(buf)
433 self._generate_dir_diff(buf)
427 else:
434 else:
428 self._generate_file_diff(buf)
435 self._generate_file_diff(buf)
429 return buf.getvalue()
436 return buf.getvalue()
430
437
431 def _generate_dir_diff(self, buf):
438 def _generate_dir_diff(self, buf):
432 editor = DiffChangeEditor()
439 editor = DiffChangeEditor()
433 editor_ptr, editor_baton = svn.delta.make_editor(editor)
440 editor_ptr, editor_baton = svn.delta.make_editor(editor)
434 svn.repos.dir_delta2(
441 svn.repos.dir_delta2(
435 self.src_root,
442 self.src_root,
436 self.src_path,
443 self.src_path,
437 '', # src_entry
444 '', # src_entry
438 self.tgt_root,
445 self.tgt_root,
439 self.tgt_path,
446 self.tgt_path,
440 editor_ptr, editor_baton,
447 editor_ptr, editor_baton,
441 authorization_callback_allow_all,
448 authorization_callback_allow_all,
442 False, # text_deltas
449 False, # text_deltas
443 svn.core.svn_depth_infinity, # depth
450 svn.core.svn_depth_infinity, # depth
444 False, # entry_props
451 False, # entry_props
445 False, # ignore_ancestry
452 False, # ignore_ancestry
446 )
453 )
447
454
448 for path, __, change in sorted(editor.changes):
455 for path, __, change in sorted(editor.changes):
449 self._generate_node_diff(
456 self._generate_node_diff(
450 buf, change, path, self.tgt_path, path, self.src_path)
457 buf, change, path, self.tgt_path, path, self.src_path)
451
458
452 def _generate_file_diff(self, buf):
459 def _generate_file_diff(self, buf):
453 change = None
460 change = None
454 if self.src_kind == svn.core.svn_node_none:
461 if self.src_kind == svn.core.svn_node_none:
455 change = "add"
462 change = "add"
456 elif self.tgt_kind == svn.core.svn_node_none:
463 elif self.tgt_kind == svn.core.svn_node_none:
457 change = "delete"
464 change = "delete"
458 tgt_base, tgt_path = vcspath.split(self.tgt_path)
465 tgt_base, tgt_path = vcspath.split(self.tgt_path)
459 src_base, src_path = vcspath.split(self.src_path)
466 src_base, src_path = vcspath.split(self.src_path)
460 self._generate_node_diff(
467 self._generate_node_diff(
461 buf, change, tgt_path, tgt_base, src_path, src_base)
468 buf, change, tgt_path, tgt_base, src_path, src_base)
462
469
463 def _generate_node_diff(
470 def _generate_node_diff(
464 self, buf, change, tgt_path, tgt_base, src_path, src_base):
471 self, buf, change, tgt_path, tgt_base, src_path, src_base):
472
473 if self.src_rev == self.tgt_rev and tgt_base == src_base:
474 # return an empty diff when comparing the same revision; this keeps the
475 # behaviour consistent with git/hg
476 return
477
465 tgt_full_path = vcspath.join(tgt_base, tgt_path)
478 tgt_full_path = vcspath.join(tgt_base, tgt_path)
466 src_full_path = vcspath.join(src_base, src_path)
479 src_full_path = vcspath.join(src_base, src_path)
467
480
468 self.binary_content = False
481 self.binary_content = False
469 mime_type = self._get_mime_type(tgt_full_path)
482 mime_type = self._get_mime_type(tgt_full_path)
483
470 if mime_type and not mime_type.startswith('text'):
484 if mime_type and not mime_type.startswith('text'):
471 self.binary_content = True
485 self.binary_content = True
472 buf.write("=" * 67 + '\n')
486 buf.write("=" * 67 + '\n')
473 buf.write("Cannot display: file marked as a binary type.\n")
487 buf.write("Cannot display: file marked as a binary type.\n")
474 buf.write("svn:mime-type = %s\n" % mime_type)
488 buf.write("svn:mime-type = %s\n" % mime_type)
475 buf.write("Index: %s\n" % (tgt_path, ))
489 buf.write("Index: %s\n" % (tgt_path, ))
476 buf.write("=" * 67 + '\n')
490 buf.write("=" * 67 + '\n')
477 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
491 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
478 'tgt_path': tgt_path})
492 'tgt_path': tgt_path})
479
493
480 if change == 'add':
494 if change == 'add':
481 # TODO: johbo: SVN is missing a zero here compared to git
495 # TODO: johbo: SVN is missing a zero here compared to git
482 buf.write("new file mode 10644\n")
496 buf.write("new file mode 10644\n")
497
498 # TODO(marcink): introduce binary detection of svn patches
499 # if self.binary_content:
500 # buf.write('GIT binary patch\n')
501
483 buf.write("--- /dev/null\t(revision 0)\n")
502 buf.write("--- /dev/null\t(revision 0)\n")
484 src_lines = []
503 src_lines = []
485 else:
504 else:
486 if change == 'delete':
505 if change == 'delete':
487 buf.write("deleted file mode 10644\n")
506 buf.write("deleted file mode 10644\n")
507
508 # TODO(marcink): introduce binary detection of svn patches
509 # if self.binary_content:
510 # buf.write('GIT binary patch\n')
511
488 buf.write("--- a/%s\t(revision %s)\n" % (
512 buf.write("--- a/%s\t(revision %s)\n" % (
489 src_path, self.src_rev))
513 src_path, self.src_rev))
490 src_lines = self._svn_readlines(self.src_root, src_full_path)
514 src_lines = self._svn_readlines(self.src_root, src_full_path)
491
515
492 if change == 'delete':
516 if change == 'delete':
493 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
517 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
494 tgt_lines = []
518 tgt_lines = []
495 else:
519 else:
496 buf.write("+++ b/%s\t(revision %s)\n" % (
520 buf.write("+++ b/%s\t(revision %s)\n" % (
497 tgt_path, self.tgt_rev))
521 tgt_path, self.tgt_rev))
498 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
522 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
499
523
500 if not self.binary_content:
524 if not self.binary_content:
501 udiff = svn_diff.unified_diff(
525 udiff = svn_diff.unified_diff(
502 src_lines, tgt_lines, context=self.context,
526 src_lines, tgt_lines, context=self.context,
503 ignore_blank_lines=self.ignore_whitespace,
527 ignore_blank_lines=self.ignore_whitespace,
504 ignore_case=False,
528 ignore_case=False,
505 ignore_space_changes=self.ignore_whitespace)
529 ignore_space_changes=self.ignore_whitespace)
506 buf.writelines(udiff)
530 buf.writelines(udiff)
507
531
508 def _get_mime_type(self, path):
532 def _get_mime_type(self, path):
509 try:
533 try:
510 mime_type = svn.fs.node_prop(
534 mime_type = svn.fs.node_prop(
511 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
535 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
512 except svn.core.SubversionException:
536 except svn.core.SubversionException:
513 mime_type = svn.fs.node_prop(
537 mime_type = svn.fs.node_prop(
514 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
538 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
515 return mime_type
539 return mime_type
516
540
517 def _svn_readlines(self, fs_root, node_path):
541 def _svn_readlines(self, fs_root, node_path):
518 if self.binary_content:
542 if self.binary_content:
519 return []
543 return []
520 node_kind = svn.fs.check_path(fs_root, node_path)
544 node_kind = svn.fs.check_path(fs_root, node_path)
521 if node_kind not in (
545 if node_kind not in (
522 svn.core.svn_node_file, svn.core.svn_node_symlink):
546 svn.core.svn_node_file, svn.core.svn_node_symlink):
523 return []
547 return []
524 content = svn.core.Stream(
548 content = svn.core.Stream(
525 svn.fs.file_contents(fs_root, node_path)).read()
549 svn.fs.file_contents(fs_root, node_path)).read()
526 return content.splitlines(True)
550 return content.splitlines(True)
527
551
528
552
529 class DiffChangeEditor(svn.delta.Editor):
553 class DiffChangeEditor(svn.delta.Editor):
530 """
554 """
531 Records changes between two given revisions
555 Records changes between two given revisions
532 """
556 """
533
557
534 def __init__(self):
558 def __init__(self):
535 self.changes = []
559 self.changes = []
536
560
537 def delete_entry(self, path, revision, parent_baton, pool=None):
561 def delete_entry(self, path, revision, parent_baton, pool=None):
538 self.changes.append((path, None, 'delete'))
562 self.changes.append((path, None, 'delete'))
539
563
540 def add_file(
564 def add_file(
541 self, path, parent_baton, copyfrom_path, copyfrom_revision,
565 self, path, parent_baton, copyfrom_path, copyfrom_revision,
542 file_pool=None):
566 file_pool=None):
543 self.changes.append((path, 'file', 'add'))
567 self.changes.append((path, 'file', 'add'))
544
568
545 def open_file(self, path, parent_baton, base_revision, file_pool=None):
569 def open_file(self, path, parent_baton, base_revision, file_pool=None):
546 self.changes.append((path, 'file', 'change'))
570 self.changes.append((path, 'file', 'change'))
547
571
548
572
549 def authorization_callback_allow_all(root, path, pool):
573 def authorization_callback_allow_all(root, path, pool):
550 return True
574 return True
551
575
552
576
553 class TxnNodeProcessor(object):
577 class TxnNodeProcessor(object):
554 """
578 """
555 Utility to process the change of one node within a transaction root.
579 Utility to process the change of one node within a transaction root.
556
580
557 It encapsulates the knowledge of how to add, update or remove
581 It encapsulates the knowledge of how to add, update or remove
558 a node for a given transaction root. The purpose is to support the method
582 a node for a given transaction root. The purpose is to support the method
559 `SvnRemote.commit`.
583 `SvnRemote.commit`.
560 """
584 """
561
585
562 def __init__(self, node, txn_root):
586 def __init__(self, node, txn_root):
563 assert isinstance(node['path'], str)
587 assert isinstance(node['path'], str)
564
588
565 self.node = node
589 self.node = node
566 self.txn_root = txn_root
590 self.txn_root = txn_root
567
591
568 def update(self):
592 def update(self):
569 self._ensure_parent_dirs()
593 self._ensure_parent_dirs()
570 self._add_file_if_node_does_not_exist()
594 self._add_file_if_node_does_not_exist()
571 self._update_file_content()
595 self._update_file_content()
572 self._update_file_properties()
596 self._update_file_properties()
573
597
574 def remove(self):
598 def remove(self):
575 svn.fs.delete(self.txn_root, self.node['path'])
599 svn.fs.delete(self.txn_root, self.node['path'])
576 # TODO: Clean up directory if empty
600 # TODO: Clean up directory if empty
577
601
578 def _ensure_parent_dirs(self):
602 def _ensure_parent_dirs(self):
579 curdir = vcspath.dirname(self.node['path'])
603 curdir = vcspath.dirname(self.node['path'])
580 dirs_to_create = []
604 dirs_to_create = []
581 while not self._svn_path_exists(curdir):
605 while not self._svn_path_exists(curdir):
582 dirs_to_create.append(curdir)
606 dirs_to_create.append(curdir)
583 curdir = vcspath.dirname(curdir)
607 curdir = vcspath.dirname(curdir)
584
608
585 for curdir in reversed(dirs_to_create):
609 for curdir in reversed(dirs_to_create):
586 log.debug('Creating missing directory "%s"', curdir)
610 log.debug('Creating missing directory "%s"', curdir)
587 svn.fs.make_dir(self.txn_root, curdir)
611 svn.fs.make_dir(self.txn_root, curdir)
588
612
589 def _svn_path_exists(self, path):
613 def _svn_path_exists(self, path):
590 path_status = svn.fs.check_path(self.txn_root, path)
614 path_status = svn.fs.check_path(self.txn_root, path)
591 return path_status != svn.core.svn_node_none
615 return path_status != svn.core.svn_node_none
592
616
593 def _add_file_if_node_does_not_exist(self):
617 def _add_file_if_node_does_not_exist(self):
594 kind = svn.fs.check_path(self.txn_root, self.node['path'])
618 kind = svn.fs.check_path(self.txn_root, self.node['path'])
595 if kind == svn.core.svn_node_none:
619 if kind == svn.core.svn_node_none:
596 svn.fs.make_file(self.txn_root, self.node['path'])
620 svn.fs.make_file(self.txn_root, self.node['path'])
597
621
598 def _update_file_content(self):
622 def _update_file_content(self):
599 assert isinstance(self.node['content'], str)
623 assert isinstance(self.node['content'], str)
600 handler, baton = svn.fs.apply_textdelta(
624 handler, baton = svn.fs.apply_textdelta(
601 self.txn_root, self.node['path'], None, None)
625 self.txn_root, self.node['path'], None, None)
602 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
626 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
603
627
604 def _update_file_properties(self):
628 def _update_file_properties(self):
605 properties = self.node.get('properties', {})
629 properties = self.node.get('properties', {})
606 for key, value in properties.iteritems():
630 for key, value in properties.iteritems():
607 svn.fs.change_node_prop(
631 svn.fs.change_node_prop(
608 self.txn_root, self.node['path'], key, value)
632 self.txn_root, self.node['path'], key, value)
609
633
610
634
611 def apr_time_t(timestamp):
635 def apr_time_t(timestamp):
612 """
636 """
613 Convert a Python timestamp into APR timestamp type apr_time_t
637 Convert a Python timestamp into APR timestamp type apr_time_t
614 """
638 """
615 return timestamp * 1E6
639 return timestamp * 1E6
616
640
617
641
618 def svn_opt_revision_value_t(num):
642 def svn_opt_revision_value_t(num):
619 """
643 """
620 Put `num` into a `svn_opt_revision_value_t` structure.
644 Put `num` into a `svn_opt_revision_value_t` structure.
621 """
645 """
622 value = svn.core.svn_opt_revision_value_t()
646 value = svn.core.svn_opt_revision_value_t()
623 value.number = num
647 value.number = num
624 revision = svn.core.svn_opt_revision_t()
648 revision = svn.core.svn_opt_revision_t()
625 revision.kind = svn.core.svn_opt_revision_number
649 revision.kind = svn.core.svn_opt_revision_number
626 revision.value = value
650 revision.value = value
627 return revision
651 return revision
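As a rough usage sketch (not part of this changeset; the values and the `time` import are illustrative, and the two helpers are assumed to be imported from this module with the Subversion SWIG bindings installed), the functions above turn plain Python values into the structures the Subversion API expects:

    import time

    # APR wants microseconds since the epoch, e.g. for commit dates
    commit_time = apr_time_t(time.time())

    # a numeric revision spec, e.g. to pin an svn.client call to r42
    revision_spec = svn_opt_revision_value_t(42)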
@@ -1,207 +1,209 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 #
2 #
3 # Copyright (C) 2004-2009 Edgewall Software
3 # Copyright (C) 2004-2009 Edgewall Software
4 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
4 # Copyright (C) 2004-2006 Christopher Lenz <cmlenz@gmx.de>
5 # All rights reserved.
5 # All rights reserved.
6 #
6 #
7 # This software is licensed as described in the file COPYING, which
7 # This software is licensed as described in the file COPYING, which
8 # you should have received as part of this distribution. The terms
8 # you should have received as part of this distribution. The terms
9 # are also available at http://trac.edgewall.org/wiki/TracLicense.
9 # are also available at http://trac.edgewall.org/wiki/TracLicense.
10 #
10 #
11 # This software consists of voluntary contributions made by many
11 # This software consists of voluntary contributions made by many
12 # individuals. For the exact contribution history, see the revision
12 # individuals. For the exact contribution history, see the revision
13 # history and logs, available at http://trac.edgewall.org/log/.
13 # history and logs, available at http://trac.edgewall.org/log/.
14 #
14 #
15 # Author: Christopher Lenz <cmlenz@gmx.de>
15 # Author: Christopher Lenz <cmlenz@gmx.de>
16
16
17 import difflib
17 import difflib
18
18
19
19
20 def get_filtered_hunks(fromlines, tolines, context=None,
20 def get_filtered_hunks(fromlines, tolines, context=None,
21 ignore_blank_lines=False, ignore_case=False,
21 ignore_blank_lines=False, ignore_case=False,
22 ignore_space_changes=False):
22 ignore_space_changes=False):
23 """Retrieve differences in the form of `difflib.SequenceMatcher`
23 """Retrieve differences in the form of `difflib.SequenceMatcher`
24 opcodes, grouped according to the ``context`` and ``ignore_*``
24 opcodes, grouped according to the ``context`` and ``ignore_*``
25 parameters.
25 parameters.
26
26
27 :param fromlines: list of lines corresponding to the old content
27 :param fromlines: list of lines corresponding to the old content
28 :param tolines: list of lines corresponding to the new content
28 :param tolines: list of lines corresponding to the new content
29 :param ignore_blank_lines: differences that only involve blank lines are ignored
29 :param ignore_blank_lines: differences that only involve blank lines are ignored
30 :param ignore_case: differences in upper/lower case only are ignored
30 :param ignore_case: differences in upper/lower case only are ignored
31 :param ignore_space_changes: differences in the amount of whitespace are ignored
31 :param ignore_space_changes: differences in the amount of whitespace are ignored
32 :param context: the number of "equal" lines kept for representing
32 :param context: the number of "equal" lines kept for representing
33 the context of the change
33 the context of the change
34 :return: generator of grouped `difflib.SequenceMatcher` opcodes
34 :return: generator of grouped `difflib.SequenceMatcher` opcodes
35
35
36 If none of the ``ignore_*`` parameters is `True`, there is nothing
36 If none of the ``ignore_*`` parameters is `True`, there is nothing
37 to filter out; the results will come straight from the
37 to filter out; the results will come straight from the
38 SequenceMatcher.
38 SequenceMatcher.
39 """
39 """
40 hunks = get_hunks(fromlines, tolines, context)
40 hunks = get_hunks(fromlines, tolines, context)
41 if ignore_space_changes or ignore_case or ignore_blank_lines:
41 if ignore_space_changes or ignore_case or ignore_blank_lines:
42 hunks = filter_ignorable_lines(hunks, fromlines, tolines, context,
42 hunks = filter_ignorable_lines(hunks, fromlines, tolines, context,
43 ignore_blank_lines, ignore_case,
43 ignore_blank_lines, ignore_case,
44 ignore_space_changes)
44 ignore_space_changes)
45 return hunks
45 return hunks
46
46
47
47
48 def get_hunks(fromlines, tolines, context=None):
48 def get_hunks(fromlines, tolines, context=None):
49 """Generator yielding grouped opcodes describing differences.
49 """Generator yielding grouped opcodes describing differences.
50
50
51 See `get_filtered_hunks` for the parameter descriptions.
51 See `get_filtered_hunks` for the parameter descriptions.
52 """
52 """
53 matcher = difflib.SequenceMatcher(None, fromlines, tolines)
53 matcher = difflib.SequenceMatcher(None, fromlines, tolines)
54 if context is None:
54 if context is None:
55 return (hunk for hunk in [matcher.get_opcodes()])
55 return (hunk for hunk in [matcher.get_opcodes()])
56 else:
56 else:
57 return matcher.get_grouped_opcodes(context)
57 return matcher.get_grouped_opcodes(context)
58
58
59
59
60 def filter_ignorable_lines(hunks, fromlines, tolines, context,
60 def filter_ignorable_lines(hunks, fromlines, tolines, context,
61 ignore_blank_lines, ignore_case,
61 ignore_blank_lines, ignore_case,
62 ignore_space_changes):
62 ignore_space_changes):
63 """Detect line changes that should be ignored and emit them
63 """Detect line changes that should be ignored and emit them
64 tagged as "equal", possibly joined with the preceding and/or
64 tagged as "equal", possibly joined with the preceding and/or
65 following "equal" block.
65 following "equal" block.
66
66
67 See `get_filtered_hunks` for the parameter descriptions.
67 See `get_filtered_hunks` for the parameter descriptions.
68 """
68 """
69 def is_ignorable(tag, fromlines, tolines):
69 def is_ignorable(tag, fromlines, tolines):
70 if tag == 'delete' and ignore_blank_lines:
70 if tag == 'delete' and ignore_blank_lines:
71 if ''.join(fromlines) == '':
71 if ''.join(fromlines) == '':
72 return True
72 return True
73 elif tag == 'insert' and ignore_blank_lines:
73 elif tag == 'insert' and ignore_blank_lines:
74 if ''.join(tolines) == '':
74 if ''.join(tolines) == '':
75 return True
75 return True
76 elif tag == 'replace' and (ignore_case or ignore_space_changes):
76 elif tag == 'replace' and (ignore_case or ignore_space_changes):
77 if len(fromlines) != len(tolines):
77 if len(fromlines) != len(tolines):
78 return False
78 return False
79 def f(str):
79
80 def f(input_str):
80 if ignore_case:
81 if ignore_case:
81 str = str.lower()
82 input_str = input_str.lower()
82 if ignore_space_changes:
83 if ignore_space_changes:
83 str = ' '.join(str.split())
84 input_str = ' '.join(input_str.split())
84 return str
85 return input_str
86
85 for i in range(len(fromlines)):
87 for i in range(len(fromlines)):
86 if f(fromlines[i]) != f(tolines[i]):
88 if f(fromlines[i]) != f(tolines[i]):
87 return False
89 return False
88 return True
90 return True
89
91
90 hunks = list(hunks)
92 hunks = list(hunks)
91 opcodes = []
93 opcodes = []
92 ignored_lines = False
94 ignored_lines = False
93 prev = None
95 prev = None
94 for hunk in hunks:
96 for hunk in hunks:
95 for tag, i1, i2, j1, j2 in hunk:
97 for tag, i1, i2, j1, j2 in hunk:
96 if tag == 'equal':
98 if tag == 'equal':
97 if prev:
99 if prev:
98 prev = (tag, prev[1], i2, prev[3], j2)
100 prev = (tag, prev[1], i2, prev[3], j2)
99 else:
101 else:
100 prev = (tag, i1, i2, j1, j2)
102 prev = (tag, i1, i2, j1, j2)
101 else:
103 else:
102 if is_ignorable(tag, fromlines[i1:i2], tolines[j1:j2]):
104 if is_ignorable(tag, fromlines[i1:i2], tolines[j1:j2]):
103 ignored_lines = True
105 ignored_lines = True
104 if prev:
106 if prev:
105 prev = 'equal', prev[1], i2, prev[3], j2
107 prev = 'equal', prev[1], i2, prev[3], j2
106 else:
108 else:
107 prev = 'equal', i1, i2, j1, j2
109 prev = 'equal', i1, i2, j1, j2
108 continue
110 continue
109 if prev:
111 if prev:
110 opcodes.append(prev)
112 opcodes.append(prev)
111 opcodes.append((tag, i1, i2, j1, j2))
113 opcodes.append((tag, i1, i2, j1, j2))
112 prev = None
114 prev = None
113 if prev:
115 if prev:
114 opcodes.append(prev)
116 opcodes.append(prev)
115
117
116 if ignored_lines:
118 if ignored_lines:
117 if context is None:
119 if context is None:
118 yield opcodes
120 yield opcodes
119 else:
121 else:
120 # we leave at most n lines with the tag 'equal' before and after
122 # we leave at most n lines with the tag 'equal' before and after
121 # every change
123 # every change
122 n = context
124 n = context
123 nn = n + n
125 nn = n + n
124
126
125 group = []
127 group = []
126 def all_equal():
128 def all_equal():
127 return all(op[0] == 'equal' for op in group)
129 return all(op[0] == 'equal' for op in group)
128 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
130 for idx, (tag, i1, i2, j1, j2) in enumerate(opcodes):
129 if idx == 0 and tag == 'equal': # Fixup leading unchanged block
131 if idx == 0 and tag == 'equal': # Fixup leading unchanged block
130 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
132 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
131 elif tag == 'equal' and i2 - i1 > nn:
133 elif tag == 'equal' and i2 - i1 > nn:
132 group.append((tag, i1, min(i2, i1 + n), j1,
134 group.append((tag, i1, min(i2, i1 + n), j1,
133 min(j2, j1 + n)))
135 min(j2, j1 + n)))
134 if not all_equal():
136 if not all_equal():
135 yield group
137 yield group
136 group = []
138 group = []
137 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
139 i1, j1 = max(i1, i2 - n), max(j1, j2 - n)
138 group.append((tag, i1, i2, j1, j2))
140 group.append((tag, i1, i2, j1, j2))
139
141
140 if group and not (len(group) == 1 and group[0][0] == 'equal'):
142 if group and not (len(group) == 1 and group[0][0] == 'equal'):
141 if group[-1][0] == 'equal': # Fixup trailing unchanged block
143 if group[-1][0] == 'equal': # Fixup trailing unchanged block
142 tag, i1, i2, j1, j2 = group[-1]
144 tag, i1, i2, j1, j2 = group[-1]
143 group[-1] = tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)
145 group[-1] = tag, i1, min(i2, i1 + n), j1, min(j2, j1 + n)
144 if not all_equal():
146 if not all_equal():
145 yield group
147 yield group
146 else:
148 else:
147 for hunk in hunks:
149 for hunk in hunks:
148 yield hunk
150 yield hunk
149
151
150
152
151 NO_NEWLINE_AT_END = '\\ No newline at end of file'
153 NO_NEWLINE_AT_END = '\\ No newline at end of file'
152
154
153
155
154 def unified_diff(fromlines, tolines, context=None, ignore_blank_lines=0,
156 def unified_diff(fromlines, tolines, context=None, ignore_blank_lines=0,
155 ignore_case=0, ignore_space_changes=0, lineterm='\n'):
157 ignore_case=0, ignore_space_changes=0, lineterm='\n'):
156 """
158 """
157 Generator producing lines corresponding to a textual diff.
159 Generator producing lines corresponding to a textual diff.
158
160
159 See `get_filtered_hunks` for the parameter descriptions.
161 See `get_filtered_hunks` for the parameter descriptions.
160 """
162 """
161 # TODO: johbo: Check if this can be nicely integrated into the matching
163 # TODO: johbo: Check if this can be nicely integrated into the matching
162 if ignore_space_changes:
164 if ignore_space_changes:
163 fromlines = [l.strip() for l in fromlines]
165 fromlines = [l.strip() for l in fromlines]
164 tolines = [l.strip() for l in tolines]
166 tolines = [l.strip() for l in tolines]
165
167
166 for group in get_filtered_hunks(fromlines, tolines, context,
168 for group in get_filtered_hunks(fromlines, tolines, context,
167 ignore_blank_lines, ignore_case,
169 ignore_blank_lines, ignore_case,
168 ignore_space_changes):
170 ignore_space_changes):
169 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
171 i1, i2, j1, j2 = group[0][1], group[-1][2], group[0][3], group[-1][4]
170 if i1 == 0 and i2 == 0:
172 if i1 == 0 and i2 == 0:
171 i1, i2 = -1, -1 # support for Add changes
173 i1, i2 = -1, -1 # support for Add changes
172 if j1 == 0 and j2 == 0:
174 if j1 == 0 and j2 == 0:
173 j1, j2 = -1, -1 # support for Delete changes
175 j1, j2 = -1, -1 # support for Delete changes
174 yield '@@ -%s +%s @@%s' % (
176 yield '@@ -%s +%s @@%s' % (
175 _hunk_range(i1 + 1, i2 - i1),
177 _hunk_range(i1 + 1, i2 - i1),
176 _hunk_range(j1 + 1, j2 - j1),
178 _hunk_range(j1 + 1, j2 - j1),
177 lineterm)
179 lineterm)
178 for tag, i1, i2, j1, j2 in group:
180 for tag, i1, i2, j1, j2 in group:
179 if tag == 'equal':
181 if tag == 'equal':
180 for line in fromlines[i1:i2]:
182 for line in fromlines[i1:i2]:
181 if not line.endswith(lineterm):
183 if not line.endswith(lineterm):
182 yield ' ' + line + lineterm
184 yield ' ' + line + lineterm
183 yield NO_NEWLINE_AT_END + lineterm
185 yield NO_NEWLINE_AT_END + lineterm
184 else:
186 else:
185 yield ' ' + line
187 yield ' ' + line
186 else:
188 else:
187 if tag in ('replace', 'delete'):
189 if tag in ('replace', 'delete'):
188 for line in fromlines[i1:i2]:
190 for line in fromlines[i1:i2]:
189 if not line.endswith(lineterm):
191 if not line.endswith(lineterm):
190 yield '-' + line + lineterm
192 yield '-' + line + lineterm
191 yield NO_NEWLINE_AT_END + lineterm
193 yield NO_NEWLINE_AT_END + lineterm
192 else:
194 else:
193 yield '-' + line
195 yield '-' + line
194 if tag in ('replace', 'insert'):
196 if tag in ('replace', 'insert'):
195 for line in tolines[j1:j2]:
197 for line in tolines[j1:j2]:
196 if not line.endswith(lineterm):
198 if not line.endswith(lineterm):
197 yield '+' + line + lineterm
199 yield '+' + line + lineterm
198 yield NO_NEWLINE_AT_END + lineterm
200 yield NO_NEWLINE_AT_END + lineterm
199 else:
201 else:
200 yield '+' + line
202 yield '+' + line
201
203
202
204
203 def _hunk_range(start, length):
205 def _hunk_range(start, length):
204 if length != 1:
206 if length != 1:
205 return '%d,%d' % (start, length)
207 return '%d,%d' % (start, length)
206 else:
208 else:
207 return '%d' % (start, )
209 return '%d' % (start, )
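A hedged, self-contained sketch of driving the `unified_diff` generator above directly (the sample lines are made up; the helper is assumed to be imported from this module):

    fromlines = ['one\n', 'two\n', 'three\n']
    tolines = ['one\n', 'two changed\n', 'three\n']

    # with context=1 this yields a single hunk: the '@@ -1,3 +1,3 @@' header,
    # one ' ' context line on each side, a '-' line and a '+' line
    diff_lines = list(unified_diff(fromlines, tolines, context=1))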
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import socket
18 import socket
19
19
20 import pytest
20 import pytest
21
21
22
22
23 def pytest_addoption(parser):
23 def pytest_addoption(parser):
24 parser.addoption(
24 parser.addoption(
25 '--repeat', type=int, default=100,
25 '--repeat', type=int, default=100,
26 help="Number of repetitions in performance tests.")
26 help="Number of repetitions in performance tests.")
27
27
28
28
29 @pytest.fixture(scope='session')
29 @pytest.fixture(scope='session')
30 def repeat(request):
30 def repeat(request):
31 """
31 """
32 The number of repetitions is based on this fixture.
32 The number of repetitions is based on this fixture.
33
33
34 Slower calls may divide it by 10 or 100. It is chosen so that the
34 Slower calls may divide it by 10 or 100. It is chosen so that the
35 tests are not too slow in our default test suite.
35 tests are not too slow in our default test suite.
36 """
36 """
37 return request.config.getoption('--repeat')
37 return request.config.getoption('--repeat')
38
38
39
39
40 @pytest.fixture(scope='session')
40 @pytest.fixture(scope='session')
41 def vcsserver_port(request):
41 def vcsserver_port(request):
42 port = get_available_port()
42 port = get_available_port()
43 print 'Using vcsserver port %s' % (port, )
43 print 'Using vcsserver port %s' % (port, )
44 return port
44 return port
45
45
46
46
47 def get_available_port():
47 def get_available_port():
48 family = socket.AF_INET
48 family = socket.AF_INET
49 socktype = socket.SOCK_STREAM
49 socktype = socket.SOCK_STREAM
50 host = '127.0.0.1'
50 host = '127.0.0.1'
51
51
52 mysocket = socket.socket(family, socktype)
52 mysocket = socket.socket(family, socktype)
53 mysocket.bind((host, 0))
53 mysocket.bind((host, 0))
54 port = mysocket.getsockname()[1]
54 port = mysocket.getsockname()[1]
55 mysocket.close()
55 mysocket.close()
56 del mysocket
56 del mysocket
57 return port
57 return port
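Purely as an illustration (a hypothetical test, not part of this changeset), the session-scoped `vcsserver_port` fixture above can be consumed like any other pytest fixture:

    def test_server_port_is_usable(vcsserver_port):
        # the fixture returns a port that was free when it was probed; note the
        # small race window before anything actually binds to it
        assert 0 < vcsserver_port < 65536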
@@ -1,71 +1,71 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21
21
22 import configobj
22 import configobj
23
23
24
24
25 class TestINI(object):
25 class ContextINI(object):
26 """
26 """
27 Allows creating a new test.ini file as a copy of an existing one with edited
27 Allows creating a new test.ini file as a copy of an existing one with edited
28 data. If the existing file is not present, it creates a new one. Example usage::
28 data. If the existing file is not present, it creates a new one. Example usage::
29
29
30 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
30 with TestINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 print 'vcsserver --config=%s' % new_test_ini_path
31 print 'vcsserver --config=%s' % new_test_ini_path
32 """
32 """
33
33
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 destroy=True):
35 destroy=True):
36 self.ini_file_path = ini_file_path
36 self.ini_file_path = ini_file_path
37 self.ini_params = ini_params
37 self.ini_params = ini_params
38 self.new_path = None
38 self.new_path = None
39 self.new_path_prefix = new_file_prefix or 'test'
39 self.new_path_prefix = new_file_prefix or 'test'
40 self.destroy = destroy
40 self.destroy = destroy
41
41
42 def __enter__(self):
42 def __enter__(self):
43 _, pref = tempfile.mkstemp()
43 _, pref = tempfile.mkstemp()
44 loc = tempfile.gettempdir()
44 loc = tempfile.gettempdir()
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 pref, self.new_path_prefix, self.ini_file_path))
46 pref, self.new_path_prefix, self.ini_file_path))
47
47
48 # copy ini file and modify according to the params, if we re-use a file
48 # copy ini file and modify according to the params, if we re-use a file
49 if os.path.isfile(self.ini_file_path):
49 if os.path.isfile(self.ini_file_path):
50 shutil.copy(self.ini_file_path, self.new_path)
50 shutil.copy(self.ini_file_path, self.new_path)
51 else:
51 else:
52 # create new dump file for configObj to write to.
52 # create new dump file for configObj to write to.
53 with open(self.new_path, 'wb'):
53 with open(self.new_path, 'wb'):
54 pass
54 pass
55
55
56 config = configobj.ConfigObj(
56 config = configobj.ConfigObj(
57 self.new_path, file_error=True, write_empty_values=True)
57 self.new_path, file_error=True, write_empty_values=True)
58
58
59 for data in self.ini_params:
59 for data in self.ini_params:
60 section, ini_params = data.items()[0]
60 section, ini_params = data.items()[0]
61 key, val = ini_params.items()[0]
61 key, val = ini_params.items()[0]
62 if section not in config:
62 if section not in config:
63 config[section] = {}
63 config[section] = {}
64 config[section][key] = val
64 config[section][key] = val
65
65
66 config.write()
66 config.write()
67 return self.new_path
67 return self.new_path
68
68
69 def __exit__(self, exc_type, exc_val, exc_tb):
69 def __exit__(self, exc_type, exc_val, exc_tb):
70 if self.destroy:
70 if self.destroy:
71 os.remove(self.new_path)
71 os.remove(self.new_path)
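A hedged usage sketch for the renamed helper (the ini file name, section and key below are placeholders, not values taken from this changeset):

    with ContextINI('test.ini', [{'server:main': {'port': '9900'}}]) as new_ini_path:
        # new_ini_path is a temporary copy with the override applied; it is
        # deleted again on exit because destroy defaults to True
        print 'vcsserver --config=%s' % new_ini_path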
@@ -1,162 +1,162 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.fetch(
81 self.remote_git.fetch(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire=None, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102 def test_remove_ref(self):
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
106 assert ref_to_remove not in self.mock_repo.refs
107
107
108
108
109 class TestReraiseSafeExceptions(object):
109 class TestReraiseSafeExceptions(object):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
111 factory = Mock()
111 factory = Mock()
112 git_remote = git.GitRemote(factory)
112 git_remote = git.GitRemote(factory)
113
113
114 def fake_function():
114 def fake_function():
115 return None
115 return None
116
116
117 decorator = git.reraise_safe_exceptions(fake_function)
117 decorator = git.reraise_safe_exceptions(fake_function)
118
118
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 for method_name, method in methods:
120 for method_name, method in methods:
121 if not method_name.startswith('_'):
121 if not method_name.startswith('_'):
122 assert method.im_func.__code__ == decorator.__code__
122 assert method.im_func.__code__ == decorator.__code__
123
123
124 @pytest.mark.parametrize('side_effect, expected_type', [
124 @pytest.mark.parametrize('side_effect, expected_type', [
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 (dulwich.errors.HangupException(), 'error'),
129 (dulwich.errors.HangupException(), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 ])
131 ])
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 @git.reraise_safe_exceptions
133 @git.reraise_safe_exceptions
134 def fake_method():
134 def fake_method():
135 raise side_effect
135 raise side_effect
136
136
137 with pytest.raises(Exception) as exc_info:
137 with pytest.raises(Exception) as exc_info:
138 fake_method()
138 fake_method()
139 assert type(exc_info.value) == Exception
139 assert type(exc_info.value) == Exception
140 assert exc_info.value._vcs_kind == expected_type
140 assert exc_info.value._vcs_kind == expected_type
141
141
142
142
143 class TestDulwichRepoWrapper(object):
143 class TestDulwichRepoWrapper(object):
144 def test_calls_close_on_delete(self):
144 def test_calls_close_on_delete(self):
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 with isdir_patcher:
146 with isdir_patcher:
147 repo = git.Repo('/tmp/abcde')
147 repo = git.Repo('/tmp/abcde')
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 del repo
149 del repo
150 close_mock.assert_called_once_with()
150 close_mock.assert_called_once_with()
151
151
152
152
153 class TestGitFactory(object):
153 class TestGitFactory(object):
154 def test_create_repo_returns_dulwich_wrapper(self):
154 def test_create_repo_returns_dulwich_wrapper(self):
155 factory = git.GitFactory(repo_cache=Mock())
155 factory = git.GitFactory(repo_cache=Mock())
156 wire = {
156 wire = {
157 'path': '/tmp/abcde'
157 'path': '/tmp/abcde'
158 }
158 }
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
159 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
160 with isdir_patcher:
160 with isdir_patcher:
161 result = factory._create_repo(wire, True)
161 result = factory._create_repo(wire, True)
162 assert isinstance(result, git.Repo)
162 assert isinstance(result, git.Repo)
@@ -1,127 +1,127 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19 import sys
19 import sys
20 import traceback
20 import traceback
21
21
22 import pytest
22 import pytest
23 from mercurial.error import LookupError
23 from mercurial.error import LookupError
24 from mock import Mock, MagicMock, patch
24 from mock import Mock, MagicMock, patch
25
25
26 from vcsserver import exceptions, hg, hgcompat
26 from vcsserver import exceptions, hg, hgcompat
27
27
28
28
29 class TestHGLookup(object):
29 class TestHGLookup(object):
30 def setup(self):
30 def setup(self):
31 self.mock_repo = MagicMock()
31 self.mock_repo = MagicMock()
32 self.mock_repo.__getitem__.side_effect = LookupError(
32 self.mock_repo.__getitem__.side_effect = LookupError(
33 'revision_or_commit_id', 'index', 'message')
33 'revision_or_commit_id', 'index', 'message')
34 factory = Mock()
34 factory = Mock()
35 factory.repo = Mock(return_value=self.mock_repo)
35 factory.repo = Mock(return_value=self.mock_repo)
36 self.remote_hg = hg.HgRemote(factory)
36 self.remote_hg = hg.HgRemote(factory)
37
37
38 def test_fail_lookup_hg(self):
38 def test_fail_lookup_hg(self):
39 with pytest.raises(Exception) as exc_info:
39 with pytest.raises(Exception) as exc_info:
40 self.remote_hg.lookup(
40 self.remote_hg.lookup(
41 wire=None, revision='revision_or_commit_id', both=True)
41 wire=None, revision='revision_or_commit_id', both=True)
42
42
43 assert exc_info.value._vcs_kind == 'lookup'
43 assert exc_info.value._vcs_kind == 'lookup'
44 assert 'revision_or_commit_id' in exc_info.value.args
44 assert 'revision_or_commit_id' in exc_info.value.args
45
45
46
46
47 class TestDiff(object):
47 class TestDiff(object):
48 def test_raising_safe_exception_when_lookup_failed(self):
48 def test_raising_safe_exception_when_lookup_failed(self):
49 repo = Mock()
49 repo = Mock()
50 factory = Mock()
50 factory = Mock()
51 factory.repo = Mock(return_value=repo)
51 factory.repo = Mock(return_value=repo)
52 hg_remote = hg.HgRemote(factory)
52 hg_remote = hg.HgRemote(factory)
53 with patch('mercurial.patch.diff') as diff_mock:
53 with patch('mercurial.patch.diff') as diff_mock:
54 diff_mock.side_effect = LookupError(
54 diff_mock.side_effect = LookupError(
55 'deadbeef', 'index', 'message')
55 'deadbeef', 'index', 'message')
56 with pytest.raises(Exception) as exc_info:
56 with pytest.raises(Exception) as exc_info:
57 hg_remote.diff(
57 hg_remote.diff(
58 wire=None, rev1='deadbeef', rev2='deadbee1',
58 wire=None, rev1='deadbeef', rev2='deadbee1',
59 file_filter=None, opt_git=True, opt_ignorews=True,
59 file_filter=None, opt_git=True, opt_ignorews=True,
60 context=3)
60 context=3)
61 assert type(exc_info.value) == Exception
61 assert type(exc_info.value) == Exception
62 assert exc_info.value._vcs_kind == 'lookup'
62 assert exc_info.value._vcs_kind == 'lookup'
63
63
64
64
65 class TestReraiseSafeExceptions(object):
65 class TestReraiseSafeExceptions(object):
66 def test_method_decorated_with_reraise_safe_exceptions(self):
66 def test_method_decorated_with_reraise_safe_exceptions(self):
67 factory = Mock()
67 factory = Mock()
68 hg_remote = hg.HgRemote(factory)
68 hg_remote = hg.HgRemote(factory)
69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
69 methods = inspect.getmembers(hg_remote, predicate=inspect.ismethod)
70 decorator = hg.reraise_safe_exceptions(None)
70 decorator = hg.reraise_safe_exceptions(None)
71 for method_name, method in methods:
71 for method_name, method in methods:
72 if not method_name.startswith('_'):
72 if not method_name.startswith('_'):
73 assert method.im_func.__code__ == decorator.__code__
73 assert method.im_func.__code__ == decorator.__code__
74
74
75 @pytest.mark.parametrize('side_effect, expected_type', [
75 @pytest.mark.parametrize('side_effect, expected_type', [
76 (hgcompat.Abort(), 'abort'),
76 (hgcompat.Abort(), 'abort'),
77 (hgcompat.InterventionRequired(), 'abort'),
77 (hgcompat.InterventionRequired(), 'abort'),
78 (hgcompat.RepoLookupError(), 'lookup'),
78 (hgcompat.RepoLookupError(), 'lookup'),
79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
79 (hgcompat.LookupError('deadbeef', 'index', 'message'), 'lookup'),
80 (hgcompat.RepoError(), 'error'),
80 (hgcompat.RepoError(), 'error'),
81 (hgcompat.RequirementError(), 'requirement'),
81 (hgcompat.RequirementError(), 'requirement'),
82 ])
82 ])
83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
83 def test_safe_exceptions_reraised(self, side_effect, expected_type):
84 @hg.reraise_safe_exceptions
84 @hg.reraise_safe_exceptions
85 def fake_method():
85 def fake_method():
86 raise side_effect
86 raise side_effect
87
87
88 with pytest.raises(Exception) as exc_info:
88 with pytest.raises(Exception) as exc_info:
89 fake_method()
89 fake_method()
90 assert type(exc_info.value) == Exception
90 assert type(exc_info.value) == Exception
91 assert exc_info.value._vcs_kind == expected_type
91 assert exc_info.value._vcs_kind == expected_type
92
92
93 def test_keeps_original_traceback(self):
93 def test_keeps_original_traceback(self):
94 @hg.reraise_safe_exceptions
94 @hg.reraise_safe_exceptions
95 def fake_method():
95 def fake_method():
96 try:
96 try:
97 raise hgcompat.Abort()
97 raise hgcompat.Abort()
98 except:
98 except:
99 self.original_traceback = traceback.format_tb(
99 self.original_traceback = traceback.format_tb(
100 sys.exc_info()[2])
100 sys.exc_info()[2])
101 raise
101 raise
102
102
103 try:
103 try:
104 fake_method()
104 fake_method()
105 except Exception:
105 except Exception:
106 new_traceback = traceback.format_tb(sys.exc_info()[2])
106 new_traceback = traceback.format_tb(sys.exc_info()[2])
107
107
108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
108 new_traceback_tail = new_traceback[-len(self.original_traceback):]
109 assert new_traceback_tail == self.original_traceback
109 assert new_traceback_tail == self.original_traceback
110
110
111 def test_maps_unknow_exceptions_to_unhandled(self):
111 def test_maps_unknow_exceptions_to_unhandled(self):
112 @hg.reraise_safe_exceptions
112 @hg.reraise_safe_exceptions
113 def stub_method():
113 def stub_method():
114 raise ValueError('stub')
114 raise ValueError('stub')
115
115
116 with pytest.raises(Exception) as exc_info:
116 with pytest.raises(Exception) as exc_info:
117 stub_method()
117 stub_method()
118 assert exc_info.value._vcs_kind == 'unhandled'
118 assert exc_info.value._vcs_kind == 'unhandled'
119
119
120 def test_does_not_map_known_exceptions(self):
120 def test_does_not_map_known_exceptions(self):
121 @hg.reraise_safe_exceptions
121 @hg.reraise_safe_exceptions
122 def stub_method():
122 def stub_method():
123 raise exceptions.LookupException('stub')
123 raise exceptions.LookupException('stub')
124
124
125 with pytest.raises(Exception) as exc_info:
125 with pytest.raises(Exception) as exc_info:
126 stub_method()
126 stub_method()
127 assert exc_info.value._vcs_kind == 'lookup'
127 assert exc_info.value._vcs_kind == 'lookup'
@@ -1,125 +1,125 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import hgcompat, hgpatches
21 from vcsserver import hgcompat, hgpatches
22
22
23
23
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
24 LARGEFILES_CAPABILITY = 'largefiles=serve'
25
25
26
26
27 def test_patch_largefiles_capabilities_applies_patch(
27 def test_patch_largefiles_capabilities_applies_patch(
28 patched_capabilities):
28 patched_capabilities):
29 lfproto = hgcompat.largefiles.proto
29 lfproto = hgcompat.largefiles.proto
30 hgpatches.patch_largefiles_capabilities()
30 hgpatches.patch_largefiles_capabilities()
31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
31 assert lfproto.capabilities.func_name == '_dynamic_capabilities'
32
32
33
33
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
34 def test_dynamic_capabilities_uses_original_function_if_not_enabled(
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
35 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
36 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
36 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
37 hgcompat.largefiles.proto, stub_extensions)
37 hgcompat.largefiles.proto, stub_extensions)
38
38
39 caps = dynamic_capabilities(stub_repo, stub_proto)
39 caps = dynamic_capabilities(stub_repo, stub_proto)
40
40
41 stub_extensions.assert_called_once_with(stub_ui)
41 stub_extensions.assert_called_once_with(stub_ui)
42 assert LARGEFILES_CAPABILITY not in caps
42 assert LARGEFILES_CAPABILITY not in caps
43
43
44
44
45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
45 def test_dynamic_capabilities_uses_updated_capabilitiesorig(
46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
46 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
47 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
48 hgcompat.largefiles.proto, stub_extensions)
48 hgcompat.largefiles.proto, stub_extensions)
49
49
50 # This happens when the extension is loaded for the first time, important
50 # This happens when the extension is loaded for the first time, important
51 # to ensure that an updated function is correctly picked up.
51 # to ensure that an updated function is correctly picked up.
52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
52 hgcompat.largefiles.proto.capabilitiesorig = mock.Mock(
53 return_value='REPLACED')
53 return_value='REPLACED')
54
54
55 caps = dynamic_capabilities(stub_repo, stub_proto)
55 caps = dynamic_capabilities(stub_repo, stub_proto)
56 assert 'REPLACED' == caps
56 assert 'REPLACED' == caps
57
57
58
58
59 def test_dynamic_capabilities_ignores_updated_capabilities(
59 def test_dynamic_capabilities_ignores_updated_capabilities(
60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
60 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
61 stub_extensions.return_value = [('largefiles', mock.Mock())]
61 stub_extensions.return_value = [('largefiles', mock.Mock())]
62 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
62 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
63 hgcompat.largefiles.proto, stub_extensions)
63 hgcompat.largefiles.proto, stub_extensions)
64
64
65 # This happens when the extension is loaded for the first time, important
65 # This happens when the extension is loaded for the first time, important
66 # to ensure that an updated function is correctly picked up.
66 # to ensure that an updated function is correctly picked up.
67 hgcompat.largefiles.proto.capabilities = mock.Mock(
67 hgcompat.largefiles.proto.capabilities = mock.Mock(
68 side_effect=Exception('Must not be called'))
68 side_effect=Exception('Must not be called'))
69
69
70 dynamic_capabilities(stub_repo, stub_proto)
70 dynamic_capabilities(stub_repo, stub_proto)
71
71
72
72
73 def test_dynamic_capabilities_uses_largefiles_if_enabled(
73 def test_dynamic_capabilities_uses_largefiles_if_enabled(
74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
74 stub_repo, stub_proto, stub_ui, stub_extensions, patched_capabilities):
75 stub_extensions.return_value = [('largefiles', mock.Mock())]
75 stub_extensions.return_value = [('largefiles', mock.Mock())]
76
76
77 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
77 dynamic_capabilities = hgpatches._dynamic_capabilities_wrapper(
78 hgcompat.largefiles.proto, stub_extensions)
78 hgcompat.largefiles.proto, stub_extensions)
79
79
80 caps = dynamic_capabilities(stub_repo, stub_proto)
80 caps = dynamic_capabilities(stub_repo, stub_proto)
81
81
82 stub_extensions.assert_called_once_with(stub_ui)
82 stub_extensions.assert_called_once_with(stub_ui)
83 assert LARGEFILES_CAPABILITY in caps
83 assert LARGEFILES_CAPABILITY in caps
84
84
85
85
86 @pytest.fixture
86 @pytest.fixture
87 def patched_capabilities(request):
87 def patched_capabilities(request):
88 """
88 """
89 Patch in `capabilitiesorig` and restore both capability functions.
89 Patch in `capabilitiesorig` and restore both capability functions.
90 """
90 """
91 lfproto = hgcompat.largefiles.proto
91 lfproto = hgcompat.largefiles.proto
92 orig_capabilities = lfproto.capabilities
92 orig_capabilities = lfproto.capabilities
93 orig_capabilitiesorig = lfproto.capabilitiesorig
93 orig_capabilitiesorig = lfproto.capabilitiesorig
94
94
95 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
95 lfproto.capabilitiesorig = mock.Mock(return_value='ORIG')
96
96
97 @request.addfinalizer
97 @request.addfinalizer
98 def restore():
98 def restore():
99 lfproto.capabilities = orig_capabilities
99 lfproto.capabilities = orig_capabilities
100 lfproto.capabilitiesorig = orig_capabilitiesorig
100 lfproto.capabilitiesorig = orig_capabilitiesorig
101
101
102
102
103 @pytest.fixture
103 @pytest.fixture
104 def stub_repo(stub_ui):
104 def stub_repo(stub_ui):
105 repo = mock.Mock()
105 repo = mock.Mock()
106 repo.ui = stub_ui
106 repo.ui = stub_ui
107 return repo
107 return repo
108
108
109
109
110 @pytest.fixture
110 @pytest.fixture
111 def stub_proto(stub_ui):
111 def stub_proto(stub_ui):
112 proto = mock.Mock()
112 proto = mock.Mock()
113 proto.ui = stub_ui
113 proto.ui = stub_ui
114 return proto
114 return proto
115
115
116
116
117 @pytest.fixture
117 @pytest.fixture
118 def stub_ui():
118 def stub_ui():
119 return hgcompat.ui.ui()
119 return hgcompat.ui.ui()
120
120
121
121
122 @pytest.fixture
122 @pytest.fixture
123 def stub_extensions():
123 def stub_extensions():
124 extensions = mock.Mock(return_value=tuple())
124 extensions = mock.Mock(return_value=tuple())
125 return extensions
125 return extensions
@@ -1,549 +1,549 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import contextlib
18 import contextlib
19 import io
19 import io
20 import threading
20 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
21 from BaseHTTPServer import BaseHTTPRequestHandler
22 from SocketServer import TCPServer
22 from SocketServer import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27 import simplejson as json
27 import simplejson as json
28
28
29 from vcsserver import hooks
29 from vcsserver import hooks
30
30
31
31
32 class HooksStub(object):
32 class HooksStub(object):
33 """
33 """
34 Simulates a Pyro4.Proxy object.
34 Simulates a Pyro4.Proxy object.
35
35
36 Will always return `result`, no matter which hook has been called on it.
36 Will always return `result`, no matter which hook has been called on it.
37 """
37 """
38
38
39 def __init__(self, result):
39 def __init__(self, result):
40 self._result = result
40 self._result = result
41
41
42 def __call__(self, hooks_uri):
42 def __call__(self, hooks_uri):
43 return self
43 return self
44
44
45 def __enter__(self):
45 def __enter__(self):
46 return self
46 return self
47
47
48 def __exit__(self, exc_type, exc_value, traceback):
48 def __exit__(self, exc_type, exc_value, traceback):
49 pass
49 pass
50
50
51 def __getattr__(self, name):
51 def __getattr__(self, name):
52 return mock.Mock(return_value=self._result)
52 return mock.Mock(return_value=self._result)
53
53
54
54
55 @contextlib.contextmanager
55 @contextlib.contextmanager
56 def mock_hook_response(
56 def mock_hook_response(
57 status=0, output='', exception=None, exception_args=None):
57 status=0, output='', exception=None, exception_args=None):
58 response = {
58 response = {
59 'status': status,
59 'status': status,
60 'output': output,
60 'output': output,
61 }
61 }
62 if exception:
62 if exception:
63 response.update({
63 response.update({
64 'exception': exception,
64 'exception': exception,
65 'exception_args': exception_args,
65 'exception_args': exception_args,
66 })
66 })
67
67
68 with mock.patch('Pyro4.Proxy', HooksStub(response)):
68 with mock.patch('Pyro4.Proxy', HooksStub(response)):
69 yield
69 yield
70
70
71
71
72 def get_hg_ui(extras=None):
72 def get_hg_ui(extras=None):
73 """Create a Config object with a valid RC_SCM_DATA entry."""
73 """Create a Config object with a valid RC_SCM_DATA entry."""
74 extras = extras or {}
74 extras = extras or {}
75 required_extras = {
75 required_extras = {
76 'username': '',
76 'username': '',
77 'repository': '',
77 'repository': '',
78 'locked_by': '',
78 'locked_by': '',
79 'scm': '',
79 'scm': '',
80 'make_lock': '',
80 'make_lock': '',
81 'action': '',
81 'action': '',
82 'ip': '',
82 'ip': '',
83 'hooks_uri': 'fake_hooks_uri',
83 'hooks_uri': 'fake_hooks_uri',
84 }
84 }
85 required_extras.update(extras)
85 required_extras.update(extras)
86 hg_ui = mercurial.ui.ui()
86 hg_ui = mercurial.ui.ui()
87 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
87 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
88
88
89 return hg_ui
89 return hg_ui
90
90
91
91
92 def test_call_hook_no_error(capsys):
92 def test_call_hook_no_error(capsys):
93 extras = {
93 extras = {
94 'hooks_uri': 'fake_hook_uri',
94 'hooks_uri': 'fake_hook_uri',
95 }
95 }
96 expected_output = 'My mock output'
96 expected_output = 'My mock output'
97 writer = mock.Mock()
97 writer = mock.Mock()
98
98
99 with mock_hook_response(status=1, output=expected_output):
99 with mock_hook_response(status=1, output=expected_output):
100 hooks._call_hook('hook_name', extras, writer)
100 hooks._call_hook('hook_name', extras, writer)
101
101
102 out, err = capsys.readouterr()
102 out, err = capsys.readouterr()
103
103
104 writer.write.assert_called_with(expected_output)
104 writer.write.assert_called_with(expected_output)
105 assert err == ''
105 assert err == ''
106
106
107
107
108 def test_call_hook_with_exception(capsys):
108 def test_call_hook_with_exception(capsys):
109 extras = {
109 extras = {
110 'hooks_uri': 'fake_hook_uri',
110 'hooks_uri': 'fake_hook_uri',
111 }
111 }
112 expected_output = 'My mock output'
112 expected_output = 'My mock output'
113 writer = mock.Mock()
113 writer = mock.Mock()
114
114
115 with mock_hook_response(status=1, output=expected_output,
115 with mock_hook_response(status=1, output=expected_output,
116 exception='TypeError',
116 exception='TypeError',
117 exception_args=('Mock exception', )):
117 exception_args=('Mock exception', )):
118 with pytest.raises(Exception) as excinfo:
118 with pytest.raises(Exception) as excinfo:
119 hooks._call_hook('hook_name', extras, writer)
119 hooks._call_hook('hook_name', extras, writer)
120
120
121 assert excinfo.type == Exception
121 assert excinfo.type == Exception
122 assert 'Mock exception' in str(excinfo.value)
122 assert 'Mock exception' in str(excinfo.value)
123
123
124 out, err = capsys.readouterr()
124 out, err = capsys.readouterr()
125
125
126 writer.write.assert_called_with(expected_output)
126 writer.write.assert_called_with(expected_output)
127 assert err == ''
127 assert err == ''
128
128
129
129
130 def test_call_hook_with_locked_exception(capsys):
130 def test_call_hook_with_locked_exception(capsys):
131 extras = {
131 extras = {
132 'hooks_uri': 'fake_hook_uri',
132 'hooks_uri': 'fake_hook_uri',
133 }
133 }
134 expected_output = 'My mock output'
134 expected_output = 'My mock output'
135 writer = mock.Mock()
135 writer = mock.Mock()
136
136
137 with mock_hook_response(status=1, output=expected_output,
137 with mock_hook_response(status=1, output=expected_output,
138 exception='HTTPLockedRC',
138 exception='HTTPLockedRC',
139 exception_args=('message',)):
139 exception_args=('message',)):
140 with pytest.raises(Exception) as excinfo:
140 with pytest.raises(Exception) as excinfo:
141 hooks._call_hook('hook_name', extras, writer)
141 hooks._call_hook('hook_name', extras, writer)
142
142
143 assert excinfo.value._vcs_kind == 'repo_locked'
143 assert excinfo.value._vcs_kind == 'repo_locked'
144 assert 'message' == str(excinfo.value)
144 assert 'message' == str(excinfo.value)
145
145
146 out, err = capsys.readouterr()
146 out, err = capsys.readouterr()
147
147
148 writer.write.assert_called_with(expected_output)
148 writer.write.assert_called_with(expected_output)
149 assert err == ''
149 assert err == ''
150
150
151
151
152 def test_call_hook_with_stdout():
152 def test_call_hook_with_stdout():
153 extras = {
153 extras = {
154 'hooks_uri': 'fake_hook_uri',
154 'hooks_uri': 'fake_hook_uri',
155 }
155 }
156 expected_output = 'My mock output'
156 expected_output = 'My mock output'
157
157
158 stdout = io.BytesIO()
158 stdout = io.BytesIO()
159 with mock_hook_response(status=1, output=expected_output):
159 with mock_hook_response(status=1, output=expected_output):
160 hooks._call_hook('hook_name', extras, stdout)
160 hooks._call_hook('hook_name', extras, stdout)
161
161
162 assert stdout.getvalue() == expected_output
162 assert stdout.getvalue() == expected_output
163
163
164
164
165 def test_repo_size():
165 def test_repo_size():
166 hg_ui = get_hg_ui()
166 hg_ui = get_hg_ui()
167
167
168 with mock_hook_response(status=1):
168 with mock_hook_response(status=1):
169 assert hooks.repo_size(hg_ui, None) == 1
169 assert hooks.repo_size(hg_ui, None) == 1
170
170
171
171
172 def test_pre_pull():
172 def test_pre_pull():
173 hg_ui = get_hg_ui()
173 hg_ui = get_hg_ui()
174
174
175 with mock_hook_response(status=1):
175 with mock_hook_response(status=1):
176 assert hooks.pre_pull(hg_ui, None) == 1
176 assert hooks.pre_pull(hg_ui, None) == 1
177
177
178
178
179 def test_post_pull():
179 def test_post_pull():
180 hg_ui = get_hg_ui()
180 hg_ui = get_hg_ui()
181
181
182 with mock_hook_response(status=1):
182 with mock_hook_response(status=1):
183 assert hooks.post_pull(hg_ui, None) == 1
183 assert hooks.post_pull(hg_ui, None) == 1
184
184
185
185
186 def test_pre_push():
186 def test_pre_push():
187 hg_ui = get_hg_ui()
187 hg_ui = get_hg_ui()
188
188
189 with mock_hook_response(status=1):
189 with mock_hook_response(status=1):
190 assert hooks.pre_push(hg_ui, None) == 1
190 assert hooks.pre_push(hg_ui, None) == 1
191
191
192
192
193 def test_post_push():
193 def test_post_push():
194 hg_ui = get_hg_ui()
194 hg_ui = get_hg_ui()
195
195
196 with mock_hook_response(status=1):
196 with mock_hook_response(status=1):
197 with mock.patch('vcsserver.hooks._rev_range_hash', return_value=[]):
197 with mock.patch('vcsserver.hooks._rev_range_hash', return_value=[]):
198 assert hooks.post_push(hg_ui, None, None) == 1
198 assert hooks.post_push(hg_ui, None, None) == 1
199
199
200
200
201 def test_git_pre_receive():
201 def test_git_pre_receive():
202 extras = {
202 extras = {
203 'hooks': ['push'],
203 'hooks': ['push'],
204 'hooks_uri': 'fake_hook_uri',
204 'hooks_uri': 'fake_hook_uri',
205 }
205 }
206 with mock_hook_response(status=1):
206 with mock_hook_response(status=1):
207 response = hooks.git_pre_receive(None, None,
207 response = hooks.git_pre_receive(None, None,
208 {'RC_SCM_DATA': json.dumps(extras)})
208 {'RC_SCM_DATA': json.dumps(extras)})
209 assert response == 1
209 assert response == 1
210
210
211
211
212 def test_git_pre_receive_is_disabled():
212 def test_git_pre_receive_is_disabled():
213 extras = {'hooks': ['pull']}
213 extras = {'hooks': ['pull']}
214 response = hooks.git_pre_receive(None, None,
214 response = hooks.git_pre_receive(None, None,
215 {'RC_SCM_DATA': json.dumps(extras)})
215 {'RC_SCM_DATA': json.dumps(extras)})
216
216
217 assert response == 0
217 assert response == 0
218
218
219
219
220 def test_git_post_receive_no_subprocess_call():
220 def test_git_post_receive_no_subprocess_call():
221 extras = {
221 extras = {
222 'hooks': ['push'],
222 'hooks': ['push'],
223 'hooks_uri': 'fake_hook_uri',
223 'hooks_uri': 'fake_hook_uri',
224 }
224 }
225 # Setting revision_lines to '' avoids all subprocess calls
225 # Setting revision_lines to '' avoids all subprocess calls
226 with mock_hook_response(status=1):
226 with mock_hook_response(status=1):
227 response = hooks.git_post_receive(None, '',
227 response = hooks.git_post_receive(None, '',
228 {'RC_SCM_DATA': json.dumps(extras)})
228 {'RC_SCM_DATA': json.dumps(extras)})
229 assert response == 1
229 assert response == 1
230
230
231
231
232 def test_git_post_receive_is_disabled():
232 def test_git_post_receive_is_disabled():
233 extras = {'hooks': ['pull']}
233 extras = {'hooks': ['pull']}
234 response = hooks.git_post_receive(None, '',
234 response = hooks.git_post_receive(None, '',
235 {'RC_SCM_DATA': json.dumps(extras)})
235 {'RC_SCM_DATA': json.dumps(extras)})
236
236
237 assert response == 0
237 assert response == 0
238
238
239
239
240 def test_git_post_receive_calls_repo_size():
240 def test_git_post_receive_calls_repo_size():
241 extras = {'hooks': ['push', 'repo_size']}
241 extras = {'hooks': ['push', 'repo_size']}
242 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
242 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
243 hooks.git_post_receive(
243 hooks.git_post_receive(
244 None, '', {'RC_SCM_DATA': json.dumps(extras)})
244 None, '', {'RC_SCM_DATA': json.dumps(extras)})
245 extras.update({'commit_ids': []})
245 extras.update({'commit_ids': []})
246 expected_calls = [
246 expected_calls = [
247 mock.call('repo_size', extras, mock.ANY),
247 mock.call('repo_size', extras, mock.ANY),
248 mock.call('post_push', extras, mock.ANY),
248 mock.call('post_push', extras, mock.ANY),
249 ]
249 ]
250 assert call_hook_mock.call_args_list == expected_calls
250 assert call_hook_mock.call_args_list == expected_calls
251
251
252
252
253 def test_git_post_receive_does_not_call_disabled_repo_size():
253 def test_git_post_receive_does_not_call_disabled_repo_size():
254 extras = {'hooks': ['push']}
254 extras = {'hooks': ['push']}
255 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
255 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
256 hooks.git_post_receive(
256 hooks.git_post_receive(
257 None, '', {'RC_SCM_DATA': json.dumps(extras)})
257 None, '', {'RC_SCM_DATA': json.dumps(extras)})
258 extras.update({'commit_ids': []})
258 extras.update({'commit_ids': []})
259 expected_calls = [
259 expected_calls = [
260 mock.call('post_push', extras, mock.ANY)
260 mock.call('post_push', extras, mock.ANY)
261 ]
261 ]
262 assert call_hook_mock.call_args_list == expected_calls
262 assert call_hook_mock.call_args_list == expected_calls
263
263
264
264
265 def test_repo_size_exception_does_not_affect_git_post_receive():
265 def test_repo_size_exception_does_not_affect_git_post_receive():
266 extras = {'hooks': ['push', 'repo_size']}
266 extras = {'hooks': ['push', 'repo_size']}
267 status = 0
267 status = 0
268
268
269 def side_effect(name, *args, **kwargs):
269 def side_effect(name, *args, **kwargs):
270 if name == 'repo_size':
270 if name == 'repo_size':
271 raise Exception('Fake exception')
271 raise Exception('Fake exception')
272 else:
272 else:
273 return status
273 return status
274
274
275 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
275 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
276 call_hook_mock.side_effect = side_effect
276 call_hook_mock.side_effect = side_effect
277 result = hooks.git_post_receive(
277 result = hooks.git_post_receive(
278 None, '', {'RC_SCM_DATA': json.dumps(extras)})
278 None, '', {'RC_SCM_DATA': json.dumps(extras)})
279 assert result == status
279 assert result == status
280
280
281
281
282 @mock.patch('vcsserver.hooks._run_command')
282 @mock.patch('vcsserver.hooks._run_command')
283 def test_git_post_receive_first_commit_sub_branch(cmd_mock):
283 def test_git_post_receive_first_commit_sub_branch(cmd_mock):
284 def cmd_mock_returns(args):
284 def cmd_mock_returns(args):
285 if args == ['git', 'show', 'HEAD']:
285 if args == ['git', 'show', 'HEAD']:
286 raise
286 raise
287 if args == ['git', 'for-each-ref', '--format=%(refname)',
287 if args == ['git', 'for-each-ref', '--format=%(refname)',
288 'refs/heads/*']:
288 'refs/heads/*']:
289 return 'refs/heads/test-branch2/sub-branch'
289 return 'refs/heads/test-branch2/sub-branch'
290 if args == ['git', 'log', '--reverse', '--pretty=format:%H', '--',
290 if args == ['git', 'log', '--reverse', '--pretty=format:%H', '--',
291 '9695eef57205c17566a3ae543be187759b310bb7', '--not',
291 '9695eef57205c17566a3ae543be187759b310bb7', '--not',
292 'refs/heads/test-branch2/sub-branch']:
292 'refs/heads/test-branch2/sub-branch']:
293 return ''
293 return ''
294
294
295 cmd_mock.side_effect = cmd_mock_returns
295 cmd_mock.side_effect = cmd_mock_returns
296
296
297 extras = {
297 extras = {
298 'hooks': ['push'],
298 'hooks': ['push'],
299 'hooks_uri': 'fake_hook_uri'
299 'hooks_uri': 'fake_hook_uri'
300 }
300 }
301 rev_lines = ['0000000000000000000000000000000000000000 '
301 rev_lines = ['0000000000000000000000000000000000000000 '
302 '9695eef57205c17566a3ae543be187759b310bb7 '
302 '9695eef57205c17566a3ae543be187759b310bb7 '
303 'refs/heads/feature/sub-branch\n']
303 'refs/heads/feature/sub-branch\n']
304 with mock_hook_response(status=0):
304 with mock_hook_response(status=0):
305 response = hooks.git_post_receive(None, rev_lines,
305 response = hooks.git_post_receive(None, rev_lines,
306 {'RC_SCM_DATA': json.dumps(extras)})
306 {'RC_SCM_DATA': json.dumps(extras)})
307
307
308 calls = [
308 calls = [
309 mock.call(['git', 'show', 'HEAD']),
309 mock.call(['git', 'show', 'HEAD']),
310 mock.call(['git', 'symbolic-ref', 'HEAD',
310 mock.call(['git', 'symbolic-ref', 'HEAD',
311 'refs/heads/feature/sub-branch']),
311 'refs/heads/feature/sub-branch']),
312 ]
312 ]
313 cmd_mock.assert_has_calls(calls, any_order=True)
313 cmd_mock.assert_has_calls(calls, any_order=True)
314 assert response == 0
314 assert response == 0
315
315
316
316
317 @mock.patch('vcsserver.hooks._run_command')
317 @mock.patch('vcsserver.hooks._run_command')
318 def test_git_post_receive_first_commit_revs(cmd_mock):
318 def test_git_post_receive_first_commit_revs(cmd_mock):
319 extras = {
319 extras = {
320 'hooks': ['push'],
320 'hooks': ['push'],
321 'hooks_uri': 'fake_hook_uri'
321 'hooks_uri': 'fake_hook_uri'
322 }
322 }
323 rev_lines = [
323 rev_lines = [
324 '0000000000000000000000000000000000000000 '
324 '0000000000000000000000000000000000000000 '
325 '9695eef57205c17566a3ae543be187759b310bb7 refs/heads/master\n']
325 '9695eef57205c17566a3ae543be187759b310bb7 refs/heads/master\n']
326 with mock_hook_response(status=0):
326 with mock_hook_response(status=0):
327 response = hooks.git_post_receive(
327 response = hooks.git_post_receive(
328 None, rev_lines, {'RC_SCM_DATA': json.dumps(extras)})
328 None, rev_lines, {'RC_SCM_DATA': json.dumps(extras)})
329
329
330 calls = [
330 calls = [
331 mock.call(['git', 'show', 'HEAD']),
331 mock.call(['git', 'show', 'HEAD']),
332 mock.call(['git', 'for-each-ref', '--format=%(refname)',
332 mock.call(['git', 'for-each-ref', '--format=%(refname)',
333 'refs/heads/*']),
333 'refs/heads/*']),
334 mock.call(['git', 'log', '--reverse', '--pretty=format:%H',
334 mock.call(['git', 'log', '--reverse', '--pretty=format:%H',
335 '--', '9695eef57205c17566a3ae543be187759b310bb7', '--not',
335 '--', '9695eef57205c17566a3ae543be187759b310bb7', '--not',
336 ''])
336 ''])
337 ]
337 ]
338 cmd_mock.assert_has_calls(calls, any_order=True)
338 cmd_mock.assert_has_calls(calls, any_order=True)
339
339
340 assert response == 0
340 assert response == 0
341
341
342
342
343 def test_git_pre_pull():
343 def test_git_pre_pull():
344 extras = {
344 extras = {
345 'hooks': ['pull'],
345 'hooks': ['pull'],
346 'hooks_uri': 'fake_hook_uri',
346 'hooks_uri': 'fake_hook_uri',
347 }
347 }
348 with mock_hook_response(status=1, output='foo'):
348 with mock_hook_response(status=1, output='foo'):
349 assert hooks.git_pre_pull(extras) == hooks.HookResponse(1, 'foo')
349 assert hooks.git_pre_pull(extras) == hooks.HookResponse(1, 'foo')
350
350
351
351
352 def test_git_pre_pull_exception_is_caught():
352 def test_git_pre_pull_exception_is_caught():
353 extras = {
353 extras = {
354 'hooks': ['pull'],
354 'hooks': ['pull'],
355 'hooks_uri': 'fake_hook_uri',
355 'hooks_uri': 'fake_hook_uri',
356 }
356 }
357 with mock_hook_response(status=2, exception=Exception('foo')):
357 with mock_hook_response(status=2, exception=Exception('foo')):
358 assert hooks.git_pre_pull(extras).status == 128
358 assert hooks.git_pre_pull(extras).status == 128
359
359
360
360
361 def test_git_pre_pull_is_disabled():
361 def test_git_pre_pull_is_disabled():
362 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
362 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
363
363
364
364
365 def test_git_post_pull():
365 def test_git_post_pull():
366 extras = {
366 extras = {
367 'hooks': ['pull'],
367 'hooks': ['pull'],
368 'hooks_uri': 'fake_hook_uri',
368 'hooks_uri': 'fake_hook_uri',
369 }
369 }
370 with mock_hook_response(status=1, output='foo'):
370 with mock_hook_response(status=1, output='foo'):
371 assert hooks.git_post_pull(extras) == hooks.HookResponse(1, 'foo')
371 assert hooks.git_post_pull(extras) == hooks.HookResponse(1, 'foo')
372
372
373
373
374 def test_git_post_pull_exception_is_caught():
374 def test_git_post_pull_exception_is_caught():
375 extras = {
375 extras = {
376 'hooks': ['pull'],
376 'hooks': ['pull'],
377 'hooks_uri': 'fake_hook_uri',
377 'hooks_uri': 'fake_hook_uri',
378 }
378 }
379 with mock_hook_response(status=2, exception='Exception',
379 with mock_hook_response(status=2, exception='Exception',
380 exception_args=('foo',)):
380 exception_args=('foo',)):
381 assert hooks.git_post_pull(extras).status == 128
381 assert hooks.git_post_pull(extras).status == 128
382
382
383
383
384 def test_git_post_pull_is_disabled():
384 def test_git_post_pull_is_disabled():
385 assert (
385 assert (
386 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
386 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
387
387
388
388
389 class TestGetHooksClient(object):
389 class TestGetHooksClient(object):
390 def test_returns_pyro_client_when_protocol_matches(self):
390 def test_returns_pyro_client_when_protocol_matches(self):
391 hooks_uri = 'localhost:8000'
391 hooks_uri = 'localhost:8000'
392 result = hooks._get_hooks_client({
392 result = hooks._get_hooks_client({
393 'hooks_uri': hooks_uri,
393 'hooks_uri': hooks_uri,
394 'hooks_protocol': 'pyro4'
394 'hooks_protocol': 'pyro4'
395 })
395 })
396 assert isinstance(result, hooks.HooksPyro4Client)
396 assert isinstance(result, hooks.HooksPyro4Client)
397 assert result.hooks_uri == hooks_uri
397 assert result.hooks_uri == hooks_uri
398
398
399 def test_returns_http_client_when_protocol_matches(self):
399 def test_returns_http_client_when_protocol_matches(self):
400 hooks_uri = 'localhost:8000'
400 hooks_uri = 'localhost:8000'
401 result = hooks._get_hooks_client({
401 result = hooks._get_hooks_client({
402 'hooks_uri': hooks_uri,
402 'hooks_uri': hooks_uri,
403 'hooks_protocol': 'http'
403 'hooks_protocol': 'http'
404 })
404 })
405 assert isinstance(result, hooks.HooksHttpClient)
405 assert isinstance(result, hooks.HooksHttpClient)
406 assert result.hooks_uri == hooks_uri
406 assert result.hooks_uri == hooks_uri
407
407
408 def test_returns_pyro4_client_when_no_protocol_is_specified(self):
408 def test_returns_pyro4_client_when_no_protocol_is_specified(self):
409 hooks_uri = 'localhost:8000'
409 hooks_uri = 'localhost:8000'
410 result = hooks._get_hooks_client({
410 result = hooks._get_hooks_client({
411 'hooks_uri': hooks_uri
411 'hooks_uri': hooks_uri
412 })
412 })
413 assert isinstance(result, hooks.HooksPyro4Client)
413 assert isinstance(result, hooks.HooksPyro4Client)
414 assert result.hooks_uri == hooks_uri
414 assert result.hooks_uri == hooks_uri
415
415
416 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
416 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
417 fake_module = mock.Mock()
417 fake_module = mock.Mock()
418 import_patcher = mock.patch.object(
418 import_patcher = mock.patch.object(
419 hooks.importlib, 'import_module', return_value=fake_module)
419 hooks.importlib, 'import_module', return_value=fake_module)
420 fake_module_name = 'fake.module'
420 fake_module_name = 'fake.module'
421 with import_patcher as import_mock:
421 with import_patcher as import_mock:
422 result = hooks._get_hooks_client(
422 result = hooks._get_hooks_client(
423 {'hooks_module': fake_module_name})
423 {'hooks_module': fake_module_name})
424
424
425 import_mock.assert_called_once_with(fake_module_name)
425 import_mock.assert_called_once_with(fake_module_name)
426 assert isinstance(result, hooks.HooksDummyClient)
426 assert isinstance(result, hooks.HooksDummyClient)
427 assert result._hooks_module == fake_module
427 assert result._hooks_module == fake_module
428
428
429
429
430 class TestHooksHttpClient(object):
430 class TestHooksHttpClient(object):
431 def test_init_sets_hooks_uri(self):
431 def test_init_sets_hooks_uri(self):
432 uri = 'localhost:3000'
432 uri = 'localhost:3000'
433 client = hooks.HooksHttpClient(uri)
433 client = hooks.HooksHttpClient(uri)
434 assert client.hooks_uri == uri
434 assert client.hooks_uri == uri
435
435
436 def test_serialize_returns_json_string(self):
436 def test_serialize_returns_json_string(self):
437 client = hooks.HooksHttpClient('localhost:3000')
437 client = hooks.HooksHttpClient('localhost:3000')
438 hook_name = 'test'
438 hook_name = 'test'
439 extras = {
439 extras = {
440 'first': 1,
440 'first': 1,
441 'second': 'two'
441 'second': 'two'
442 }
442 }
443 result = client._serialize(hook_name, extras)
443 result = client._serialize(hook_name, extras)
444 expected_result = json.dumps({
444 expected_result = json.dumps({
445 'method': hook_name,
445 'method': hook_name,
446 'extras': extras
446 'extras': extras
447 })
447 })
448 assert result == expected_result
448 assert result == expected_result
449
449
450 def test_call_queries_http_server(self, http_mirror):
450 def test_call_queries_http_server(self, http_mirror):
451 client = hooks.HooksHttpClient(http_mirror.uri)
451 client = hooks.HooksHttpClient(http_mirror.uri)
452 hook_name = 'test'
452 hook_name = 'test'
453 extras = {
453 extras = {
454 'first': 1,
454 'first': 1,
455 'second': 'two'
455 'second': 'two'
456 }
456 }
457 result = client(hook_name, extras)
457 result = client(hook_name, extras)
458 expected_result = {
458 expected_result = {
459 'method': hook_name,
459 'method': hook_name,
460 'extras': extras
460 'extras': extras
461 }
461 }
462 assert result == expected_result
462 assert result == expected_result
463
463
464
464
465 class TestHooksDummyClient(object):
465 class TestHooksDummyClient(object):
466 def test_init_imports_hooks_module(self):
466 def test_init_imports_hooks_module(self):
467 hooks_module_name = 'rhodecode.fake.module'
467 hooks_module_name = 'rhodecode.fake.module'
468 hooks_module = mock.MagicMock()
468 hooks_module = mock.MagicMock()
469
469
470 import_patcher = mock.patch.object(
470 import_patcher = mock.patch.object(
471 hooks.importlib, 'import_module', return_value=hooks_module)
471 hooks.importlib, 'import_module', return_value=hooks_module)
472 with import_patcher as import_mock:
472 with import_patcher as import_mock:
473 client = hooks.HooksDummyClient(hooks_module_name)
473 client = hooks.HooksDummyClient(hooks_module_name)
474 import_mock.assert_called_once_with(hooks_module_name)
474 import_mock.assert_called_once_with(hooks_module_name)
475 assert client._hooks_module == hooks_module
475 assert client._hooks_module == hooks_module
476
476
477 def test_call_returns_hook_result(self):
477 def test_call_returns_hook_result(self):
478 hooks_module_name = 'rhodecode.fake.module'
478 hooks_module_name = 'rhodecode.fake.module'
479 hooks_module = mock.MagicMock()
479 hooks_module = mock.MagicMock()
480 import_patcher = mock.patch.object(
480 import_patcher = mock.patch.object(
481 hooks.importlib, 'import_module', return_value=hooks_module)
481 hooks.importlib, 'import_module', return_value=hooks_module)
482 with import_patcher:
482 with import_patcher:
483 client = hooks.HooksDummyClient(hooks_module_name)
483 client = hooks.HooksDummyClient(hooks_module_name)
484
484
485 result = client('post_push', {})
485 result = client('post_push', {})
486 hooks_module.Hooks.assert_called_once_with()
486 hooks_module.Hooks.assert_called_once_with()
487 assert result == hooks_module.Hooks().__enter__().post_push()
487 assert result == hooks_module.Hooks().__enter__().post_push()
488
488
489
489
490 class TestHooksPyro4Client(object):
490 class TestHooksPyro4Client(object):
491 def test_init_sets_hooks_uri(self):
491 def test_init_sets_hooks_uri(self):
492 uri = 'localhost:3000'
492 uri = 'localhost:3000'
493 client = hooks.HooksPyro4Client(uri)
493 client = hooks.HooksPyro4Client(uri)
494 assert client.hooks_uri == uri
494 assert client.hooks_uri == uri
495
495
496 def test_call_returns_hook_value(self):
496 def test_call_returns_hook_value(self):
497 hooks_uri = 'localhost:3000'
497 hooks_uri = 'localhost:3000'
498 client = hooks.HooksPyro4Client(hooks_uri)
498 client = hooks.HooksPyro4Client(hooks_uri)
499 hooks_module = mock.Mock()
499 hooks_module = mock.Mock()
500 context_manager = mock.MagicMock()
500 context_manager = mock.MagicMock()
501 context_manager.__enter__.return_value = hooks_module
501 context_manager.__enter__.return_value = hooks_module
502 pyro4_patcher = mock.patch.object(
502 pyro4_patcher = mock.patch.object(
503 hooks.Pyro4, 'Proxy', return_value=context_manager)
503 hooks.Pyro4, 'Proxy', return_value=context_manager)
504 extras = {
504 extras = {
505 'test': 'test'
505 'test': 'test'
506 }
506 }
507 with pyro4_patcher as pyro4_mock:
507 with pyro4_patcher as pyro4_mock:
508 result = client('post_push', extras)
508 result = client('post_push', extras)
509 pyro4_mock.assert_called_once_with(hooks_uri)
509 pyro4_mock.assert_called_once_with(hooks_uri)
510 hooks_module.post_push.assert_called_once_with(extras)
510 hooks_module.post_push.assert_called_once_with(extras)
511 assert result == hooks_module.post_push.return_value
511 assert result == hooks_module.post_push.return_value
512
512
513
513
514 @pytest.fixture
514 @pytest.fixture
515 def http_mirror(request):
515 def http_mirror(request):
516 server = MirrorHttpServer()
516 server = MirrorHttpServer()
517 request.addfinalizer(server.stop)
517 request.addfinalizer(server.stop)
518 return server
518 return server
519
519
520
520
521 class MirrorHttpHandler(BaseHTTPRequestHandler):
521 class MirrorHttpHandler(BaseHTTPRequestHandler):
522 def do_POST(self):
522 def do_POST(self):
523 length = int(self.headers['Content-Length'])
523 length = int(self.headers['Content-Length'])
524 body = self.rfile.read(length).decode('utf-8')
524 body = self.rfile.read(length).decode('utf-8')
525 self.send_response(200)
525 self.send_response(200)
526 self.end_headers()
526 self.end_headers()
527 self.wfile.write(body)
527 self.wfile.write(body)
528
528
529
529
530 class MirrorHttpServer(object):
530 class MirrorHttpServer(object):
531 ip_address = '127.0.0.1'
531 ip_address = '127.0.0.1'
532 port = 0
532 port = 0
533
533
534 def __init__(self):
534 def __init__(self):
535 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
535 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
536 _, self.port = self._daemon.server_address
536 _, self.port = self._daemon.server_address
537 self._thread = threading.Thread(target=self._daemon.serve_forever)
537 self._thread = threading.Thread(target=self._daemon.serve_forever)
538 self._thread.daemon = True
538 self._thread.daemon = True
539 self._thread.start()
539 self._thread.start()
540
540
541 def stop(self):
541 def stop(self):
542 self._daemon.shutdown()
542 self._daemon.shutdown()
543 self._thread.join()
543 self._thread.join()
544 self._daemon = None
544 self._daemon = None
545 self._thread = None
545 self._thread = None
546
546
547 @property
547 @property
548 def uri(self):
548 def uri(self):
549 return '{}:{}'.format(self.ip_address, self.port)
549 return '{}:{}'.format(self.ip_address, self.port)
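
Note for readers of this test module: a minimal, illustrative sketch of driving the MirrorHttpServer helper above directly with the standard library. This is not part of the changeset; the import path is assumed from the file layout in this commit, and it targets Python 2 to match the BaseHTTPServer usage above.

import json
import urllib2  # Python 2 stdlib, matching the module's BaseHTTPServer import

# path assumed from this changeset's test layout
from vcsserver.tests.test_hooks import MirrorHttpServer

server = MirrorHttpServer()
try:
    # the handler mirrors whatever body it receives back to the caller
    payload = json.dumps({'method': 'post_push', 'extras': {'first': 1}})
    reply = urllib2.urlopen('http://{}'.format(server.uri), payload)
    assert json.loads(reply.read()) == {'method': 'post_push', 'extras': {'first': 1}}
finally:
    server.stop()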
1 NO CONTENT: file renamed from tests/test_http_performance.py to vcsserver/tests/test_http_performance.py
NO CONTENT: file renamed from tests/test_http_performance.py to vcsserver/tests/test_http_performance.py
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import main
21 from vcsserver import main
22 from vcsserver.base import obfuscate_qs
22 from vcsserver.base import obfuscate_qs
23
23
24
24
25 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
25 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 main.main([])
28 main.main([])
29 patch_largefiles_capabilities.assert_called_once_with()
29 patch_largefiles_capabilities.assert_called_once_with()
30
30
31
31
32 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
32 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
33 @mock.patch('vcsserver.main.MercurialFactory', None)
33 @mock.patch('vcsserver.main.MercurialFactory', None)
34 @mock.patch(
34 @mock.patch(
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 mock.Mock(side_effect=Exception("Must not be called")))
36 mock.Mock(side_effect=Exception("Must not be called")))
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 main.main([])
38 main.main([])
39
39
40
40
41 @pytest.mark.parametrize('given, expected', [
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
45 ('a;b;c;query&foo=bar&auth_token=secret',
46 'a&b&c&query&foo=bar&auth_token=*****'),
46 'a&b&c&query&foo=bar&auth_token=*****'),
47 ('', ''),
47 ('', ''),
48 (None, None),
48 (None, None),
49 ('foo=bar', 'foo=bar'),
49 ('foo=bar', 'foo=bar'),
50 ('auth_token=secret', 'auth_token=*****'),
50 ('auth_token=secret', 'auth_token=*****'),
51 ('auth_token=secret&api_key=secret2',
51 ('auth_token=secret&api_key=secret2',
52 'auth_token=*****&api_key=*****'),
52 'auth_token=*****&api_key=*****'),
53 ('auth_token=secret&api_key=secret2&param=value',
53 ('auth_token=secret&api_key=secret2&param=value',
54 'auth_token=*****&api_key=*****&param=value'),
54 'auth_token=*****&api_key=*****&param=value'),
55 ])
55 ])
56 def test_obfuscate_qs(given, expected):
56 def test_obfuscate_qs(given, expected):
57 assert expected == obfuscate_qs(given)
57 assert expected == obfuscate_qs(given)
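
For quick reference, the behaviour pinned down by the parametrize table above is that sensitive query parameters are masked while ordinary ones pass through unchanged. A short usage sketch, using pairs taken verbatim from the table:

from vcsserver.base import obfuscate_qs

# secrets are masked, ordinary parameters pass through unchanged
assert obfuscate_qs('auth_token=secret&api_key=secret2') == 'auth_token=*****&api_key=*****'
assert obfuscate_qs('query&foo=bar') == 'query&foo=bar'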
@@ -1,249 +1,249 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19
19
20 import dulwich.protocol
20 import dulwich.protocol
21 import mock
21 import mock
22 import pytest
22 import pytest
23 import webob
23 import webob
24 import webtest
24 import webtest
25
25
26 from vcsserver import hooks, pygrack
26 from vcsserver import hooks, pygrack
27
27
28 # pylint: disable=redefined-outer-name,protected-access
28 # pylint: disable=redefined-outer-name,protected-access
29
29
30
30
31 @pytest.fixture()
31 @pytest.fixture()
32 def pygrack_instance(tmpdir):
32 def pygrack_instance(tmpdir):
33 """
33 """
34 Creates a pygrack app instance.
34 Creates a pygrack app instance.
35
35
36 Right now, it does not do much with the passed directory.
36 Right now, it does not do much with the passed directory.
37 It just contains the required folders to pass the signature test.
37 It just contains the required folders to pass the signature test.
38 """
38 """
39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
39 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
40 tmpdir.mkdir(dir_name)
40 tmpdir.mkdir(dir_name)
41
41
42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
42 return pygrack.GitRepository('repo_name', str(tmpdir), 'git', False, {})
43
43
44
44
45 @pytest.fixture()
45 @pytest.fixture()
46 def pygrack_app(pygrack_instance):
46 def pygrack_app(pygrack_instance):
47 """
47 """
48 Creates a pygrack app wrapped in webtest.TestApp.
48 Creates a pygrack app wrapped in webtest.TestApp.
49 """
49 """
50 return webtest.TestApp(pygrack_instance)
50 return webtest.TestApp(pygrack_instance)
51
51
52
52
53 def test_invalid_service_info_refs_returns_403(pygrack_app):
53 def test_invalid_service_info_refs_returns_403(pygrack_app):
54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
54 response = pygrack_app.get('/info/refs?service=git-upload-packs',
55 expect_errors=True)
55 expect_errors=True)
56
56
57 assert response.status_int == 403
57 assert response.status_int == 403
58
58
59
59
60 def test_invalid_endpoint_returns_403(pygrack_app):
60 def test_invalid_endpoint_returns_403(pygrack_app):
61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
61 response = pygrack_app.post('/git-upload-packs', expect_errors=True)
62
62
63 assert response.status_int == 403
63 assert response.status_int == 403
64
64
65
65
66 @pytest.mark.parametrize('sideband', [
66 @pytest.mark.parametrize('sideband', [
67 'side-band-64k',
67 'side-band-64k',
68 'side-band',
68 'side-band',
69 'side-band no-progress',
69 'side-band no-progress',
70 ])
70 ])
71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
71 def test_pre_pull_hook_fails_with_sideband(pygrack_app, sideband):
72 request = ''.join([
72 request = ''.join([
73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
73 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ',
74 'multi_ack %s ofs-delta\n' % sideband,
74 'multi_ack %s ofs-delta\n' % sideband,
75 '0000',
75 '0000',
76 '0009done\n',
76 '0009done\n',
77 ])
77 ])
78 with mock.patch('vcsserver.hooks.git_pre_pull',
78 with mock.patch('vcsserver.hooks.git_pre_pull',
79 return_value=hooks.HookResponse(1, 'foo')):
79 return_value=hooks.HookResponse(1, 'foo')):
80 response = pygrack_app.post(
80 response = pygrack_app.post(
81 '/git-upload-pack', params=request,
81 '/git-upload-pack', params=request,
82 content_type='application/x-git-upload-pack')
82 content_type='application/x-git-upload-pack')
83
83
84 data = io.BytesIO(response.body)
84 data = io.BytesIO(response.body)
85 proto = dulwich.protocol.Protocol(data.read, None)
85 proto = dulwich.protocol.Protocol(data.read, None)
86 packets = list(proto.read_pkt_seq())
86 packets = list(proto.read_pkt_seq())
87
87
88 expected_packets = [
88 expected_packets = [
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
89 'NAK\n', '\x02foo', '\x02Pre pull hook failed: aborting\n',
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
90 '\x01' + pygrack.GitRepository.EMPTY_PACK,
91 ]
91 ]
92 assert packets == expected_packets
92 assert packets == expected_packets
93
93
94
94
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
95 def test_pre_pull_hook_fails_no_sideband(pygrack_app):
96 request = ''.join([
96 request = ''.join([
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
97 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
98 'multi_ack ofs-delta\n'
98 'multi_ack ofs-delta\n'
99 '0000',
99 '0000',
100 '0009done\n',
100 '0009done\n',
101 ])
101 ])
102 with mock.patch('vcsserver.hooks.git_pre_pull',
102 with mock.patch('vcsserver.hooks.git_pre_pull',
103 return_value=hooks.HookResponse(1, 'foo')):
103 return_value=hooks.HookResponse(1, 'foo')):
104 response = pygrack_app.post(
104 response = pygrack_app.post(
105 '/git-upload-pack', params=request,
105 '/git-upload-pack', params=request,
106 content_type='application/x-git-upload-pack')
106 content_type='application/x-git-upload-pack')
107
107
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
108 assert response.body == pygrack.GitRepository.EMPTY_PACK
109
109
110
110
111 def test_pull_has_hook_messages(pygrack_app):
111 def test_pull_has_hook_messages(pygrack_app):
112 request = ''.join([
112 request = ''.join([
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
113 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
114 'multi_ack side-band-64k ofs-delta\n'
114 'multi_ack side-band-64k ofs-delta\n'
115 '0000',
115 '0000',
116 '0009done\n',
116 '0009done\n',
117 ])
117 ])
118 with mock.patch('vcsserver.hooks.git_pre_pull',
118 with mock.patch('vcsserver.hooks.git_pre_pull',
119 return_value=hooks.HookResponse(0, 'foo')):
119 return_value=hooks.HookResponse(0, 'foo')):
120 with mock.patch('vcsserver.hooks.git_post_pull',
120 with mock.patch('vcsserver.hooks.git_post_pull',
121 return_value=hooks.HookResponse(1, 'bar')):
121 return_value=hooks.HookResponse(1, 'bar')):
122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
122 with mock.patch('vcsserver.subprocessio.SubprocessIOChunker',
123 return_value=['0008NAK\n0009subp\n0000']):
123 return_value=['0008NAK\n0009subp\n0000']):
124 response = pygrack_app.post(
124 response = pygrack_app.post(
125 '/git-upload-pack', params=request,
125 '/git-upload-pack', params=request,
126 content_type='application/x-git-upload-pack')
126 content_type='application/x-git-upload-pack')
127
127
128 data = io.BytesIO(response.body)
128 data = io.BytesIO(response.body)
129 proto = dulwich.protocol.Protocol(data.read, None)
129 proto = dulwich.protocol.Protocol(data.read, None)
130 packets = list(proto.read_pkt_seq())
130 packets = list(proto.read_pkt_seq())
131
131
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
132 assert packets == ['NAK\n', '\x02foo', 'subp\n', '\x02bar']
133
133
134
134
135 def test_get_want_capabilities(pygrack_instance):
135 def test_get_want_capabilities(pygrack_instance):
136 data = io.BytesIO(
136 data = io.BytesIO(
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
137 '0054want 74730d410fcb6603ace96f1dc55ea6196122532d ' +
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
138 'multi_ack side-band-64k ofs-delta\n00000009done\n')
139
139
140 request = webob.Request({
140 request = webob.Request({
141 'wsgi.input': data,
141 'wsgi.input': data,
142 'REQUEST_METHOD': 'POST',
142 'REQUEST_METHOD': 'POST',
143 'webob.is_body_seekable': True
143 'webob.is_body_seekable': True
144 })
144 })
145
145
146 capabilities = pygrack_instance._get_want_capabilities(request)
146 capabilities = pygrack_instance._get_want_capabilities(request)
147
147
148 assert capabilities == frozenset(
148 assert capabilities == frozenset(
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
149 ('ofs-delta', 'multi_ack', 'side-band-64k'))
150 assert data.tell() == 0
150 assert data.tell() == 0
151
151
152
152
153 @pytest.mark.parametrize('data,capabilities,expected', [
153 @pytest.mark.parametrize('data,capabilities,expected', [
154 ('foo', [], []),
154 ('foo', [], []),
155 ('', ['side-band-64k'], []),
155 ('', ['side-band-64k'], []),
156 ('', ['side-band'], []),
156 ('', ['side-band'], []),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
157 ('foo', ['side-band-64k'], ['0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
158 ('foo', ['side-band'], ['0008\x02foo']),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
159 ('f'*1000, ['side-band-64k'], ['03ed\x02' + 'f' * 1000]),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
160 ('f'*1000, ['side-band'], ['03e8\x02' + 'f' * 995, '000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
161 ('f'*65520, ['side-band-64k'], ['fff0\x02' + 'f' * 65515, '000a\x02fffff']),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
162 ('f'*65520, ['side-band'], ['03e8\x02' + 'f' * 995] * 65 + ['0352\x02' + 'f' * 845]),
163 ], ids=[
163 ], ids=[
164 'foo-empty',
164 'foo-empty',
165 'empty-64k', 'empty',
165 'empty-64k', 'empty',
166 'foo-64k', 'foo',
166 'foo-64k', 'foo',
167 'f-1000-64k', 'f-1000',
167 'f-1000-64k', 'f-1000',
168 'f-65520-64k', 'f-65520'])
168 'f-65520-64k', 'f-65520'])
169 def test_get_messages(pygrack_instance, data, capabilities, expected):
169 def test_get_messages(pygrack_instance, data, capabilities, expected):
170 messages = pygrack_instance._get_messages(data, capabilities)
170 messages = pygrack_instance._get_messages(data, capabilities)
171
171
172 assert messages == expected
172 assert messages == expected
173
173
174
174
175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
175 @pytest.mark.parametrize('response,capabilities,pre_pull_messages,post_pull_messages', [
176 # Unexpected response
176 # Unexpected response
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
177 ('unexpected_response', ['side-band-64k'], 'foo', 'bar'),
178 # No sideband
178 # No sideband
179 ('no-sideband', [], 'foo', 'bar'),
179 ('no-sideband', [], 'foo', 'bar'),
180 # No messages
180 # No messages
181 ('no-messages', ['side-band-64k'], '', ''),
181 ('no-messages', ['side-band-64k'], '', ''),
182 ])
182 ])
183 def test_inject_messages_to_response_nothing_to_do(
183 def test_inject_messages_to_response_nothing_to_do(
184 pygrack_instance, response, capabilities, pre_pull_messages,
184 pygrack_instance, response, capabilities, pre_pull_messages,
185 post_pull_messages):
185 post_pull_messages):
186 new_response = pygrack_instance._inject_messages_to_response(
186 new_response = pygrack_instance._inject_messages_to_response(
187 response, capabilities, pre_pull_messages, post_pull_messages)
187 response, capabilities, pre_pull_messages, post_pull_messages)
188
188
189 assert new_response == response
189 assert new_response == response
190
190
191
191
192 @pytest.mark.parametrize('capabilities', [
192 @pytest.mark.parametrize('capabilities', [
193 ['side-band'],
193 ['side-band'],
194 ['side-band-64k'],
194 ['side-band-64k'],
195 ])
195 ])
196 def test_inject_messages_to_response_single_element(pygrack_instance,
196 def test_inject_messages_to_response_single_element(pygrack_instance,
197 capabilities):
197 capabilities):
198 response = ['0008NAK\n0009subp\n0000']
198 response = ['0008NAK\n0009subp\n0000']
199 new_response = pygrack_instance._inject_messages_to_response(
199 new_response = pygrack_instance._inject_messages_to_response(
200 response, capabilities, 'foo', 'bar')
200 response, capabilities, 'foo', 'bar')
201
201
202 expected_response = [
202 expected_response = [
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
203 '0008NAK\n', '0008\x02foo', '0009subp\n', '0008\x02bar', '0000']
204
204
205 assert new_response == expected_response
205 assert new_response == expected_response
206
206
207
207
208 @pytest.mark.parametrize('capabilities', [
208 @pytest.mark.parametrize('capabilities', [
209 ['side-band'],
209 ['side-band'],
210 ['side-band-64k'],
210 ['side-band-64k'],
211 ])
211 ])
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
212 def test_inject_messages_to_response_multi_element(pygrack_instance,
213 capabilities):
213 capabilities):
214 response = [
214 response = [
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
215 '0008NAK\n000asubp1\n', '000asubp2\n', '000asubp3\n', '000asubp4\n0000']
216 new_response = pygrack_instance._inject_messages_to_response(
216 new_response = pygrack_instance._inject_messages_to_response(
217 response, capabilities, 'foo', 'bar')
217 response, capabilities, 'foo', 'bar')
218
218
219 expected_response = [
219 expected_response = [
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
220 '0008NAK\n', '0008\x02foo', '000asubp1\n', '000asubp2\n', '000asubp3\n',
221 '000asubp4\n', '0008\x02bar', '0000'
221 '000asubp4\n', '0008\x02bar', '0000'
222 ]
222 ]
223
223
224 assert new_response == expected_response
224 assert new_response == expected_response
225
225
226
226
227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
227 def test_build_failed_pre_pull_response_no_sideband(pygrack_instance):
228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
228 response = pygrack_instance._build_failed_pre_pull_response([], 'foo')
229
229
230 assert response == [pygrack.GitRepository.EMPTY_PACK]
230 assert response == [pygrack.GitRepository.EMPTY_PACK]
231
231
232
232
233 @pytest.mark.parametrize('capabilities', [
233 @pytest.mark.parametrize('capabilities', [
234 ['side-band'],
234 ['side-band'],
235 ['side-band-64k'],
235 ['side-band-64k'],
236 ['side-band-64k', 'no-progress'],
236 ['side-band-64k', 'no-progress'],
237 ])
237 ])
238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
238 def test_build_failed_pre_pull_response(pygrack_instance, capabilities):
239 response = pygrack_instance._build_failed_pre_pull_response(
239 response = pygrack_instance._build_failed_pre_pull_response(
240 capabilities, 'foo')
240 capabilities, 'foo')
241
241
242 expected_response = [
242 expected_response = [
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
243 '0008NAK\n', '0008\x02foo', '0024\x02Pre pull hook failed: aborting\n',
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
244 '%04x\x01%s' % (len(pygrack.GitRepository.EMPTY_PACK) + 5,
245 pygrack.GitRepository.EMPTY_PACK),
245 pygrack.GitRepository.EMPTY_PACK),
246 '0000',
246 '0000',
247 ]
247 ]
248
248
249 assert response == expected_response
249 assert response == expected_response
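
A brief note on the pkt-line strings asserted above: the four hex digits count the length prefix itself plus the one-byte sideband channel plus the payload, which is why 'foo' on channel 2 serializes as '0008\x02foo'. A hedged sketch of that framing (illustrative only, not the vcsserver implementation), checked against values from the tests above:

def pkt_line(channel, payload):
    # 4 bytes of hex length prefix + 1 byte sideband channel + payload
    return '%04x%s%s' % (4 + 1 + len(payload), channel, payload)

assert pkt_line('\x02', 'foo') == '0008\x02foo'
assert pkt_line('\x02', 'f' * 1000) == '03ed\x02' + 'f' * 1000
assert pkt_line('\x02', 'Pre pull hook failed: aborting\n') == '0024\x02Pre pull hook failed: aborting\n'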
@@ -1,86 +1,86 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mercurial.hg
20 import mercurial.hg
21 import mercurial.ui
21 import mercurial.ui
22 import mercurial.error
22 import mercurial.error
23 import mock
23 import mock
24 import pytest
24 import pytest
25 import webtest
25 import webtest
26
26
27 from vcsserver import scm_app
27 from vcsserver import scm_app
28
28
29
29
30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
30 def test_hg_does_not_accept_invalid_cmd(tmpdir):
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
31 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
32 app = webtest.TestApp(scm_app.HgWeb(repo))
32 app = webtest.TestApp(scm_app.HgWeb(repo))
33
33
34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
34 response = app.get('/repo?cmd=invalidcmd', expect_errors=True)
35
35
36 assert response.status_int == 400
36 assert response.status_int == 400
37
37
38
38
39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
39 def test_create_hg_wsgi_app_requirement_error(tmpdir):
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
40 repo = mercurial.hg.repository(mercurial.ui.ui(), str(tmpdir), create=True)
41 config = (
41 config = (
42 ('paths', 'default', ''),
42 ('paths', 'default', ''),
43 )
43 )
44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
44 with mock.patch('vcsserver.scm_app.HgWeb') as hgweb_mock:
45 hgweb_mock.side_effect = mercurial.error.RequirementError()
45 hgweb_mock.side_effect = mercurial.error.RequirementError()
46 with pytest.raises(Exception):
46 with pytest.raises(Exception):
47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
47 scm_app.create_hg_wsgi_app(str(tmpdir), repo, config)
48
48
49
49
50 def test_git_returns_not_found(tmpdir):
50 def test_git_returns_not_found(tmpdir):
51 app = webtest.TestApp(
51 app = webtest.TestApp(
52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
52 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
53
53
54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
54 response = app.get('/repo_name/inforefs?service=git-upload-pack',
55 expect_errors=True)
55 expect_errors=True)
56
56
57 assert response.status_int == 404
57 assert response.status_int == 404
58
58
59
59
60 def test_git(tmpdir):
60 def test_git(tmpdir):
61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
61 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
62 tmpdir.mkdir(dir_name)
62 tmpdir.mkdir(dir_name)
63
63
64 app = webtest.TestApp(
64 app = webtest.TestApp(
65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
65 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
66
66
67 # We set service to git-upload-packs to trigger a 403
67 # We set service to git-upload-packs to trigger a 403
68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
68 response = app.get('/repo_name/inforefs?service=git-upload-packs',
69 expect_errors=True)
69 expect_errors=True)
70
70
71 assert response.status_int == 403
71 assert response.status_int == 403
72
72
73
73
74 def test_git_fallbacks_to_git_folder(tmpdir):
74 def test_git_fallbacks_to_git_folder(tmpdir):
75 tmpdir.mkdir('.git')
75 tmpdir.mkdir('.git')
76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
76 for dir_name in ('config', 'head', 'info', 'objects', 'refs'):
77 tmpdir.mkdir(os.path.join('.git', dir_name))
77 tmpdir.mkdir(os.path.join('.git', dir_name))
78
78
79 app = webtest.TestApp(
79 app = webtest.TestApp(
80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
80 scm_app.GitHandler(str(tmpdir), 'repo_name', 'git', False, {}))
81
81
82 # We set service to git-upload-packs to trigger a 403
82 # We set service to git-upload-packs to trigger a 403
83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
83 response = app.get('/repo_name/inforefs?service=git-upload-packs',
84 expect_errors=True)
84 expect_errors=True)
85
85
86 assert response.status_int == 403
86 assert response.status_int == 403
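Both HgWeb and GitHandler are exercised here as plain WSGI callables wrapped in webtest.TestApp, so they can also be mounted directly under any WSGI server for manual poking. A small sketch using the standard library's wsgiref; the host, port and repository path are illustrative, and the parameter names in the comment are only my reading of the positional call used in the tests above:

import wsgiref.simple_server

from vcsserver import scm_app

# Same positional arguments as in the tests: repository path, repository name,
# backend alias, update_server_info flag, extras/config dict (names assumed).
app = scm_app.GitHandler('/tmp/repo', 'repo_name', 'git', False, {})

# Serve the handler on localhost:8090 so it can be hit with git or curl.
server = wsgiref.simple_server.make_server('127.0.0.1', 8090, app)
server.handle_request()  # serve a single request, then return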
@@ -1,39 +1,39 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19
19
20 import mock
20 import mock
21 import pytest
21 import pytest
22
22
23 from vcsserver.server import VcsServer
23 from vcsserver.server import VcsServer
24
24
25
25
26 def test_provides_the_pid(server):
26 def test_provides_the_pid(server):
27 pid = server.get_pid()
27 pid = server.get_pid()
28 assert pid == os.getpid()
28 assert pid == os.getpid()
29
29
30
30
31 def test_allows_to_trigger_the_garbage_collector(server):
31 def test_allows_to_trigger_the_garbage_collector(server):
32 with mock.patch('gc.collect') as collect:
32 with mock.patch('gc.collect') as collect:
33 server.run_gc()
33 server.run_gc()
34 assert collect.called
34 assert collect.called
35
35
36
36
37 @pytest.fixture
37 @pytest.fixture
38 def server():
38 def server():
39 return VcsServer()
39 return VcsServer()
@@ -1,122 +1,122 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import os
19 import os
20 import sys
20 import sys
21
21
22 import pytest
22 import pytest
23
23
24 from vcsserver import subprocessio
24 from vcsserver import subprocessio
25
25
26
26
27 @pytest.fixture(scope='module')
27 @pytest.fixture(scope='module')
28 def environ():
28 def environ():
29 """Delete coverage variables, as they make the tests fail."""
29 """Delete coverage variables, as they make the tests fail."""
30 env = dict(os.environ)
30 env = dict(os.environ)
31 for key in env.keys():
31 for key in env.keys():
32 if key.startswith('COV_CORE_'):
32 if key.startswith('COV_CORE_'):
33 del env[key]
33 del env[key]
34
34
35 return env
35 return env
36
36
37
37
38 def _get_python_args(script):
38 def _get_python_args(script):
39 return [sys.executable, '-c',
39 return [sys.executable, '-c',
40 'import sys; import time; import shutil; ' + script]
40 'import sys; import time; import shutil; ' + script]
41
41
42
42
43 def test_raise_exception_on_non_zero_return_code(environ):
43 def test_raise_exception_on_non_zero_return_code(environ):
44 args = _get_python_args('sys.exit(1)')
44 args = _get_python_args('sys.exit(1)')
45 with pytest.raises(EnvironmentError):
45 with pytest.raises(EnvironmentError):
46 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
46 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
47
47
48
48
49 def test_does_not_fail_on_non_zero_return_code(environ):
49 def test_does_not_fail_on_non_zero_return_code(environ):
50 args = _get_python_args('sys.exit(1)')
50 args = _get_python_args('sys.exit(1)')
51 output = ''.join(subprocessio.SubprocessIOChunker(
51 output = ''.join(subprocessio.SubprocessIOChunker(
52 args, shell=False, fail_on_return_code=False, env=environ))
52 args, shell=False, fail_on_return_code=False, env=environ))
53
53
54 assert output == ''
54 assert output == ''
55
55
56
56
57 def test_raise_exception_on_stderr(environ):
57 def test_raise_exception_on_stderr(environ):
58 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
58 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
59 with pytest.raises(EnvironmentError) as excinfo:
59 with pytest.raises(EnvironmentError) as excinfo:
60 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
60 list(subprocessio.SubprocessIOChunker(args, shell=False, env=environ))
61
61
62 assert 'exited due to an error:\nX' in str(excinfo.value)
62 assert 'exited due to an error:\nX' in str(excinfo.value)
63
63
64
64
65 def test_does_not_fail_on_stderr(environ):
65 def test_does_not_fail_on_stderr(environ):
66 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
66 args = _get_python_args('sys.stderr.write("X"); time.sleep(1);')
67 output = ''.join(subprocessio.SubprocessIOChunker(
67 output = ''.join(subprocessio.SubprocessIOChunker(
68 args, shell=False, fail_on_stderr=False, env=environ))
68 args, shell=False, fail_on_stderr=False, env=environ))
69
69
70 assert output == ''
70 assert output == ''
71
71
72
72
73 @pytest.mark.parametrize('size', [1, 10**5])
73 @pytest.mark.parametrize('size', [1, 10**5])
74 def test_output_with_no_input(size, environ):
74 def test_output_with_no_input(size, environ):
75 print type(environ)
75 print type(environ)
76 data = 'X'
76 data = 'X'
77 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
77 args = _get_python_args('sys.stdout.write("%s" * %d)' % (data, size))
78 output = ''.join(subprocessio.SubprocessIOChunker(
78 output = ''.join(subprocessio.SubprocessIOChunker(
79 args, shell=False, env=environ))
79 args, shell=False, env=environ))
80
80
81 assert output == data * size
81 assert output == data * size
82
82
83
83
84 @pytest.mark.parametrize('size', [1, 10**5])
84 @pytest.mark.parametrize('size', [1, 10**5])
85 def test_output_with_no_input_does_not_fail(size, environ):
85 def test_output_with_no_input_does_not_fail(size, environ):
86 data = 'X'
86 data = 'X'
87 args = _get_python_args(
87 args = _get_python_args(
88 'sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
88 'sys.stdout.write("%s" * %d); sys.exit(1)' % (data, size))
89 output = ''.join(subprocessio.SubprocessIOChunker(
89 output = ''.join(subprocessio.SubprocessIOChunker(
90 args, shell=False, fail_on_return_code=False, env=environ))
90 args, shell=False, fail_on_return_code=False, env=environ))
91
91
92 print len(data * size), len(output)
92 print len(data * size), len(output)
93 assert output == data * size
93 assert output == data * size
94
94
95
95
96 @pytest.mark.parametrize('size', [1, 10**5])
96 @pytest.mark.parametrize('size', [1, 10**5])
97 def test_output_with_input(size, environ):
97 def test_output_with_input(size, environ):
98 data = 'X' * size
98 data = 'X' * size
99 inputstream = io.BytesIO(data)
99 inputstream = io.BytesIO(data)
100 # This acts like the cat command.
100 # This acts like the cat command.
101 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
101 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
102 output = ''.join(subprocessio.SubprocessIOChunker(
102 output = ''.join(subprocessio.SubprocessIOChunker(
103 args, shell=False, inputstream=inputstream, env=environ))
103 args, shell=False, inputstream=inputstream, env=environ))
104
104
105 print len(data), len(output)
105 print len(data), len(output)
106 assert output == data
106 assert output == data
107
107
108
108
109 @pytest.mark.parametrize('size', [1, 10**5])
109 @pytest.mark.parametrize('size', [1, 10**5])
110 def test_output_with_input_skipping_iterator(size, environ):
110 def test_output_with_input_skipping_iterator(size, environ):
111 data = 'X' * size
111 data = 'X' * size
112 inputstream = io.BytesIO(data)
112 inputstream = io.BytesIO(data)
113 # This acts like the cat command.
113 # This acts like the cat command.
114 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
114 args = _get_python_args('shutil.copyfileobj(sys.stdin, sys.stdout)')
115
115
116 # Note: assigning the chunker makes sure that it is not deleted too early
116 # Note: assigning the chunker makes sure that it is not deleted too early
117 chunker = subprocessio.SubprocessIOChunker(
117 chunker = subprocessio.SubprocessIOChunker(
118 args, shell=False, inputstream=inputstream, env=environ)
118 args, shell=False, inputstream=inputstream, env=environ)
119 output = ''.join(chunker.output)
119 output = ''.join(chunker.output)
120
120
121 print len(data), len(output)
121 print len(data), len(output)
122 assert output == data
122 assert output == data
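Taken together, the tests above describe the SubprocessIOChunker contract: it runs a command, streams the child's stdout in chunks as you iterate it (or its .output attribute), can feed an inputstream to the child's stdin, and raises EnvironmentError on stderr output or a non-zero exit unless fail_on_stderr / fail_on_return_code are disabled. A short usage sketch based only on the keyword arguments exercised above; the echoed command is illustrative:

import os
import sys

from vcsserver import subprocessio

# Run a small child process and stream its stdout chunk by chunk.
args = [sys.executable, '-c', 'import sys; sys.stdout.write("hello")']
chunker = subprocessio.SubprocessIOChunker(
    args,
    shell=False,                 # args is already an argv list
    env=dict(os.environ),        # environment passed to the child
    fail_on_stderr=True,         # raise if the child writes to stderr
    fail_on_return_code=True,    # raise if the child exits non-zero
)

# Iterating the chunker yields the child's stdout as it becomes available.
output = ''.join(chunker)
assert output == 'hello'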
@@ -1,67 +1,67 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import mock
19 import mock
20 import pytest
20 import pytest
21 import sys
21 import sys
22
22
23
23
24 class MockPopen(object):
24 class MockPopen(object):
25 def __init__(self, stderr):
25 def __init__(self, stderr):
26 self.stdout = io.BytesIO('')
26 self.stdout = io.BytesIO('')
27 self.stderr = io.BytesIO(stderr)
27 self.stderr = io.BytesIO(stderr)
28 self.returncode = 1
28 self.returncode = 1
29
29
30 def wait(self):
30 def wait(self):
31 pass
31 pass
32
32
33
33
34 INVALID_CERTIFICATE_STDERR = '\n'.join([
34 INVALID_CERTIFICATE_STDERR = '\n'.join([
35 'svnrdump: E230001: Unable to connect to a repository at URL url',
35 'svnrdump: E230001: Unable to connect to a repository at URL url',
36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
36 'svnrdump: E230001: Server SSL certificate verification failed: issuer is not trusted',
37 ])
37 ])
38
38
39
39
40 @pytest.mark.parametrize('stderr,expected_reason', [
40 @pytest.mark.parametrize('stderr,expected_reason', [
41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
41 (INVALID_CERTIFICATE_STDERR, 'INVALID_CERTIFICATE'),
42 ('svnrdump: E123456', 'UNKNOWN'),
42 ('svnrdump: E123456', 'UNKNOWN'),
43 ])
43 ], ids=['invalid-cert-stderr', 'svnrdump-err-123456'])
44 @pytest.mark.xfail(sys.platform == "cygwin",
44 @pytest.mark.xfail(sys.platform == "cygwin",
45 reason="SVN not packaged for Cygwin")
45 reason="SVN not packaged for Cygwin")
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
46 def test_import_remote_repository_certificate_error(stderr, expected_reason):
47 from vcsserver import svn
47 from vcsserver import svn
48
48
49 remote = svn.SvnRemote(None)
49 remote = svn.SvnRemote(None)
50 remote.is_path_valid_repository = lambda wire, path: True
50 remote.is_path_valid_repository = lambda wire, path: True
51
51
52 with mock.patch('subprocess.Popen',
52 with mock.patch('subprocess.Popen',
53 return_value=MockPopen(stderr)):
53 return_value=MockPopen(stderr)):
54 with pytest.raises(Exception) as excinfo:
54 with pytest.raises(Exception) as excinfo:
55 remote.import_remote_repository({'path': 'path'}, 'url')
55 remote.import_remote_repository({'path': 'path'}, 'url')
56
56
57 expected_error_args = (
57 expected_error_args = (
58 'Failed to dump the remote repository from url.',
58 'Failed to dump the remote repository from url.',
59 expected_reason)
59 expected_reason)
60
60
61 assert excinfo.value.args == expected_error_args
61 assert excinfo.value.args == expected_error_args
62
62
63
63
64 def test_svn_libraries_can_be_imported():
64 def test_svn_libraries_can_be_imported():
65 import svn
65 import svn
66 import svn.client
66 import svn.client
67 assert svn.client is not None
67 assert svn.client is not None
@@ -1,132 +1,132 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import subprocess
18 import subprocess
19 import StringIO
19 import StringIO
20 import time
20 import time
21
21
22 import pytest
22 import pytest
23
23
24 from fixture import TestINI
24 from fixture import ContextINI
25
25
26
26
27 @pytest.mark.parametrize("arguments, expected_texts", [
27 @pytest.mark.parametrize("arguments, expected_texts", [
28 (['--threadpool=192'], [
28 (['--threadpool=192'], [
29 'threadpool_size: 192',
29 'threadpool_size: 192',
30 'worker pool of size 192 created',
30 'worker pool of size 192 created',
31 'Threadpool size set to 192']),
31 'Threadpool size set to 192']),
32 (['--locale=fake'], [
32 (['--locale=fake'], [
33 'Cannot set locale, not configuring the locale system']),
33 'Cannot set locale, not configuring the locale system']),
34 (['--timeout=5'], [
34 (['--timeout=5'], [
35 'Timeout for RPC calls set to 5.0 seconds']),
35 'Timeout for RPC calls set to 5.0 seconds']),
36 (['--log-level=info'], [
36 (['--log-level=info'], [
37 'log_level: info']),
37 'log_level: info']),
38 (['--port={port}'], [
38 (['--port={port}'], [
39 'port: {port}',
39 'port: {port}',
40 'created daemon on localhost:{port}']),
40 'created daemon on localhost:{port}']),
41 (['--host=127.0.0.1', '--port={port}'], [
41 (['--host=127.0.0.1', '--port={port}'], [
42 'port: {port}',
42 'port: {port}',
43 'host: 127.0.0.1',
43 'host: 127.0.0.1',
44 'created daemon on 127.0.0.1:{port}']),
44 'created daemon on 127.0.0.1:{port}']),
45 (['--config=/bad/file'], ['OSError: File /bad/file does not exist']),
45 (['--config=/bad/file'], ['OSError: File /bad/file does not exist']),
46 ])
46 ])
47 def test_vcsserver_calls(arguments, expected_texts, vcsserver_port):
47 def test_vcsserver_calls(arguments, expected_texts, vcsserver_port):
48 port_argument = '--port={port}'
48 port_argument = '--port={port}'
49 if port_argument not in arguments:
49 if port_argument not in arguments:
50 arguments.append(port_argument)
50 arguments.append(port_argument)
51 arguments = _replace_port(arguments, vcsserver_port)
51 arguments = _replace_port(arguments, vcsserver_port)
52 expected_texts = _replace_port(expected_texts, vcsserver_port)
52 expected_texts = _replace_port(expected_texts, vcsserver_port)
53 output = call_vcs_server_with_arguments(arguments)
53 output = call_vcs_server_with_arguments(arguments)
54 for text in expected_texts:
54 for text in expected_texts:
55 assert text in output
55 assert text in output
56
56
57
57
58 def _replace_port(values, port):
58 def _replace_port(values, port):
59 return [value.format(port=port) for value in values]
59 return [value.format(port=port) for value in values]
60
60
61
61
62 def test_vcsserver_with_config(vcsserver_port):
62 def test_vcsserver_with_config(vcsserver_port):
63 ini_def = [
63 ini_def = [
64 {'DEFAULT': {'host': '127.0.0.1'}},
64 {'DEFAULT': {'host': '127.0.0.1'}},
65 {'DEFAULT': {'threadpool_size': '111'}},
65 {'DEFAULT': {'threadpool_size': '111'}},
66 {'DEFAULT': {'port': vcsserver_port}},
66 {'DEFAULT': {'port': vcsserver_port}},
67 ]
67 ]
68
68
69 with TestINI('test.ini', ini_def) as new_test_ini_path:
69 with ContextINI('test.ini', ini_def) as new_test_ini_path:
70 output = call_vcs_server_with_arguments(
70 output = call_vcs_server_with_arguments(
71 ['--config=' + new_test_ini_path])
71 ['--config=' + new_test_ini_path])
72
72
73 expected_texts = [
73 expected_texts = [
74 'host: 127.0.0.1',
74 'host: 127.0.0.1',
75 'Threadpool size set to 111',
75 'Threadpool size set to 111',
76 ]
76 ]
77 for text in expected_texts:
77 for text in expected_texts:
78 assert text in output
78 assert text in output
79
79
80
80
81 def test_vcsserver_with_config_cli_overwrite(vcsserver_port):
81 def test_vcsserver_with_config_cli_overwrite(vcsserver_port):
82 ini_def = [
82 ini_def = [
83 {'DEFAULT': {'host': '127.0.0.1'}},
83 {'DEFAULT': {'host': '127.0.0.1'}},
84 {'DEFAULT': {'port': vcsserver_port}},
84 {'DEFAULT': {'port': vcsserver_port}},
85 {'DEFAULT': {'threadpool_size': '111'}},
85 {'DEFAULT': {'threadpool_size': '111'}},
86 {'DEFAULT': {'timeout': '0'}},
86 {'DEFAULT': {'timeout': '0'}},
87 ]
87 ]
88 with TestINI('test.ini', ini_def) as new_test_ini_path:
88 with ContextINI('test.ini', ini_def) as new_test_ini_path:
89 output = call_vcs_server_with_arguments([
89 output = call_vcs_server_with_arguments([
90 '--config=' + new_test_ini_path,
90 '--config=' + new_test_ini_path,
91 '--host=128.0.0.1',
91 '--host=128.0.0.1',
92 '--threadpool=256',
92 '--threadpool=256',
93 '--timeout=5'])
93 '--timeout=5'])
94 expected_texts = [
94 expected_texts = [
95 'host: 128.0.0.1',
95 'host: 128.0.0.1',
96 'Threadpool size set to 256',
96 'Threadpool size set to 256',
97 'Timeout for RPC calls set to 5.0 seconds',
97 'Timeout for RPC calls set to 5.0 seconds',
98 ]
98 ]
99 for text in expected_texts:
99 for text in expected_texts:
100 assert text in output
100 assert text in output
101
101
102
102
103 def call_vcs_server_with_arguments(args):
103 def call_vcs_server_with_arguments(args):
104 vcs = subprocess.Popen(
104 vcs = subprocess.Popen(
105 ["vcsserver"] + args,
105 ["vcsserver"] + args,
106 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
106 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
107
107
108 output = read_output_until(
108 output = read_output_until(
109 "Starting vcsserver.main", vcs.stdout)
109 "Starting vcsserver.main", vcs.stdout)
110 vcs.terminate()
110 vcs.terminate()
111 return output
111 return output
112
112
113
113
114 def call_vcs_server_with_non_existing_config_file(args):
114 def call_vcs_server_with_non_existing_config_file(args):
115 vcs = subprocess.Popen(
115 vcs = subprocess.Popen(
116 ["vcsserver", "--config=/tmp/bad"] + args,
116 ["vcsserver", "--config=/tmp/bad"] + args,
117 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
117 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
118 output = read_output_until(
118 output = read_output_until(
119 "Starting vcsserver.main", vcs.stdout)
119 "Starting vcsserver.main", vcs.stdout)
120 vcs.terminate()
120 vcs.terminate()
121 return output
121 return output
122
122
123
123
124 def read_output_until(expected, source, timeout=5):
124 def read_output_until(expected, source, timeout=5):
125 ts = time.time()
125 ts = time.time()
126 buf = StringIO.StringIO()
126 buf = StringIO.StringIO()
127 while time.time() - ts < timeout:
127 while time.time() - ts < timeout:
128 line = source.readline()
128 line = source.readline()
129 buf.write(line)
129 buf.write(line)
130 if expected in line:
130 if expected in line:
131 break
131 break
132 return buf.getvalue()
132 return buf.getvalue()
@@ -1,96 +1,96 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import wsgiref.simple_server
18 import wsgiref.simple_server
19 import wsgiref.validate
19 import wsgiref.validate
20
20
21 from vcsserver import wsgi_app_caller
21 from vcsserver import wsgi_app_caller
22
22
23
23
24 # pylint: disable=protected-access,too-many-public-methods
24 # pylint: disable=protected-access,too-many-public-methods
25
25
26
26
27 @wsgiref.validate.validator
27 @wsgiref.validate.validator
28 def demo_app(environ, start_response):
28 def demo_app(environ, start_response):
29 """WSGI app used for testing."""
29 """WSGI app used for testing."""
30 data = [
30 data = [
31 'Hello World!\n',
31 'Hello World!\n',
32 'input_data=%s\n' % environ['wsgi.input'].read(),
32 'input_data=%s\n' % environ['wsgi.input'].read(),
33 ]
33 ]
34 for key, value in sorted(environ.items()):
34 for key, value in sorted(environ.items()):
35 data.append('%s=%s\n' % (key, value))
35 data.append('%s=%s\n' % (key, value))
36
36
37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
37 write = start_response("200 OK", [('Content-Type', 'text/plain')])
38 write('Old school write method\n')
38 write('Old school write method\n')
39 write('***********************\n')
39 write('***********************\n')
40 return data
40 return data
41
41
42
42
43 BASE_ENVIRON = {
43 BASE_ENVIRON = {
44 'REQUEST_METHOD': 'GET',
44 'REQUEST_METHOD': 'GET',
45 'SERVER_NAME': 'localhost',
45 'SERVER_NAME': 'localhost',
46 'SERVER_PORT': '80',
46 'SERVER_PORT': '80',
47 'SCRIPT_NAME': '',
47 'SCRIPT_NAME': '',
48 'PATH_INFO': '/',
48 'PATH_INFO': '/',
49 'QUERY_STRING': '',
49 'QUERY_STRING': '',
50 'foo.var': 'bla',
50 'foo.var': 'bla',
51 }
51 }
52
52
53
53
54 def test_complete_environ():
54 def test_complete_environ():
55 environ = dict(BASE_ENVIRON)
55 environ = dict(BASE_ENVIRON)
56 data = "data"
56 data = "data"
57 wsgi_app_caller._complete_environ(environ, data)
57 wsgi_app_caller._complete_environ(environ, data)
58 wsgiref.validate.check_environ(environ)
58 wsgiref.validate.check_environ(environ)
59
59
60 assert data == environ['wsgi.input'].read()
60 assert data == environ['wsgi.input'].read()
61
61
62
62
63 def test_start_response():
63 def test_start_response():
64 start_response = wsgi_app_caller._StartResponse()
64 start_response = wsgi_app_caller._StartResponse()
65 status = '200 OK'
65 status = '200 OK'
66 headers = [('Content-Type', 'text/plain')]
66 headers = [('Content-Type', 'text/plain')]
67 start_response(status, headers)
67 start_response(status, headers)
68
68
69 assert status == start_response.status
69 assert status == start_response.status
70 assert headers == start_response.headers
70 assert headers == start_response.headers
71
71
72
72
73 def test_start_response_with_error():
73 def test_start_response_with_error():
74 start_response = wsgi_app_caller._StartResponse()
74 start_response = wsgi_app_caller._StartResponse()
75 status = '500 Internal Server Error'
75 status = '500 Internal Server Error'
76 headers = [('Content-Type', 'text/plain')]
76 headers = [('Content-Type', 'text/plain')]
77 start_response(status, headers, (None, None, None))
77 start_response(status, headers, (None, None, None))
78
78
79 assert status == start_response.status
79 assert status == start_response.status
80 assert headers == start_response.headers
80 assert headers == start_response.headers
81
81
82
82
83 def test_wsgi_app_caller():
83 def test_wsgi_app_caller():
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
84 caller = wsgi_app_caller.WSGIAppCaller(demo_app)
85 environ = dict(BASE_ENVIRON)
85 environ = dict(BASE_ENVIRON)
86 input_data = 'some text'
86 input_data = 'some text'
87 responses, status, headers = caller.handle(environ, input_data)
87 responses, status, headers = caller.handle(environ, input_data)
88 response = ''.join(responses)
88 response = ''.join(responses)
89
89
90 assert status == '200 OK'
90 assert status == '200 OK'
91 assert headers == [('Content-Type', 'text/plain')]
91 assert headers == [('Content-Type', 'text/plain')]
92 assert response.startswith(
92 assert response.startswith(
93 'Old school write method\n***********************\n')
93 'Old school write method\n***********************\n')
94 assert 'Hello World!\n' in response
94 assert 'Hello World!\n' in response
95 assert 'foo.var=bla\n' in response
95 assert 'foo.var=bla\n' in response
96 assert 'input_data=%s\n' % input_data in response
96 assert 'input_data=%s\n' % input_data in response
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19
19
20 # TODO: johbo: That's a copy from rhodecode
20 # TODO: johbo: That's a copy from rhodecode
21 def safe_str(unicode_, to_encoding=['utf8']):
21 def safe_str(unicode_, to_encoding=['utf8']):
22 """
22 """
23 safe str function. Does few trick to turn unicode_ into string
23 safe str function. Does few trick to turn unicode_ into string
24
24
25 In case of UnicodeEncodeError, we try to return it with encoding detected
25 In case of UnicodeEncodeError, we try to return it with encoding detected
26 by chardet library if it fails fallback to string with errors replaced
26 by chardet library if it fails fallback to string with errors replaced
27
27
28 :param unicode_: unicode to encode
28 :param unicode_: unicode to encode
29 :rtype: str
29 :rtype: str
30 :returns: str object
30 :returns: str object
31 """
31 """
32
32
33 # if it's not basestr cast to str
33 # if it's not basestr cast to str
34 if not isinstance(unicode_, basestring):
34 if not isinstance(unicode_, basestring):
35 return str(unicode_)
35 return str(unicode_)
36
36
37 if isinstance(unicode_, str):
37 if isinstance(unicode_, str):
38 return unicode_
38 return unicode_
39
39
40 if not isinstance(to_encoding, (list, tuple)):
40 if not isinstance(to_encoding, (list, tuple)):
41 to_encoding = [to_encoding]
41 to_encoding = [to_encoding]
42
42
43 for enc in to_encoding:
43 for enc in to_encoding:
44 try:
44 try:
45 return unicode_.encode(enc)
45 return unicode_.encode(enc)
46 except UnicodeEncodeError:
46 except UnicodeEncodeError:
47 pass
47 pass
48
48
49 try:
49 try:
50 import chardet
50 import chardet
51 encoding = chardet.detect(unicode_)['encoding']
51 encoding = chardet.detect(unicode_)['encoding']
52 if encoding is None:
52 if encoding is None:
53 raise UnicodeEncodeError()
53 raise UnicodeEncodeError()
54
54
55 return unicode_.encode(encoding)
55 return unicode_.encode(encoding)
56 except (ImportError, UnicodeEncodeError):
56 except (ImportError, UnicodeEncodeError):
57 return unicode_.encode(to_encoding[0], 'replace')
57 return unicode_.encode(to_encoding[0], 'replace')
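As the docstring above describes, safe_str tries each encoding listed in to_encoding, then falls back to a chardet guess, and finally replaces unencodable characters. A short usage sketch matching the module's Python 2 semantics:

# -*- coding: utf-8 -*-
from vcsserver.utils import safe_str

# Non-string input is simply cast to str.
assert safe_str(42) == '42'

# Plain str passes through untouched.
assert safe_str('already-bytes') == 'already-bytes'

# Unicode is encoded with the first encoding that works (utf8 by default).
assert safe_str(u'gr\xfc\xdfe') == 'gr\xc3\xbc\xc3\x9fe'

# A different target encoding can be requested explicitly.
assert safe_str(u'gr\xfc\xdfe', to_encoding=['latin-1']) == 'gr\xfc\xdfe'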
@@ -1,116 +1,116 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Extract the responses of a WSGI app."""
18 """Extract the responses of a WSGI app."""
19
19
20 __all__ = ('WSGIAppCaller',)
20 __all__ = ('WSGIAppCaller',)
21
21
22 import io
22 import io
23 import logging
23 import logging
24 import os
24 import os
25
25
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29 DEV_NULL = open(os.devnull)
29 DEV_NULL = open(os.devnull)
30
30
31
31
32 def _complete_environ(environ, input_data):
32 def _complete_environ(environ, input_data):
33 """Update the missing wsgi.* variables of a WSGI environment.
33 """Update the missing wsgi.* variables of a WSGI environment.
34
34
35 :param environ: WSGI environment to update
35 :param environ: WSGI environment to update
36 :type environ: dict
36 :type environ: dict
37 :param input_data: data to be read by the app
37 :param input_data: data to be read by the app
38 :type input_data: str
38 :type input_data: str
39 """
39 """
40 environ.update({
40 environ.update({
41 'wsgi.version': (1, 0),
41 'wsgi.version': (1, 0),
42 'wsgi.url_scheme': 'http',
42 'wsgi.url_scheme': 'http',
43 'wsgi.multithread': True,
43 'wsgi.multithread': True,
44 'wsgi.multiprocess': True,
44 'wsgi.multiprocess': True,
45 'wsgi.run_once': False,
45 'wsgi.run_once': False,
46 'wsgi.input': io.BytesIO(input_data),
46 'wsgi.input': io.BytesIO(input_data),
47 'wsgi.errors': DEV_NULL,
47 'wsgi.errors': DEV_NULL,
48 })
48 })
49
49
50
50
51 # pylint: disable=too-few-public-methods
51 # pylint: disable=too-few-public-methods
52 class _StartResponse(object):
52 class _StartResponse(object):
53 """Save the arguments of a start_response call."""
53 """Save the arguments of a start_response call."""
54
54
55 __slots__ = ['status', 'headers', 'content']
55 __slots__ = ['status', 'headers', 'content']
56
56
57 def __init__(self):
57 def __init__(self):
58 self.status = None
58 self.status = None
59 self.headers = None
59 self.headers = None
60 self.content = []
60 self.content = []
61
61
62 def __call__(self, status, headers, exc_info=None):
62 def __call__(self, status, headers, exc_info=None):
63 # TODO(skreft): do something meaningful with the exc_info
63 # TODO(skreft): do something meaningful with the exc_info
64 exc_info = None # avoid dangling circular reference
64 exc_info = None # avoid dangling circular reference
65 self.status = status
65 self.status = status
66 self.headers = headers
66 self.headers = headers
67
67
68 return self.write
68 return self.write
69
69
70 def write(self, content):
70 def write(self, content):
71 """Write method returning when calling this object.
71 """Write method returning when calling this object.
72
72
73 All the data written is then available in content.
73 All the data written is then available in content.
74 """
74 """
75 self.content.append(content)
75 self.content.append(content)
76
76
77
77
78 class WSGIAppCaller(object):
78 class WSGIAppCaller(object):
79 """Calls a WSGI app."""
79 """Calls a WSGI app."""
80
80
81 def __init__(self, app):
81 def __init__(self, app):
82 """
82 """
83 :param app: WSGI app to call
83 :param app: WSGI app to call
84 """
84 """
85 self.app = app
85 self.app = app
86
86
87 def handle(self, environ, input_data):
87 def handle(self, environ, input_data):
88 """Process a request with the WSGI app.
88 """Process a request with the WSGI app.
89
89
90 The returned data of the app is fully consumed into a list.
90 The returned data of the app is fully consumed into a list.
91
91
92 :param environ: WSGI environment to update
92 :param environ: WSGI environment to update
93 :type environ: dict
93 :type environ: dict
94 :param input_data: data to be read by the app
94 :param input_data: data to be read by the app
95 :type input_data: str
95 :type input_data: str
96
96
97 :returns: a tuple with the contents, status and headers
97 :returns: a tuple with the contents, status and headers
98 :rtype: (list<str>, str, list<(str, str)>)
98 :rtype: (list<str>, str, list<(str, str)>)
99 """
99 """
100 _complete_environ(environ, input_data)
100 _complete_environ(environ, input_data)
101 start_response = _StartResponse()
101 start_response = _StartResponse()
102 log.debug("Calling wrapped WSGI application")
102 log.debug("Calling wrapped WSGI application")
103 responses = self.app(environ, start_response)
103 responses = self.app(environ, start_response)
104 responses_list = list(responses)
104 responses_list = list(responses)
105 existing_responses = start_response.content
105 existing_responses = start_response.content
106 if existing_responses:
106 if existing_responses:
107 log.debug(
107 log.debug(
108 "Adding returned response to response written via write()")
108 "Adding returned response to response written via write()")
109 existing_responses.extend(responses_list)
109 existing_responses.extend(responses_list)
110 responses_list = existing_responses
110 responses_list = existing_responses
111 if hasattr(responses, 'close'):
111 if hasattr(responses, 'close'):
112 log.debug("Closing iterator from WSGI application")
112 log.debug("Closing iterator from WSGI application")
113 responses.close()
113 responses.close()
114
114
115 log.debug("Handling of WSGI request done, returning response")
115 log.debug("Handling of WSGI request done, returning response")
116 return responses_list, start_response.status, start_response.headers
116 return responses_list, start_response.status, start_response.headers
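A minimal end-to-end sketch of the module above: build a bare environ, let WSGIAppCaller run an app against it, and inspect the collected response. The tiny app and the environ values are illustrative only; the caller itself fills in the wsgi.* keys via _complete_environ.

from vcsserver.wsgi_app_caller import WSGIAppCaller


def hello_app(environ, start_response):
    # Trivial WSGI app used only for this illustration.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return ['request body was: %s' % environ['wsgi.input'].read()]


# Only the CGI-style keys are needed; wsgi.input is created from input_data.
environ = {
    'REQUEST_METHOD': 'GET',
    'SERVER_NAME': 'localhost',
    'SERVER_PORT': '80',
    'SCRIPT_NAME': '',
    'PATH_INFO': '/',
    'QUERY_STRING': '',
}

responses, status, headers = WSGIAppCaller(hello_app).handle(environ, 'payload')

assert status == '200 OK'
assert ''.join(responses) == 'request body was: payload'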