release: Merge default into stable for release preparation
marcink
r108:a1a58274 merge stable
@@ -0,0 +1,63 b''
1 diff -rup subversion-1.9.4-orig/subversion/include/svn_auth.h subversion-1.9.4/subversion/include/svn_auth.h
2 --- subversion-1.9.4-orig/subversion/include/svn_auth.h 2015-02-13 12:17:40.000000000 +0100
3 +++ subversion-1.9.4/subversion/include/svn_auth.h 2016-09-21 12:55:27.000000000 +0200
4 @@ -943,7 +943,7 @@ svn_auth_get_windows_ssl_server_trust_pr
5
6 #endif /* WIN32 && !__MINGW32__ || DOXYGEN */
7
8 -#if defined(DARWIN) || defined(DOXYGEN)
9 +#if defined(SVN_HAVE_KEYCHAIN_SERVICES) || defined(DOXYGEN)
10 /**
11 * Set @a *provider to an authentication provider of type @c
12 * svn_auth_cred_simple_t that gets/sets information from the user's
13 @@ -984,7 +984,7 @@ void
14 svn_auth_get_keychain_ssl_client_cert_pw_provider(
15 svn_auth_provider_object_t **provider,
16 apr_pool_t *pool);
17 -#endif /* DARWIN || DOXYGEN */
18 +#endif /* SVN_HAVE_KEYCHAIN_SERVICES || DOXYGEN */
19
20 /* Note that the gnome keyring unlock prompt related items below must be
21 * declared for all platforms in order to allow SWIG interfaces to be
22 diff -rup subversion-1.9.4-orig/subversion/libsvn_subr/auth.h subversion-1.9.4/subversion/libsvn_subr/auth.h
23 --- subversion-1.9.4-orig/subversion/libsvn_subr/auth.h 2015-08-27 06:00:31.000000000 +0200
24 +++ subversion-1.9.4/subversion/libsvn_subr/auth.h 2016-09-21 12:56:20.000000000 +0200
25 @@ -103,7 +103,7 @@ svn_auth__get_windows_ssl_server_trust_p
26 apr_pool_t *pool);
27 #endif /* WIN32 && !__MINGW32__ || DOXYGEN */
28
29 -#if defined(DARWIN) || defined(DOXYGEN)
30 +#if defined(SVN_HAVE_KEYCHAIN_SERVICES) || defined(DOXYGEN)
31 /**
32 * Set @a *provider to an authentication provider of type @c
33 * svn_auth_cred_simple_t that gets/sets information from the user's
34 @@ -134,7 +134,7 @@ void
35 svn_auth__get_keychain_ssl_client_cert_pw_provider(
36 svn_auth_provider_object_t **provider,
37 apr_pool_t *pool);
38 -#endif /* DARWIN || DOXYGEN */
39 +#endif /* SVN_HAVE_KEYCHAIN_SERVICES || DOXYGEN */
40
41 #if !defined(WIN32) || defined(DOXYGEN)
42 /**
43 diff -rup subversion-1.9.4-orig/subversion/libsvn_subr/deprecated.c subversion-1.9.4/subversion/libsvn_subr/deprecated.c
44 --- subversion-1.9.4-orig/subversion/libsvn_subr/deprecated.c 2015-08-27 06:00:31.000000000 +0200
45 +++ subversion-1.9.4/subversion/libsvn_subr/deprecated.c 2016-09-21 12:57:08.000000000 +0200
46 @@ -1479,7 +1479,7 @@ svn_auth_get_windows_ssl_server_trust_pr
47 #endif /* WIN32 && !__MINGW32__ */
48
49 /*** From macos_keychain.c ***/
50 -#if defined(DARWIN)
51 +#if defined(SVN_HAVE_KEYCHAIN_SERVICES)
52 void
53 svn_auth_get_keychain_simple_provider(svn_auth_provider_object_t **provider,
54 apr_pool_t *pool)
55 @@ -1494,7 +1494,7 @@ svn_auth_get_keychain_ssl_client_cert_pw
56 {
57 svn_auth__get_keychain_ssl_client_cert_pw_provider(provider, pool);
58 }
59 -#endif /* DARWIN */
60 +#endif /* SVN_HAVE_KEYCHAIN_SERVICES */
61
62 #if !defined(WIN32)
63 void
@@ -1,6 +1,6 b''
1 1 [bumpversion]
2 current_version = 4.4.2
2 current_version = 4.5.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 b''
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.4.2
12 state = in_progress
13 version = 4.5.0
16 14
@@ -1,111 +1,111 b''
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 3 # #
4 4 ################################################################################
5 5
6 [app:main]
7 use = egg:rhodecode-vcsserver
8
9 pyramid.default_locale_name = en
10 pyramid.includes =
11
12 # default locale used by VCS systems
13 locale = en_US.UTF-8
14
15 # cache regions, please don't change
16 beaker.cache.regions = repo_object
17 beaker.cache.repo_object.type = memorylru
18 beaker.cache.repo_object.max_items = 100
19 # cache auto-expires after N seconds
20 beaker.cache.repo_object.expire = 300
21 beaker.cache.repo_object.enabled = true
22 6
23 7 [server:main]
24 8 ## COMMON ##
25 9 host = 127.0.0.1
26 10 port = 9900
27 11
28 12
29 13 ##########################
30 14 ## GUNICORN WSGI SERVER ##
31 15 ##########################
32 ## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
33 17 use = egg:gunicorn#main
34 18 ## Sets the number of process workers. You must set `instance_id = *`
35 19 ## when this option is set to more than one worker, recommended
36 20 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
37 21 ## The `instance_id = *` must be set in the [app:main] section below
38 22 workers = 2
39 23 ## process name
40 24 proc_name = rhodecode_vcsserver
41 25 ## type of worker class, one of sync, gevent
42 26 ## recommended for bigger setup is using of of other than sync one
43 27 worker_class = sync
44 28 ## The maximum number of simultaneous clients. Valid only for Gevent
45 29 #worker_connections = 10
46 30 ## max number of requests that worker will handle before being gracefully
47 31 ## restarted, could prevent memory leaks
48 32 max_requests = 1000
49 33 max_requests_jitter = 30
50 34 ## amount of time a worker can spend with handling a request before it
51 35 ## gets killed and restarted. Set to 6hrs
52 36 timeout = 21600
53 37
54 38
39 [app:main]
40 use = egg:rhodecode-vcsserver
41
42 pyramid.default_locale_name = en
43 pyramid.includes =
44
45 ## default locale used by VCS systems
46 locale = en_US.UTF-8
47
48 # cache regions, please don't change
49 beaker.cache.regions = repo_object
50 beaker.cache.repo_object.type = memorylru
51 beaker.cache.repo_object.max_items = 100
52 # cache auto-expires after N seconds
53 beaker.cache.repo_object.expire = 300
54 beaker.cache.repo_object.enabled = true
55 55
56 56
57 57 ################################
58 58 ### LOGGING CONFIGURATION ####
59 59 ################################
60 60 [loggers]
61 61 keys = root, vcsserver, pyro4, beaker
62 62
63 63 [handlers]
64 64 keys = console
65 65
66 66 [formatters]
67 67 keys = generic
68 68
69 69 #############
70 70 ## LOGGERS ##
71 71 #############
72 72 [logger_root]
73 73 level = NOTSET
74 74 handlers = console
75 75
76 76 [logger_vcsserver]
77 77 level = DEBUG
78 78 handlers =
79 79 qualname = vcsserver
80 80 propagate = 1
81 81
82 82 [logger_beaker]
83 83 level = DEBUG
84 84 handlers =
85 85 qualname = beaker
86 86 propagate = 1
87 87
88 88 [logger_pyro4]
89 89 level = DEBUG
90 90 handlers =
91 91 qualname = Pyro4
92 92 propagate = 1
93 93
94 94
95 95 ##############
96 96 ## HANDLERS ##
97 97 ##############
98 98
99 99 [handler_console]
100 100 class = StreamHandler
101 101 args = (sys.stderr,)
102 102 level = DEBUG
103 103 formatter = generic
104 104
105 105 ################
106 106 ## FORMATTERS ##
107 107 ################
108 108
109 109 [formatter_generic]
110 110 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
111 111 datefmt = %Y-%m-%d %H:%M:%S No newline at end of file
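Since the [app:main] section above is a standard PasteDeploy configuration, the WSGI application can also be loaded programmatically instead of through gunicorn. A minimal sketch, assuming the rhodecode-vcsserver egg is installed and the file is saved as vcsserver.ini in the current directory (the filename and the use of waitress here are assumptions for illustration; waitress is listed in the install requirements):

    # sketch: load the vcsserver WSGI app from the ini via PasteDeploy and serve it
    from paste.deploy import loadapp
    from waitress import serve

    app = loadapp('config:vcsserver.ini', relative_to='.')
    serve(app, host='127.0.0.1', port=9900)  # matches host/port in [server:main] above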
@@ -1,146 +1,138 b''
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 7 { pkgs ? (import <nixpkgs> {})
8 8 , pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 10 , doCheck ? true
11 11 }:
12 12
13 13 let pkgs_ = pkgs; in
14 14
15 15 let
16 16 pkgs = pkgs_.overridePackages (self: super: {
17 17 # Override subversion derivation to
18 18 # - activate python bindings
19 # - set version to 1.8
20 subversion = super.subversion18.override {
21 httpSupport = true;
22 pythonBindings = true;
23 python = self.python27Packages.python;
24 };
19 subversion = let
20 subversionWithPython = super.subversion.override {
21 httpSupport = true;
22 pythonBindings = true;
23 python = self.python27Packages.python;
24 };
25 in pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
26 patches = (oldAttrs.patches or []) ++
27 pkgs.lib.optionals pkgs.stdenv.isDarwin [
28 # johbo: "import svn.client" fails on darwin currently.
29 ./pkgs/subversion-1.9.4-darwin.patch
30 ];
31 });
25 32 });
26 33
27 34 inherit (pkgs.lib) fix extends;
28 35
29 36 basePythonPackages = with builtins; if isAttrs pythonPackages
30 37 then pythonPackages
31 38 else getAttr pythonPackages pkgs;
32 39
33 40 elem = builtins.elem;
34 41 basename = path: with pkgs.lib; last (splitString "/" path);
35 42 startsWith = prefix: full: let
36 43 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
37 44 in actualPrefix == prefix;
38 45
39 46 src-filter = path: type: with pkgs.lib;
40 47 let
41 48 ext = last (splitString "." path);
42 49 in
43 50 !elem (basename path) [
44 51 ".git" ".hg" "__pycache__" ".eggs" "node_modules"
45 52 "build" "data" "tmp"] &&
46 53 !elem ext ["egg-info" "pyc"] &&
47 54 !startsWith "result" path;
48 55
49 56 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
50 57
51 58 pythonGeneratedPackages = self: basePythonPackages.override (a: {
52 59 inherit self;
53 60 })
54 61 // (scopedImport {
55 62 self = self;
56 63 super = basePythonPackages;
57 64 inherit pkgs;
58 65 inherit (pkgs) fetchurl fetchgit;
59 66 } ./pkgs/python-packages.nix);
60 67
61 68 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
62 69 inherit
63 70 basePythonPackages
64 71 pkgs;
65 72 };
66 73
67 74 version = builtins.readFile ./vcsserver/VERSION;
68 75
69 76 pythonLocalOverrides = self: super: {
70 77 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
71 78 inherit
72 79 doCheck
73 80 version;
74 81 name = "rhodecode-vcsserver-${version}";
75 82 releaseName = "RhodeCodeVCSServer-${version}";
76 83 src = rhodecode-vcsserver-src;
77 84
78 85 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
79 86 pkgs.git
80 87 pkgs.subversion
81 88 ]);
82 89
83 90 # TODO: johbo: Make a nicer way to expose the parts. Maybe
84 91 # pkgs/default.nix?
85 92 passthru = {
86 93 pythonPackages = self;
87 94 };
88 95
89 # Somewhat snappier setup of the development environment
90 # TODO: move into shell.nix
91 # TODO: think of supporting a stable path again, so that multiple shells
92 # can share it.
93 shellHook = ''
94 # Set locale
95 export LC_ALL="en_US.UTF-8"
96
97 tmp_path=$(mktemp -d)
98 export PATH="$tmp_path/bin:$PATH"
99 export PYTHONPATH="$tmp_path/${self.python.sitePackages}:$PYTHONPATH"
100 mkdir -p $tmp_path/${self.python.sitePackages}
101 python setup.py develop --prefix $tmp_path --allow-hosts ""
102 '';
103
104 96 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
105 97 preCheck = ''
106 98 export PATH="$out/bin:$PATH"
107 99 '';
108 100
109 101 postInstall = ''
110 102 echo "Writing meta information for rccontrol to nix-support/rccontrol"
111 103 mkdir -p $out/nix-support/rccontrol
112 104 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
113 105 echo "DONE: Meta information for rccontrol written"
114 106
115 107 ln -s ${self.pyramid}/bin/* $out/bin/
116 108 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
117 109
118 110 # Symlink version control utilities
119 111 #
120 112 # We ensure that always the correct version is available as a symlink.
121 113 # So that users calling them via the profile path will always use the
122 114 # correct version.
123 115 ln -s ${pkgs.git}/bin/git $out/bin
124 116 ln -s ${self.mercurial}/bin/hg $out/bin
125 117 ln -s ${pkgs.subversion}/bin/svn* $out/bin
126 118
127 119 for file in $out/bin/*; do
128 120 wrapProgram $file \
129 121 --set PATH $PATH \
130 122 --set PYTHONPATH $PYTHONPATH \
131 123 --set PYTHONHASHSEED random
132 124 done
133 125 '';
134 126
135 127 });
136 128 };
137 129
138 130 # Apply all overrides and fix the final package set
139 131 myPythonPackages =
140 132 (fix
141 133 (extends pythonExternalOverrides
142 134 (extends pythonLocalOverrides
143 135 (extends pythonOverrides
144 136 pythonGeneratedPackages))));
145 137
146 138 in myPythonPackages.rhodecode-vcsserver
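The darwin patch added above exists because, per the inline comment, "import svn.client" fails on macOS with the unpatched Subversion build. A quick sanity check of the resulting environment might look like the sketch below (it assumes the Subversion Python bindings are on PYTHONPATH, e.g. inside the nix shell):

    # sketch: verify the Subversion python bindings from the overridden derivation
    try:
        import svn.client  # this is the import the darwin patch is meant to fix
    except ImportError as exc:
        raise SystemExit('subversion bindings unavailable: %s' % exc)
    print('svn.client imported successfully')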
@@ -1,56 +1,57 b''
1 1 # Overrides for the generated python-packages.nix
2 2 #
3 3 # This function is intended to be used as an extension to the generated file
4 4 # python-packages.nix. The main objective is to add needed dependencies of C
5 5 # libraries and tweak the build instructions where needed.
6 6
7 7 { pkgs, basePythonPackages }:
8 8
9 9 let
10 10 sed = "sed -i";
11 11 in
12 12
13 13 self: super: {
14 14
15 15 subvertpy = super.subvertpy.override (attrs: {
16 SVN_PREFIX = "${pkgs.subversion}";
16 # TODO: johbo: Remove the "or" once we drop 16.03 support
17 SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
17 18 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
18 19 pkgs.aprutil
19 20 pkgs.subversion
20 21 ];
21 22 preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
22 23 ${sed} -e "s/'gcc'/'clang'/" setup.py
23 24 '';
24 25 });
25 26
26 27 mercurial = super.mercurial.override (attrs: {
27 28 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
28 29 self.python.modules.curses
29 30 ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
30 31 pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
31 32 });
32 33
33 34 pyramid = super.pyramid.override (attrs: {
34 35 postFixup = ''
35 36 wrapPythonPrograms
36 37 # TODO: johbo: "wrapPython" adds this magic line which
37 38 # confuses pserve.
38 39 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
39 40 '';
40 41 });
41 42
42 43 Pyro4 = super.Pyro4.override (attrs: {
43 44 # TODO: Was not able to generate this version, needs further
44 45 # investigation.
45 46 name = "Pyro4-4.35";
46 47 src = pkgs.fetchurl {
47 48 url = "https://pypi.python.org/packages/source/P/Pyro4/Pyro4-4.35.src.tar.gz";
48 49 md5 = "cbe6cb855f086a0f092ca075005855f3";
49 50 };
50 51 });
51 52
52 53 # Avoid that setuptools is replaced, this leads to trouble
53 54 # with buildPythonPackage.
54 55 setuptools = basePythonPackages.setuptools;
55 56
56 57 }
@@ -1,471 +1,694 b''
1 # Generated by pip2nix 0.4.0.dev1
2 # See https://github.com/johbo/pip2nix
3
1 4 {
2 5 Beaker = super.buildPythonPackage {
3 6 name = "Beaker-1.7.0";
4 7 buildInputs = with self; [];
5 8 doCheck = false;
6 9 propagatedBuildInputs = with self; [];
7 10 src = fetchurl {
8 11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
9 12 md5 = "386be3f7fe427358881eee4622b428b3";
10 13 };
11 14 meta = {
12 15 license = [ pkgs.lib.licenses.bsdOriginal ];
13 16 };
14 17 };
15 18 Jinja2 = super.buildPythonPackage {
16 19 name = "Jinja2-2.8";
17 20 buildInputs = with self; [];
18 21 doCheck = false;
19 22 propagatedBuildInputs = with self; [MarkupSafe];
20 23 src = fetchurl {
21 24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
22 25 md5 = "edb51693fe22c53cee5403775c71a99e";
23 26 };
24 27 meta = {
25 28 license = [ pkgs.lib.licenses.bsdOriginal ];
26 29 };
27 30 };
28 31 Mako = super.buildPythonPackage {
29 name = "Mako-1.0.4";
32 name = "Mako-1.0.6";
30 33 buildInputs = with self; [];
31 34 doCheck = false;
32 35 propagatedBuildInputs = with self; [MarkupSafe];
33 36 src = fetchurl {
34 url = "https://pypi.python.org/packages/7a/ae/925434246ee90b42e8ef57d3b30a0ab7caf9a2de3e449b876c56dcb48155/Mako-1.0.4.tar.gz";
35 md5 = "c5fc31a323dd4990683d2f2da02d4e20";
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
36 39 };
37 40 meta = {
38 41 license = [ pkgs.lib.licenses.mit ];
39 42 };
40 43 };
41 44 MarkupSafe = super.buildPythonPackage {
42 45 name = "MarkupSafe-0.23";
43 46 buildInputs = with self; [];
44 47 doCheck = false;
45 48 propagatedBuildInputs = with self; [];
46 49 src = fetchurl {
47 50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
48 51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
49 52 };
50 53 meta = {
51 54 license = [ pkgs.lib.licenses.bsdOriginal ];
52 55 };
53 56 };
54 57 PasteDeploy = super.buildPythonPackage {
55 58 name = "PasteDeploy-1.5.2";
56 59 buildInputs = with self; [];
57 60 doCheck = false;
58 61 propagatedBuildInputs = with self; [];
59 62 src = fetchurl {
60 63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
61 64 md5 = "352b7205c78c8de4987578d19431af3b";
62 65 };
63 66 meta = {
64 67 license = [ pkgs.lib.licenses.mit ];
65 68 };
66 69 };
67 70 Pyro4 = super.buildPythonPackage {
68 71 name = "Pyro4-4.41";
69 72 buildInputs = with self; [];
70 73 doCheck = false;
71 74 propagatedBuildInputs = with self; [serpent];
72 75 src = fetchurl {
73 76 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
74 77 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
75 78 };
76 79 meta = {
77 80 license = [ pkgs.lib.licenses.mit ];
78 81 };
79 82 };
80 83 WebOb = super.buildPythonPackage {
81 84 name = "WebOb-1.3.1";
82 85 buildInputs = with self; [];
83 86 doCheck = false;
84 87 propagatedBuildInputs = with self; [];
85 88 src = fetchurl {
86 89 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
87 90 md5 = "20918251c5726956ba8fef22d1556177";
88 91 };
89 92 meta = {
90 93 license = [ pkgs.lib.licenses.mit ];
91 94 };
92 95 };
93 96 WebTest = super.buildPythonPackage {
94 97 name = "WebTest-1.4.3";
95 98 buildInputs = with self; [];
96 99 doCheck = false;
97 100 propagatedBuildInputs = with self; [WebOb];
98 101 src = fetchurl {
99 102 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
100 103 md5 = "631ce728bed92c681a4020a36adbc353";
101 104 };
102 105 meta = {
103 106 license = [ pkgs.lib.licenses.mit ];
104 107 };
105 108 };
109 backports.shutil-get-terminal-size = super.buildPythonPackage {
110 name = "backports.shutil-get-terminal-size-1.0.0";
111 buildInputs = with self; [];
112 doCheck = false;
113 propagatedBuildInputs = with self; [];
114 src = fetchurl {
115 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
116 md5 = "03267762480bd86b50580dc19dff3c66";
117 };
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
120 };
121 };
106 122 configobj = super.buildPythonPackage {
107 123 name = "configobj-5.0.6";
108 124 buildInputs = with self; [];
109 125 doCheck = false;
110 126 propagatedBuildInputs = with self; [six];
111 127 src = fetchurl {
112 128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
113 129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
114 130 };
115 131 meta = {
116 132 license = [ pkgs.lib.licenses.bsdOriginal ];
117 133 };
118 134 };
135 decorator = super.buildPythonPackage {
136 name = "decorator-4.0.10";
137 buildInputs = with self; [];
138 doCheck = false;
139 propagatedBuildInputs = with self; [];
140 src = fetchurl {
141 url = "https://pypi.python.org/packages/13/8a/4eed41e338e8dcc13ca41c94b142d4d20c0de684ee5065523fee406ce76f/decorator-4.0.10.tar.gz";
142 md5 = "434b57fdc3230c500716c5aff8896100";
143 };
144 meta = {
145 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
146 };
147 };
119 148 dulwich = super.buildPythonPackage {
120 149 name = "dulwich-0.13.0";
121 150 buildInputs = with self; [];
122 151 doCheck = false;
123 152 propagatedBuildInputs = with self; [];
124 153 src = fetchurl {
125 154 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
126 155 md5 = "6dede0626657c2bd08f48ca1221eea91";
127 156 };
128 157 meta = {
129 158 license = [ pkgs.lib.licenses.gpl2Plus ];
130 159 };
131 160 };
161 enum34 = super.buildPythonPackage {
162 name = "enum34-1.1.6";
163 buildInputs = with self; [];
164 doCheck = false;
165 propagatedBuildInputs = with self; [];
166 src = fetchurl {
167 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
168 md5 = "5f13a0841a61f7fc295c514490d120d0";
169 };
170 meta = {
171 license = [ pkgs.lib.licenses.bsdOriginal ];
172 };
173 };
132 174 greenlet = super.buildPythonPackage {
133 175 name = "greenlet-0.4.7";
134 176 buildInputs = with self; [];
135 177 doCheck = false;
136 178 propagatedBuildInputs = with self; [];
137 179 src = fetchurl {
138 180 url = "https://pypi.python.org/packages/7a/9f/a1a0d9bdf3203ae1502c5a8434fe89d323599d78a106985bc327351a69d4/greenlet-0.4.7.zip";
139 181 md5 = "c2333a8ff30fa75c5d5ec0e67b461086";
140 182 };
141 183 meta = {
142 184 license = [ pkgs.lib.licenses.mit ];
143 185 };
144 186 };
145 187 gunicorn = super.buildPythonPackage {
146 188 name = "gunicorn-19.6.0";
147 189 buildInputs = with self; [];
148 190 doCheck = false;
149 191 propagatedBuildInputs = with self; [];
150 192 src = fetchurl {
151 193 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
152 194 md5 = "338e5e8a83ea0f0625f768dba4597530";
153 195 };
154 196 meta = {
155 197 license = [ pkgs.lib.licenses.mit ];
156 198 };
157 199 };
158 200 hgsubversion = super.buildPythonPackage {
159 201 name = "hgsubversion-1.8.6";
160 202 buildInputs = with self; [];
161 203 doCheck = false;
162 204 propagatedBuildInputs = with self; [mercurial subvertpy];
163 205 src = fetchurl {
164 206 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
165 207 md5 = "9310cb266031cf8d0779885782a84a5b";
166 208 };
167 209 meta = {
168 210 license = [ pkgs.lib.licenses.gpl1 ];
169 211 };
170 212 };
171 213 infrae.cache = super.buildPythonPackage {
172 214 name = "infrae.cache-1.0.1";
173 215 buildInputs = with self; [];
174 216 doCheck = false;
175 217 propagatedBuildInputs = with self; [Beaker repoze.lru];
176 218 src = fetchurl {
177 219 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
178 220 md5 = "b09076a766747e6ed2a755cc62088e32";
179 221 };
180 222 meta = {
181 223 license = [ pkgs.lib.licenses.zpt21 ];
182 224 };
183 225 };
226 ipdb = super.buildPythonPackage {
227 name = "ipdb-0.10.1";
228 buildInputs = with self; [];
229 doCheck = false;
230 propagatedBuildInputs = with self; [ipython setuptools];
231 src = fetchurl {
232 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
233 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
234 };
235 meta = {
236 license = [ pkgs.lib.licenses.bsdOriginal ];
237 };
238 };
239 ipython = super.buildPythonPackage {
240 name = "ipython-5.1.0";
241 buildInputs = with self; [];
242 doCheck = false;
243 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
244 src = fetchurl {
245 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
246 md5 = "47c8122420f65b58784cb4b9b4af35e3";
247 };
248 meta = {
249 license = [ pkgs.lib.licenses.bsdOriginal ];
250 };
251 };
252 ipython-genutils = super.buildPythonPackage {
253 name = "ipython-genutils-0.1.0";
254 buildInputs = with self; [];
255 doCheck = false;
256 propagatedBuildInputs = with self; [];
257 src = fetchurl {
258 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
259 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
260 };
261 meta = {
262 license = [ pkgs.lib.licenses.bsdOriginal ];
263 };
264 };
184 265 mercurial = super.buildPythonPackage {
185 266 name = "mercurial-3.8.4";
186 267 buildInputs = with self; [];
187 268 doCheck = false;
188 269 propagatedBuildInputs = with self; [];
189 270 src = fetchurl {
190 271 url = "https://pypi.python.org/packages/bc/16/b66eef0b70ee2b4ebb8e76622fe21bbed834606dd8c1bd30d6936ebf6f45/mercurial-3.8.4.tar.gz";
191 272 md5 = "cec2c3db688cb87142809089c6ae13e9";
192 273 };
193 274 meta = {
194 275 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
195 276 };
196 277 };
197 278 mock = super.buildPythonPackage {
198 279 name = "mock-1.0.1";
199 280 buildInputs = with self; [];
200 281 doCheck = false;
201 282 propagatedBuildInputs = with self; [];
202 283 src = fetchurl {
203 284 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
204 285 md5 = "869f08d003c289a97c1a6610faf5e913";
205 286 };
206 287 meta = {
207 288 license = [ pkgs.lib.licenses.bsdOriginal ];
208 289 };
209 290 };
210 291 msgpack-python = super.buildPythonPackage {
211 292 name = "msgpack-python-0.4.6";
212 293 buildInputs = with self; [];
213 294 doCheck = false;
214 295 propagatedBuildInputs = with self; [];
215 296 src = fetchurl {
216 297 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
217 298 md5 = "8b317669314cf1bc881716cccdaccb30";
218 299 };
219 300 meta = {
220 301 license = [ pkgs.lib.licenses.asl20 ];
221 302 };
222 303 };
304 pathlib2 = super.buildPythonPackage {
305 name = "pathlib2-2.1.0";
306 buildInputs = with self; [];
307 doCheck = false;
308 propagatedBuildInputs = with self; [six];
309 src = fetchurl {
310 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
311 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
312 };
313 meta = {
314 license = [ pkgs.lib.licenses.mit ];
315 };
316 };
317 pexpect = super.buildPythonPackage {
318 name = "pexpect-4.2.1";
319 buildInputs = with self; [];
320 doCheck = false;
321 propagatedBuildInputs = with self; [ptyprocess];
322 src = fetchurl {
323 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
324 md5 = "3694410001a99dff83f0b500a1ca1c95";
325 };
326 meta = {
327 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
328 };
329 };
330 pickleshare = super.buildPythonPackage {
331 name = "pickleshare-0.7.4";
332 buildInputs = with self; [];
333 doCheck = false;
334 propagatedBuildInputs = with self; [pathlib2];
335 src = fetchurl {
336 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
337 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
338 };
339 meta = {
340 license = [ pkgs.lib.licenses.mit ];
341 };
342 };
343 prompt-toolkit = super.buildPythonPackage {
344 name = "prompt-toolkit-1.0.9";
345 buildInputs = with self; [];
346 doCheck = false;
347 propagatedBuildInputs = with self; [six wcwidth];
348 src = fetchurl {
349 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
350 md5 = "a39f91a54308fb7446b1a421c11f227c";
351 };
352 meta = {
353 license = [ pkgs.lib.licenses.bsdOriginal ];
354 };
355 };
356 ptyprocess = super.buildPythonPackage {
357 name = "ptyprocess-0.5.1";
358 buildInputs = with self; [];
359 doCheck = false;
360 propagatedBuildInputs = with self; [];
361 src = fetchurl {
362 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
363 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
364 };
365 meta = {
366 license = [ ];
367 };
368 };
223 369 py = super.buildPythonPackage {
224 370 name = "py-1.4.29";
225 371 buildInputs = with self; [];
226 372 doCheck = false;
227 373 propagatedBuildInputs = with self; [];
228 374 src = fetchurl {
229 375 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
230 376 md5 = "c28e0accba523a29b35a48bb703fb96c";
231 377 };
232 378 meta = {
233 379 license = [ pkgs.lib.licenses.mit ];
234 380 };
235 381 };
382 pygments = super.buildPythonPackage {
383 name = "pygments-2.1.3";
384 buildInputs = with self; [];
385 doCheck = false;
386 propagatedBuildInputs = with self; [];
387 src = fetchurl {
388 url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
389 md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
390 };
391 meta = {
392 license = [ pkgs.lib.licenses.bsdOriginal ];
393 };
394 };
236 395 pyramid = super.buildPythonPackage {
237 396 name = "pyramid-1.6.1";
238 397 buildInputs = with self; [];
239 398 doCheck = false;
240 399 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
241 400 src = fetchurl {
242 401 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
243 402 md5 = "b18688ff3cc33efdbb098a35b45dd122";
244 403 };
245 404 meta = {
246 405 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
247 406 };
248 407 };
249 408 pyramid-jinja2 = super.buildPythonPackage {
250 409 name = "pyramid-jinja2-2.5";
251 410 buildInputs = with self; [];
252 411 doCheck = false;
253 412 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
254 413 src = fetchurl {
255 414 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
256 415 md5 = "07cb6547204ac5e6f0b22a954ccee928";
257 416 };
258 417 meta = {
259 418 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
260 419 };
261 420 };
262 421 pyramid-mako = super.buildPythonPackage {
263 422 name = "pyramid-mako-1.0.2";
264 423 buildInputs = with self; [];
265 424 doCheck = false;
266 425 propagatedBuildInputs = with self; [pyramid Mako];
267 426 src = fetchurl {
268 427 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
269 428 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
270 429 };
271 430 meta = {
272 431 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
273 432 };
274 433 };
275 434 pytest = super.buildPythonPackage {
276 435 name = "pytest-2.8.5";
277 436 buildInputs = with self; [];
278 437 doCheck = false;
279 438 propagatedBuildInputs = with self; [py];
280 439 src = fetchurl {
281 440 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
282 441 md5 = "8493b06f700862f1294298d6c1b715a9";
283 442 };
284 443 meta = {
285 444 license = [ pkgs.lib.licenses.mit ];
286 445 };
287 446 };
288 447 repoze.lru = super.buildPythonPackage {
289 448 name = "repoze.lru-0.6";
290 449 buildInputs = with self; [];
291 450 doCheck = false;
292 451 propagatedBuildInputs = with self; [];
293 452 src = fetchurl {
294 453 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
295 454 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
296 455 };
297 456 meta = {
298 457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
299 458 };
300 459 };
301 460 rhodecode-vcsserver = super.buildPythonPackage {
302 name = "rhodecode-vcsserver-4.4.2";
303 buildInputs = with self; [mock pytest WebTest];
461 name = "rhodecode-vcsserver-4.5.0";
462 buildInputs = with self; [mock pytest pytest-sugar WebTest];
304 463 doCheck = true;
305 464 propagatedBuildInputs = with self; [configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid Pyro4 simplejson subprocess32 waitress WebOb];
306 465 src = ./.;
307 466 meta = {
308 467 license = [ pkgs.lib.licenses.gpl3 { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
309 468 };
310 469 };
311 470 serpent = super.buildPythonPackage {
312 471 name = "serpent-1.12";
313 472 buildInputs = with self; [];
314 473 doCheck = false;
315 474 propagatedBuildInputs = with self; [];
316 475 src = fetchurl {
317 476 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
318 477 md5 = "05869ac7b062828b34f8f927f0457b65";
319 478 };
320 479 meta = {
321 480 license = [ pkgs.lib.licenses.mit ];
322 481 };
323 482 };
324 483 setuptools = super.buildPythonPackage {
325 484 name = "setuptools-20.8.1";
326 485 buildInputs = with self; [];
327 486 doCheck = false;
328 487 propagatedBuildInputs = with self; [];
329 488 src = fetchurl {
330 489 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
331 490 md5 = "fe58a5cac0df20bb83942b252a4b0543";
332 491 };
333 492 meta = {
334 493 license = [ pkgs.lib.licenses.mit ];
335 494 };
336 495 };
496 simplegeneric = super.buildPythonPackage {
497 name = "simplegeneric-0.8.1";
498 buildInputs = with self; [];
499 doCheck = false;
500 propagatedBuildInputs = with self; [];
501 src = fetchurl {
502 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
503 md5 = "f9c1fab00fd981be588fc32759f474e3";
504 };
505 meta = {
506 license = [ pkgs.lib.licenses.zpt21 ];
507 };
508 };
337 509 simplejson = super.buildPythonPackage {
338 510 name = "simplejson-3.7.2";
339 511 buildInputs = with self; [];
340 512 doCheck = false;
341 513 propagatedBuildInputs = with self; [];
342 514 src = fetchurl {
343 515 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
344 516 md5 = "a5fc7d05d4cb38492285553def5d4b46";
345 517 };
346 518 meta = {
347 license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
519 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
348 520 };
349 521 };
350 522 six = super.buildPythonPackage {
351 523 name = "six-1.9.0";
352 524 buildInputs = with self; [];
353 525 doCheck = false;
354 526 propagatedBuildInputs = with self; [];
355 527 src = fetchurl {
356 528 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
357 529 md5 = "476881ef4012262dfc8adc645ee786c4";
358 530 };
359 531 meta = {
360 532 license = [ pkgs.lib.licenses.mit ];
361 533 };
362 534 };
363 535 subprocess32 = super.buildPythonPackage {
364 536 name = "subprocess32-3.2.6";
365 537 buildInputs = with self; [];
366 538 doCheck = false;
367 539 propagatedBuildInputs = with self; [];
368 540 src = fetchurl {
369 541 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
370 542 md5 = "754c5ab9f533e764f931136974b618f1";
371 543 };
372 544 meta = {
373 545 license = [ pkgs.lib.licenses.psfl ];
374 546 };
375 547 };
376 548 subvertpy = super.buildPythonPackage {
377 549 name = "subvertpy-0.9.3";
378 550 buildInputs = with self; [];
379 551 doCheck = false;
380 552 propagatedBuildInputs = with self; [];
381 553 src = fetchurl {
382 554 url = "https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz";
383 555 md5 = "7b745a47128050ea5a73efcd913ec1cf";
384 556 };
385 557 meta = {
386 558 license = [ pkgs.lib.licenses.lgpl21Plus ];
387 559 };
388 560 };
561 traitlets = super.buildPythonPackage {
562 name = "traitlets-4.3.1";
563 buildInputs = with self; [];
564 doCheck = false;
565 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
566 src = fetchurl {
567 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
568 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
569 };
570 meta = {
571 license = [ pkgs.lib.licenses.bsdOriginal ];
572 };
573 };
389 574 translationstring = super.buildPythonPackage {
390 575 name = "translationstring-1.3";
391 576 buildInputs = with self; [];
392 577 doCheck = false;
393 578 propagatedBuildInputs = with self; [];
394 579 src = fetchurl {
395 580 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
396 581 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
397 582 };
398 583 meta = {
399 584 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
400 585 };
401 586 };
402 587 venusian = super.buildPythonPackage {
403 588 name = "venusian-1.0";
404 589 buildInputs = with self; [];
405 590 doCheck = false;
406 591 propagatedBuildInputs = with self; [];
407 592 src = fetchurl {
408 593 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
409 594 md5 = "dccf2eafb7113759d60c86faf5538756";
410 595 };
411 596 meta = {
412 597 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
413 598 };
414 599 };
415 600 waitress = super.buildPythonPackage {
416 601 name = "waitress-0.8.9";
417 602 buildInputs = with self; [];
418 603 doCheck = false;
419 604 propagatedBuildInputs = with self; [setuptools];
420 605 src = fetchurl {
421 606 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
422 607 md5 = "da3f2e62b3676be5dd630703a68e2a04";
423 608 };
424 609 meta = {
425 610 license = [ pkgs.lib.licenses.zpt21 ];
426 611 };
427 612 };
613 wcwidth = super.buildPythonPackage {
614 name = "wcwidth-0.1.7";
615 buildInputs = with self; [];
616 doCheck = false;
617 propagatedBuildInputs = with self; [];
618 src = fetchurl {
619 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
620 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
621 };
622 meta = {
623 license = [ pkgs.lib.licenses.mit ];
624 };
625 };
428 626 wheel = super.buildPythonPackage {
429 627 name = "wheel-0.29.0";
430 628 buildInputs = with self; [];
431 629 doCheck = false;
432 630 propagatedBuildInputs = with self; [];
433 631 src = fetchurl {
434 632 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
435 633 md5 = "555a67e4507cedee23a0deb9651e452f";
436 634 };
437 635 meta = {
438 636 license = [ pkgs.lib.licenses.mit ];
439 637 };
440 638 };
441 639 zope.deprecation = super.buildPythonPackage {
442 640 name = "zope.deprecation-4.1.1";
443 641 buildInputs = with self; [];
444 642 doCheck = false;
445 643 propagatedBuildInputs = with self; [setuptools];
446 644 src = fetchurl {
447 645 url = "https://pypi.python.org/packages/c5/c9/e760f131fcde817da6c186a3f4952b8f206b7eeb269bb6f0836c715c5f20/zope.deprecation-4.1.1.tar.gz";
448 646 md5 = "ce261b9384066f7e13b63525778430cb";
449 647 };
450 648 meta = {
451 649 license = [ pkgs.lib.licenses.zpt21 ];
452 650 };
453 651 };
454 652 zope.interface = super.buildPythonPackage {
455 653 name = "zope.interface-4.1.3";
456 654 buildInputs = with self; [];
457 655 doCheck = false;
458 656 propagatedBuildInputs = with self; [setuptools];
459 657 src = fetchurl {
460 658 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
461 659 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
462 660 };
463 661 meta = {
464 662 license = [ pkgs.lib.licenses.zpt21 ];
465 663 };
466 664 };
467 665
468 666 ### Test requirements
469 667
470
668 pytest-sugar = super.buildPythonPackage {
669 name = "pytest-sugar-0.7.1";
670 buildInputs = with self; [];
671 doCheck = false;
672 propagatedBuildInputs = with self; [pytest termcolor];
673 src = fetchurl {
674 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
675 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
676 };
677 meta = {
678 license = [ pkgs.lib.licenses.bsdOriginal ];
679 };
680 };
681 termcolor = super.buildPythonPackage {
682 name = "termcolor-1.1.0";
683 buildInputs = with self; [];
684 doCheck = false;
685 propagatedBuildInputs = with self; [];
686 src = fetchurl {
687 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
688 md5 = "043e89644f8909d462fbbfa511c768df";
689 };
690 meta = {
691 license = [ pkgs.lib.licenses.mit ];
692 };
693 };
471 694 }
@@ -1,34 +1,35 b''
1 1 Beaker==1.7.0
2 2 configobj==5.0.6
3 3 dulwich==0.13.0
4 4 hgsubversion==1.8.6
5 5 infrae.cache==1.0.1
6 ipdb==0.10.1
6 7 mercurial==3.8.4
7 8 msgpack-python==0.4.6
8 9 py==1.4.29
9 10 pyramid==1.6.1
10 11 pyramid-jinja2==2.5
11 12 pyramid-mako==1.0.2
12 13 Pyro4==4.41
13 14 pytest==2.8.5
14 15 repoze.lru==0.6
15 16 serpent==1.12
16 17 setuptools==20.8.1
17 18 simplejson==3.7.2
18 19 subprocess32==3.2.6
19 20 # TODO: johbo: This version is not in source on PyPI currently,
20 21 # change back once this or a future version is available
21 22 https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz#md5=7b745a47128050ea5a73efcd913ec1cf
22 23 six==1.9.0
23 24 translationstring==1.3
24 25 waitress==0.8.9
25 26 WebOb==1.3.1
26 27 wheel==0.29.0
27 28 zope.deprecation==4.1.1
28 29 zope.interface==4.1.3
29 30 greenlet==0.4.7
30 31 gunicorn==19.6.0
31 32
32 33 # Test related requirements
33 34 mock==1.0.1
34 35 WebTest==1.4.3
@@ -1,102 +1,103 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from setuptools import setup, find_packages
19 19 from setuptools.command.test import test as TestCommand
20 20 from codecs import open
21 21 from os import path
22 22 import pkgutil
23 23 import sys
24 24
25 25
26 26 here = path.abspath(path.dirname(__file__))
27 27
28 28 with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
29 29 long_description = f.read()
30 30
31 31
32 32 def get_version():
33 33 version = pkgutil.get_data('vcsserver', 'VERSION')
34 34 return version.strip()
35 35
36 36
37 37 class PyTest(TestCommand):
38 38 user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
39 39
40 40 def initialize_options(self):
41 41 TestCommand.initialize_options(self)
42 42 self.pytest_args = []
43 43
44 44 def finalize_options(self):
45 45 TestCommand.finalize_options(self)
46 46 self.test_args = []
47 47 self.test_suite = True
48 48
49 49 def run_tests(self):
50 50 # import here, cause outside the eggs aren't loaded
51 51 import pytest
52 52 errno = pytest.main(self.pytest_args)
53 53 sys.exit(errno)
54 54
55 55
56 56 setup(
57 57 name='rhodecode-vcsserver',
58 58 version=get_version(),
59 59 description='Version Control System Server',
60 60 long_description=long_description,
61 61 url='http://www.rhodecode.com',
62 62 author='RhodeCode GmbH',
63 63 author_email='marcin@rhodecode.com',
64 64 cmdclass={'test': PyTest},
65 65 license='GPLv3',
66 66 classifiers=[
67 67 'Development Status :: 5 - Production/Stable',
68 68 'Intended Audience :: Developers',
69 69 'Topic :: Software Development :: Version Control',
70 70 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
71 71 'Programming Language :: Python :: 2.7',
72 72 ],
73 73 packages=find_packages(),
74 74 tests_require=[
75 75 'mock',
76 76 'pytest',
77 'pytest-sugar',
77 78 'WebTest',
78 79 ],
79 80 install_requires=[
80 81 'configobj',
81 82 'dulwich',
82 83 'hgsubversion',
83 84 'infrae.cache',
84 85 'mercurial',
85 86 'msgpack-python',
86 87 'pyramid',
87 88 'Pyro4',
88 89 'simplejson',
89 90 'subprocess32',
90 91 'waitress',
91 92 'WebOb',
92 93 ],
93 94 package_data={
94 95 'vcsserver': ['VERSION'],
95 96 },
96 97 entry_points={
97 98 'console_scripts': [
98 99 'vcsserver=vcsserver.main:main',
99 100 ],
100 101 'paste.app_factory': ['main=vcsserver.http_main:main']
101 102 },
102 103 )
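setup.py declares a `vcsserver` console script pointing at vcsserver.main:main, the same entry point that the test suite later in this changeset calls directly. A minimal programmatic invocation sketch (arguments omitted; a real deployment would pass a config file):

    # sketch: calling the console-script entry point directly
    from vcsserver import main

    main.main([])  # equivalent to running the `vcsserver` script with no arguments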
@@ -1,18 +1,41 b''
1 1 { pkgs ? import <nixpkgs> {}
2 , doCheck ? false
2 , doCheck ? false
3 3 }:
4 4
5 5 let
6
6 7 vcsserver = import ./default.nix {
7 inherit
8 doCheck
9 pkgs;
8 inherit pkgs doCheck;
10 9 };
11 10
11 vcs-pythonPackages = vcsserver.pythonPackages;
12
12 13 in vcsserver.override (attrs: {
13 14
14 15 # Avoid that we dump any sources into the store when entering the shell and
15 16 # make development a little bit more convenient.
16 17 src = null;
17 18
19 buildInputs =
20 attrs.buildInputs ++
21 (with vcs-pythonPackages; [
22 ipdb
23 ]);
24
25 # Somewhat snappier setup of the development environment
26 # TODO: think of supporting a stable path again, so that multiple shells
27 # can share it.
28 postShellHook = ''
29 # Set locale
30 export LC_ALL="en_US.UTF-8"
31
32 # Custom prompt to distinguish from other dev envs.
33 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
34
35 tmp_path=$(mktemp -d)
36 export PATH="$tmp_path/bin:$PATH"
37 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
38 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
39 python setup.py develop --prefix $tmp_path --allow-hosts ""
40 '';
18 41 })
@@ -1,36 +1,57 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import mock
19 import pytest
19 20
20 21 from vcsserver import main
22 from vcsserver.base import obfuscate_qs
21 23
22 24
23 25 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
24 26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
25 27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
26 28 main.main([])
27 29 patch_largefiles_capabilities.assert_called_once_with()
28 30
29 31
30 32 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
31 33 @mock.patch('vcsserver.main.MercurialFactory', None)
32 34 @mock.patch(
33 35 'vcsserver.hgpatches.patch_largefiles_capabilities',
34 36 mock.Mock(side_effect=Exception("Must not be called")))
35 37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
36 38 main.main([])
39
40
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
46 'a&b&c&query&foo=bar&auth_token=*****'),
47 ('', ''),
48 (None, None),
49 ('foo=bar', 'foo=bar'),
50 ('auth_token=secret', 'auth_token=*****'),
51 ('auth_token=secret&api_key=secret2',
52 'auth_token=*****&api_key=*****'),
53 ('auth_token=secret&api_key=secret2&param=value',
54 'auth_token=*****&api_key=*****&param=value'),
55 ])
56 def test_obfuscate_qs(given, expected):
57 assert expected == obfuscate_qs(given)
@@ -1,1 +1,1 b''
1 4.4.2 No newline at end of file
1 4.5.0 No newline at end of file
@@ -1,71 +1,85 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19
19 import urlparse
20 20
21 21 log = logging.getLogger(__name__)
22 22
23 23
24 24 class RepoFactory(object):
25 25 """
26 26 Utility to create instances of repository
27 27
28 28 It provides internal caching of the `repo` object based on
29 29 the :term:`call context`.
30 30 """
31 31
32 32 def __init__(self, repo_cache):
33 33 self._cache = repo_cache
34 34
35 35 def _create_config(self, path, config):
36 36 config = {}
37 37 return config
38 38
39 39 def _create_repo(self, wire, create):
40 40 raise NotImplementedError()
41 41
42 42 def repo(self, wire, create=False):
43 43 """
44 44 Get a repository instance for the given path.
45 45
46 46 Uses internally the low level beaker API since the decorators introduce
47 47 significant overhead.
48 48 """
49 49 def create_new_repo():
50 50 return self._create_repo(wire, create)
51 51
52 52 return self._repo(wire, create_new_repo)
53 53
54 54 def _repo(self, wire, createfunc):
55 55 context = wire.get('context', None)
56 56 cache = wire.get('cache', True)
57 57 log.debug(
58 58 'GET %s@%s with cache:%s. Context: %s',
59 59 self.__class__.__name__, wire['path'], cache, context)
60 60
61 61 if context and cache:
62 62 cache_key = (context, wire['path'])
63 63 log.debug(
64 64 'FETCH %s@%s repo object from cache. Context: %s',
65 65 self.__class__.__name__, wire['path'], context)
66 66 return self._cache.get(key=cache_key, createfunc=createfunc)
67 67 else:
68 68 log.debug(
69 69 'INIT %s@%s repo object based on wire %s. Context: %s',
70 70 self.__class__.__name__, wire['path'], wire, context)
71 71 return createfunc()
72
73
74 def obfuscate_qs(query_string):
75 if query_string is None:
76 return None
77
78 parsed = []
79 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
80 if k in ['auth_token', 'api_key']:
81 v = "*****"
82 parsed.append((k, v))
83
84 return '&'.join('{}{}'.format(
85 k, '={}'.format(v) if v else '') for k, v in parsed)
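The new obfuscate_qs helper masks credential-bearing query parameters (auth_token, api_key) before they reach the logs. Its behaviour for a representative input, taken from the parametrized test earlier in this changeset, looks like this (illustrative only):

    # illustrative: auth_token and api_key values are replaced with '*****'
    from vcsserver.base import obfuscate_qs

    print(obfuscate_qs('auth_token=secret&api_key=secret2&param=value'))
    # prints: auth_token=*****&api_key=*****&param=value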
@@ -1,56 +1,70 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Special exception handling over the wire.
20 20
21 21 Since we cannot assume that our client is able to import our exception classes,
22 22 this module provides a "wrapping" mechanism to raise plain exceptions
23 23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 24 different error conditions.
25 25 """
26 26
27 27 import functools
28 from pyramid.httpexceptions import HTTPLocked
28 29
29 30
30 31 def _make_exception(kind, *args):
31 32 """
32 33 Prepares a base `Exception` instance to be sent over the wire.
33 34
34 35 To give our caller a hint what this is about, it will attach an attribute
35 36 `_vcs_kind` to the exception.
36 37 """
37 38 exc = Exception(*args)
38 39 exc._vcs_kind = kind
39 40 return exc
40 41
41 42
42 43 AbortException = functools.partial(_make_exception, 'abort')
43 44
44 45 ArchiveException = functools.partial(_make_exception, 'archive')
45 46
46 47 LookupException = functools.partial(_make_exception, 'lookup')
47 48
48 49 VcsException = functools.partial(_make_exception, 'error')
49 50
50 51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
51 52
52 53 RequirementException = functools.partial(_make_exception, 'requirement')
53 54
54 55 UnhandledException = functools.partial(_make_exception, 'unhandled')
55 56
56 57 URLError = functools.partial(_make_exception, 'url_error')
58
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
60
61
62 class HTTPRepoLocked(HTTPLocked):
63 """
64 Subclass of HTTPLocked response that allows to set the title and status
65 code via constructor arguments.
66 """
67 def __init__(self, title, status_code=None, **kwargs):
68 self.code = status_code or HTTPLocked.code
69 self.title = title
70 super(HTTPRepoLocked, self).__init__(**kwargs)
@@ -1,573 +1,580 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import urllib
24 24 import urllib2
25 25 from functools import wraps
26 26
27 27 from dulwich import index, objects
28 28 from dulwich.client import HttpGitClient, LocalGitClient
29 29 from dulwich.errors import (
30 30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 31 MissingCommitError, ObjectMissing, HangupException,
32 32 UnexpectedCommandError)
33 33 from dulwich.repo import Repo as DulwichRepo, Tag
34 34 from dulwich.server import update_server_info
35 35
36 36 from vcsserver import exceptions, settings, subprocessio
37 37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory
38 from vcsserver.base import RepoFactory, obfuscate_qs
39 39 from vcsserver.hgcompat import (
40 hg_url, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 41
42 42
43 43 DIR_STAT = stat.S_IFDIR
44 44 FILE_MODE = stat.S_IFMT
45 45 GIT_LINK = objects.S_IFGITLINK
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def reraise_safe_exceptions(func):
51 51 """Converts Dulwich exceptions to something neutral."""
52 52 @wraps(func)
53 53 def wrapper(*args, **kwargs):
54 54 try:
55 55 return func(*args, **kwargs)
56 56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 57 ObjectMissing) as e:
58 58 raise exceptions.LookupException(e.message)
59 59 except (HangupException, UnexpectedCommandError) as e:
60 60 raise exceptions.VcsException(e.message)
61 61 return wrapper
62 62
63 63
64 64 class Repo(DulwichRepo):
65 65 """
66 66 A wrapper for dulwich Repo class.
67 67
68 68 Since dulwich sometimes keeps .idx file descriptors open, this leads to a
69 69 "Too many open files" error. We need to close all opened file descriptors
70 70 once the repo object is destroyed.
71 71
72 72 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 73 to 0.12.0 +
74 74 """
75 75 def __del__(self):
76 76 if hasattr(self, 'object_store'):
77 77 self.close()
78 78
79 79
80 80 class GitFactory(RepoFactory):
81 81
82 82 def _create_repo(self, wire, create):
83 83 repo_path = str_to_dulwich(wire['path'])
84 84 return Repo(repo_path)
85 85
86 86
87 87 class GitRemote(object):
88 88
89 89 def __init__(self, factory):
90 90 self._factory = factory
91 91
92 92 self._bulk_methods = {
93 93 "author": self.commit_attribute,
94 94 "date": self.get_object_attrs,
95 95 "message": self.commit_attribute,
96 96 "parents": self.commit_attribute,
97 97 "_commit": self.revision,
98 98 }
99 99
100 100 def _assign_ref(self, wire, ref, commit_id):
101 101 repo = self._factory.repo(wire)
102 102 repo[ref] = commit_id
103 103
104 104 @reraise_safe_exceptions
105 105 def add_object(self, wire, content):
106 106 repo = self._factory.repo(wire)
107 107 blob = objects.Blob()
108 108 blob.set_raw_string(content)
109 109 repo.object_store.add_object(blob)
110 110 return blob.id
111 111
112 112 @reraise_safe_exceptions
113 113 def assert_correct_path(self, wire):
114 114 try:
115 115 self._factory.repo(wire)
116 116 except NotGitRepository as e:
117 117 # Exception can contain unicode which we convert
118 118 raise exceptions.AbortException(repr(e))
119 119
120 120 @reraise_safe_exceptions
121 121 def bare(self, wire):
122 122 repo = self._factory.repo(wire)
123 123 return repo.bare
124 124
125 125 @reraise_safe_exceptions
126 126 def blob_as_pretty_string(self, wire, sha):
127 127 repo = self._factory.repo(wire)
128 128 return repo[sha].as_pretty_string()
129 129
130 130 @reraise_safe_exceptions
131 131 def blob_raw_length(self, wire, sha):
132 132 repo = self._factory.repo(wire)
133 133 blob = repo[sha]
134 134 return blob.raw_length()
135 135
136 136 @reraise_safe_exceptions
137 137 def bulk_request(self, wire, rev, pre_load):
138 138 result = {}
139 139 for attr in pre_load:
140 140 try:
141 141 method = self._bulk_methods[attr]
142 142 args = [wire, rev]
143 143 if attr == "date":
144 144 args.extend(["commit_time", "commit_timezone"])
145 145 elif attr in ["author", "message", "parents"]:
146 146 args.append(attr)
147 147 result[attr] = method(*args)
148 148 except KeyError:
149 149 raise exceptions.VcsException(
150 150 "Unknown bulk attribute: %s" % attr)
151 151 return result
152 152
153 153 def _build_opener(self, url):
154 154 handlers = []
155 url_obj = hg_url(url)
155 url_obj = url_parser(url)
156 156 _, authinfo = url_obj.authinfo()
157 157
158 158 if authinfo:
159 159 # create a password manager
160 160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 161 passmgr.add_password(*authinfo)
162 162
163 163 handlers.extend((httpbasicauthhandler(passmgr),
164 164 httpdigestauthhandler(passmgr)))
165 165
166 166 return urllib2.build_opener(*handlers)
167 167
168 168 @reraise_safe_exceptions
169 169 def check_url(self, url, config):
170 url_obj = hg_url(url)
170 url_obj = url_parser(url)
171 171 test_uri, _ = url_obj.authinfo()
172 172 url_obj.passwd = '*****'
173 url_obj.query = obfuscate_qs(url_obj.query)
173 174 cleaned_uri = str(url_obj)
175 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
174 176
175 177 if not test_uri.endswith('info/refs'):
176 178 test_uri = test_uri.rstrip('/') + '/info/refs'
177 179
178 180 o = self._build_opener(url)
179 181 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
180 182
181 183 q = {"service": 'git-upload-pack'}
182 184 qs = '?%s' % urllib.urlencode(q)
183 185 cu = "%s%s" % (test_uri, qs)
184 186 req = urllib2.Request(cu, None, {})
185 187
186 188 try:
189 log.debug("Trying to open URL %s", cleaned_uri)
187 190 resp = o.open(req)
188 191 if resp.code != 200:
189 raise Exception('Return Code is not 200')
192 raise exceptions.URLError('Return Code is not 200')
190 193 except Exception as e:
194 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
191 195 # means it cannot be cloned
192 raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
196 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
193 197
194 198 # now detect if it's proper git repo
195 199 gitdata = resp.read()
196 200 if 'service=git-upload-pack' in gitdata:
197 201 pass
198 202 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
199 203 # old style git can return some other format !
200 204 pass
201 205 else:
202 raise urllib2.URLError(
206 raise exceptions.URLError(
203 207 "url [%s] does not look like an git" % (cleaned_uri,))
204 208
205 209 return True
206 210
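`check_url` above masks the password and runs the query string through `obfuscate_qs` before logging the cleaned URI. The helper itself lives in `vcsserver.base` and is not shown in this diff; the snippet below is only a rough sketch of what such a scrubber could look like, assuming it hides credential-like parameters (the parameter names and the function name are guesses, not the real implementation).

# Rough sketch, not the actual vcsserver.base implementation: mask the
# values of credential-looking query parameters before logging a URL.
import urllib
import urlparse


def obfuscate_qs_sketch(query_string, hidden=('auth_token', 'password')):
    if not query_string:
        return query_string
    pairs = urlparse.parse_qsl(query_string, keep_blank_values=True)
    cleaned = [(k, '*****' if k in hidden else v) for k, v in pairs]
    return urllib.urlencode(cleaned)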
207 211 @reraise_safe_exceptions
208 212 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
209 213 remote_refs = self.fetch(wire, url, apply_refs=False)
210 214 repo = self._factory.repo(wire)
211 215 if isinstance(valid_refs, list):
212 216 valid_refs = tuple(valid_refs)
213 217
214 218 for k in remote_refs:
215 219 # only parse heads/tags and skip so called deferred tags
216 220 if k.startswith(valid_refs) and not k.endswith(deferred):
217 221 repo[k] = remote_refs[k]
218 222
219 223 if update_after_clone:
220 224 # we want to checkout HEAD
221 225 repo["HEAD"] = remote_refs["HEAD"]
222 226 index.build_index_from_tree(repo.path, repo.index_path(),
223 227 repo.object_store, repo["HEAD"].tree)
224 228
225 229 # TODO: this is quite complex, check if that can be simplified
226 230 @reraise_safe_exceptions
227 231 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
228 232 repo = self._factory.repo(wire)
229 233 object_store = repo.object_store
230 234
231 235 # Create tree and populates it with blobs
232 236 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
233 237
234 238 for node in updated:
235 239 # Compute subdirs if needed
236 240 dirpath, nodename = vcspath.split(node['path'])
237 241 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
238 242 parent = commit_tree
239 243 ancestors = [('', parent)]
240 244
241 245 # Tries to dig for the deepest existing tree
242 246 while dirnames:
243 247 curdir = dirnames.pop(0)
244 248 try:
245 249 dir_id = parent[curdir][1]
246 250 except KeyError:
247 251 # put curdir back into dirnames and stop
248 252 dirnames.insert(0, curdir)
249 253 break
250 254 else:
251 255 # If found, updates parent
252 256 parent = repo[dir_id]
253 257 ancestors.append((curdir, parent))
254 258 # Now parent is deepest existing tree and we need to create
255 259 # subtrees for dirnames (in reverse order)
256 260 # [this only applies for nodes from added]
257 261 new_trees = []
258 262
259 263 blob = objects.Blob.from_string(node['content'])
260 264
261 265 if dirnames:
262 266 # If there are trees which should be created we need to build
263 267 # them now (in reverse order)
264 268 reversed_dirnames = list(reversed(dirnames))
265 269 curtree = objects.Tree()
266 270 curtree[node['node_path']] = node['mode'], blob.id
267 271 new_trees.append(curtree)
268 272 for dirname in reversed_dirnames[:-1]:
269 273 newtree = objects.Tree()
270 274 newtree[dirname] = (DIR_STAT, curtree.id)
271 275 new_trees.append(newtree)
272 276 curtree = newtree
273 277 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
274 278 else:
275 279 parent.add(
276 280 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
277 281
278 282 new_trees.append(parent)
279 283 # Update ancestors
280 284 reversed_ancestors = reversed(
281 285 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
282 286 for parent, tree, path in reversed_ancestors:
283 287 parent[path] = (DIR_STAT, tree.id)
284 288 object_store.add_object(tree)
285 289
286 290 object_store.add_object(blob)
287 291 for tree in new_trees:
288 292 object_store.add_object(tree)
289 293
290 294 for node_path in removed:
291 295 paths = node_path.split('/')
292 296 tree = commit_tree
293 297 trees = [tree]
294 298 # Traverse deep into the forest...
295 299 for path in paths:
296 300 try:
297 301 obj = repo[tree[path][1]]
298 302 if isinstance(obj, objects.Tree):
299 303 trees.append(obj)
300 304 tree = obj
301 305 except KeyError:
302 306 break
303 307 # Cut down the blob and all rotten trees on the way back...
304 308 for path, tree in reversed(zip(paths, trees)):
305 309 del tree[path]
306 310 if tree:
307 311 # This tree still has elements - don't remove it or any
308 312 # of its parents
309 313 break
310 314
311 315 object_store.add_object(commit_tree)
312 316
313 317 # Create commit
314 318 commit = objects.Commit()
315 319 commit.tree = commit_tree.id
316 320 for k, v in commit_data.iteritems():
317 321 setattr(commit, k, v)
318 322 object_store.add_object(commit)
319 323
320 324 ref = 'refs/heads/%s' % branch
321 325 repo.refs[ref] = commit.id
322 326
323 327 return commit.id
324 328
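The tree handling in `commit` above is the delicate part: it reuses existing subtrees where it can and only builds the missing ones bottom-up. A stripped-down sketch of the same dulwich primitives, always creating fresh trees for a single nested path, might look like this (the path and content are made-up examples).

# Minimal sketch: store "docs/readme.txt" by chaining dulwich tree
# objects bottom-up, the way commit() does for newly added paths.
import stat

from dulwich import objects


def build_nested_tree(object_store, content):
    blob = objects.Blob.from_string(content)
    object_store.add_object(blob)

    docs_tree = objects.Tree()
    docs_tree['readme.txt'] = (0o100644, blob.id)  # regular file mode
    object_store.add_object(docs_tree)

    root_tree = objects.Tree()
    root_tree['docs'] = (stat.S_IFDIR, docs_tree.id)  # same value as DIR_STAT
    object_store.add_object(root_tree)
    return root_tree.id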
325 329 @reraise_safe_exceptions
326 330 def fetch(self, wire, url, apply_refs=True, refs=None):
327 331 if url != 'default' and '://' not in url:
328 332 client = LocalGitClient(url)
329 333 else:
330 url_obj = hg_url(url)
334 url_obj = url_parser(url)
331 335 o = self._build_opener(url)
332 336 url, _ = url_obj.authinfo()
333 337 client = HttpGitClient(base_url=url, opener=o)
334 338 repo = self._factory.repo(wire)
335 339
336 340 determine_wants = repo.object_store.determine_wants_all
337 341 if refs:
338 342 def determine_wants_requested(references):
339 343 return [references[r] for r in references if r in refs]
340 344 determine_wants = determine_wants_requested
341 345
342 346 try:
343 347 remote_refs = client.fetch(
344 348 path=url, target=repo, determine_wants=determine_wants)
345 349 except NotGitRepository:
346 350 log.warning(
347 351 'Trying to fetch from "%s" failed, not a Git repository.', url)
348 352 raise exceptions.AbortException()
349 353
350 354 # mikhail: client.fetch() returns all the remote refs, but fetches only
351 355 # refs filtered by `determine_wants` function. We need to filter result
352 356 # as well
353 357 if refs:
354 358 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
355 359
356 360 if apply_refs:
357 361 # TODO: johbo: Needs proper test coverage with a git repository
358 362 # that contains a tag object, so that we would end up with
359 363 # a peeled ref at this point.
360 364 PEELED_REF_MARKER = '^{}'
361 365 for k in remote_refs:
362 366 if k.endswith(PEELED_REF_MARKER):
363 367 log.info("Skipping peeled reference %s", k)
364 368 continue
365 369 repo[k] = remote_refs[k]
366 370
367 371 if refs:
368 372 # mikhail: explicitly set the head to the last ref.
369 373 repo['HEAD'] = remote_refs[refs[-1]]
370 374
371 375 # TODO: mikhail: should we return remote_refs here to be
372 376 # consistent?
373 377 else:
374 378 return remote_refs
375 379
376 380 @reraise_safe_exceptions
377 381 def get_remote_refs(self, wire, url):
378 382 repo = Repo(url)
379 383 return repo.get_refs()
380 384
381 385 @reraise_safe_exceptions
382 386 def get_description(self, wire):
383 387 repo = self._factory.repo(wire)
384 388 return repo.get_description()
385 389
386 390 @reraise_safe_exceptions
387 391 def get_file_history(self, wire, file_path, commit_id, limit):
388 392 repo = self._factory.repo(wire)
389 393 include = [commit_id]
390 394 paths = [file_path]
391 395
392 396 walker = repo.get_walker(include, paths=paths, max_entries=limit)
393 397 return [x.commit.id for x in walker]
394 398
395 399 @reraise_safe_exceptions
396 400 def get_missing_revs(self, wire, rev1, rev2, path2):
397 401 repo = self._factory.repo(wire)
398 402 LocalGitClient(thin_packs=False).fetch(path2, repo)
399 403
400 404 wire_remote = wire.copy()
401 405 wire_remote['path'] = path2
402 406 repo_remote = self._factory.repo(wire_remote)
403 407 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
404 408
405 409 revs = [
406 410 x.commit.id
407 411 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
408 412 return revs
409 413
410 414 @reraise_safe_exceptions
411 415 def get_object(self, wire, sha):
412 416 repo = self._factory.repo(wire)
413 417 obj = repo.get_object(sha)
414 418 commit_id = obj.id
415 419
416 420 if isinstance(obj, Tag):
417 421 commit_id = obj.object[1]
418 422
419 423 return {
420 424 'id': obj.id,
421 425 'type': obj.type_name,
422 426 'commit_id': commit_id
423 427 }
424 428
425 429 @reraise_safe_exceptions
426 430 def get_object_attrs(self, wire, sha, *attrs):
427 431 repo = self._factory.repo(wire)
428 432 obj = repo.get_object(sha)
429 433 return list(getattr(obj, a) for a in attrs)
430 434
431 435 @reraise_safe_exceptions
432 436 def get_refs(self, wire):
433 437 repo = self._factory.repo(wire)
434 438 result = {}
435 439 for ref, sha in repo.refs.as_dict().items():
436 440 peeled_sha = repo.get_peeled(ref)
437 441 result[ref] = peeled_sha
438 442 return result
439 443
440 444 @reraise_safe_exceptions
441 445 def get_refs_path(self, wire):
442 446 repo = self._factory.repo(wire)
443 447 return repo.refs.path
444 448
445 449 @reraise_safe_exceptions
446 450 def head(self, wire):
447 451 repo = self._factory.repo(wire)
448 452 return repo.head()
449 453
450 454 @reraise_safe_exceptions
451 455 def init(self, wire):
452 456 repo_path = str_to_dulwich(wire['path'])
453 457 self.repo = Repo.init(repo_path)
454 458
455 459 @reraise_safe_exceptions
456 460 def init_bare(self, wire):
457 461 repo_path = str_to_dulwich(wire['path'])
458 462 self.repo = Repo.init_bare(repo_path)
459 463
460 464 @reraise_safe_exceptions
461 465 def revision(self, wire, rev):
462 466 repo = self._factory.repo(wire)
463 467 obj = repo[rev]
464 468 obj_data = {
465 469 'id': obj.id,
466 470 }
467 471 try:
468 472 obj_data['tree'] = obj.tree
469 473 except AttributeError:
470 474 pass
471 475 return obj_data
472 476
473 477 @reraise_safe_exceptions
474 478 def commit_attribute(self, wire, rev, attr):
475 479 repo = self._factory.repo(wire)
476 480 obj = repo[rev]
477 481 return getattr(obj, attr)
478 482
479 483 @reraise_safe_exceptions
480 484 def set_refs(self, wire, key, value):
481 485 repo = self._factory.repo(wire)
482 486 repo.refs[key] = value
483 487
484 488 @reraise_safe_exceptions
485 489 def remove_ref(self, wire, key):
486 490 repo = self._factory.repo(wire)
487 491 del repo.refs[key]
488 492
489 493 @reraise_safe_exceptions
490 494 def tree_changes(self, wire, source_id, target_id):
491 495 repo = self._factory.repo(wire)
492 496 source = repo[source_id].tree if source_id else None
493 497 target = repo[target_id].tree
494 498 result = repo.object_store.tree_changes(source, target)
495 499 return list(result)
496 500
497 501 @reraise_safe_exceptions
498 502 def tree_items(self, wire, tree_id):
499 503 repo = self._factory.repo(wire)
500 504 tree = repo[tree_id]
501 505
502 506 result = []
503 507 for item in tree.iteritems():
504 508 item_sha = item.sha
505 509 item_mode = item.mode
506 510
507 511 if FILE_MODE(item_mode) == GIT_LINK:
508 512 item_type = "link"
509 513 else:
510 514 item_type = repo[item_sha].type_name
511 515
512 516 result.append((item.path, item_mode, item_sha, item_type))
513 517 return result
514 518
515 519 @reraise_safe_exceptions
516 520 def update_server_info(self, wire):
517 521 repo = self._factory.repo(wire)
518 522 update_server_info(repo)
519 523
520 524 @reraise_safe_exceptions
521 525 def discover_git_version(self):
522 526 stdout, _ = self.run_git_command(
523 527 {}, ['--version'], _bare=True, _safe=True)
524 return stdout
528 prefix = 'git version'
529 if stdout.startswith(prefix):
530 stdout = stdout[len(prefix):]
531 return stdout.strip()
525 532
526 533 @reraise_safe_exceptions
527 534 def run_git_command(self, wire, cmd, **opts):
528 535 path = wire.get('path', None)
529 536
530 537 if path and os.path.isdir(path):
531 538 opts['cwd'] = path
532 539
533 540 if '_bare' in opts:
534 541 _copts = []
535 542 del opts['_bare']
536 543 else:
537 544 _copts = ['-c', 'core.quotepath=false', ]
538 545 safe_call = False
539 546 if '_safe' in opts:
540 547 # no exc on failure
541 548 del opts['_safe']
542 549 safe_call = True
543 550
544 551 gitenv = os.environ.copy()
545 552 gitenv.update(opts.pop('extra_env', {}))
546 553 # need to clean fix GIT_DIR !
547 554 if 'GIT_DIR' in gitenv:
548 555 del gitenv['GIT_DIR']
549 556 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
550 557
551 558 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
552 559
553 560 try:
554 561 _opts = {'env': gitenv, 'shell': False}
555 562 _opts.update(opts)
556 563 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
557 564
558 565 return ''.join(p), ''.join(p.error)
559 566 except (EnvironmentError, OSError) as err:
560 567 tb_err = ("Couldn't run git command (%s).\n"
561 568 "Original error was:%s\n" % (cmd, err))
562 569 log.exception(tb_err)
563 570 if safe_call:
564 571 return '', err
565 572 else:
566 573 raise exceptions.VcsException(tb_err)
567 574
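For reference, the special keyword arguments handled by `run_git_command` above are `_bare` (skip the default `-c core.quotepath=false` options) and `_safe` (return the error instead of raising `VcsException`), as `discover_git_version` already demonstrates. A hypothetical caller, assuming `remote` is a `GitRemote` instance and `wire` carries a valid repository 'path', could look roughly like this.

# Usage sketch only; `remote` and `wire` are assumed to exist elsewhere.
def current_head(remote, wire):
    # Runs `git rev-parse HEAD` in the repository referenced by wire.
    stdout, _ = remote.run_git_command(wire, ['rev-parse', 'HEAD'])
    return stdout.strip()


def git_version(remote):
    # _bare drops the default copts, _safe returns errors instead of raising.
    stdout, _ = remote.run_git_command({}, ['--version'], _bare=True, _safe=True)
    return stdout.strip()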
568 575
569 576 def str_to_dulwich(value):
570 577 """
571 578 Dulwich 0.10.1a requires `unicode` objects to be passed in.
572 579 """
573 580 return value.decode(settings.WIRE_ENCODING)
@@ -1,707 +1,723 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import sys
22 22 import urllib
23 23 import urllib2
24 24
25 25 from hgext import largefiles, rebase
26 26 from hgext.strip import strip as hgext_strip
27 27 from mercurial import commands
28 28 from mercurial import unionrepo
29 29
30 30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory
31 from vcsserver.base import RepoFactory, obfuscate_qs
32 32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex, hg_url,
34 httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepository,
35 match, memctx, exchange, memfilectx, nullrev, patch, peer, revrange, ui,
36 Abort, LookupError, RepoError, RepoLookupError, InterventionRequired,
37 RequirementError)
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 57 # signal in a non-main thread, thus generating a ValueError.
58 58 baseui.setconfig('worker', 'numcpus', 1)
59 59
60 60 # If there is no config for the largefiles extension, we explicitly disable
61 61 # it here. This overrides settings from repositories hgrc file. Recent
62 62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 63 # repo.
64 64 if not baseui.hasconfig('extensions', 'largefiles'):
65 65 log.debug('Explicitly disable largefiles extension for repo.')
66 66 baseui.setconfig('extensions', 'largefiles', '!')
67 67
68 68 return baseui
69 69
70 70
71 71 def reraise_safe_exceptions(func):
72 72 """Decorator for converting mercurial exceptions to something neutral."""
73 73 def wrapper(*args, **kwargs):
74 74 try:
75 75 return func(*args, **kwargs)
76 76 except (Abort, InterventionRequired):
77 77 raise_from_original(exceptions.AbortException)
78 78 except RepoLookupError:
79 79 raise_from_original(exceptions.LookupException)
80 80 except RequirementError:
81 81 raise_from_original(exceptions.RequirementException)
82 82 except RepoError:
83 83 raise_from_original(exceptions.VcsException)
84 84 except LookupError:
85 85 raise_from_original(exceptions.LookupException)
86 86 except Exception as e:
87 87 if not hasattr(e, '_vcs_kind'):
88 88 log.exception("Unhandled exception in hg remote call")
89 89 raise_from_original(exceptions.UnhandledException)
90 90 raise
91 91 return wrapper
92 92
93 93
94 94 def raise_from_original(new_type):
95 95 """
96 96 Raise a new exception type with original args and traceback.
97 97 """
98 98 _, original, traceback = sys.exc_info()
99 99 try:
100 100 raise new_type(*original.args), None, traceback
101 101 finally:
102 102 del traceback
103 103
104 104
105 105 class MercurialFactory(RepoFactory):
106 106
107 107 def _create_config(self, config, hooks=True):
108 108 if not hooks:
109 109 hooks_to_clean = frozenset((
110 110 'changegroup.repo_size', 'preoutgoing.pre_pull',
111 111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
112 112 new_config = []
113 113 for section, option, value in config:
114 114 if section == 'hooks' and option in hooks_to_clean:
115 115 continue
116 116 new_config.append((section, option, value))
117 117 config = new_config
118 118
119 119 baseui = make_ui_from_config(config)
120 120 return baseui
121 121
122 122 def _create_repo(self, wire, create):
123 123 baseui = self._create_config(wire["config"])
124 124 return localrepository(baseui, wire["path"], create)
125 125
126 126
127 127 class HgRemote(object):
128 128
129 129 def __init__(self, factory):
130 130 self._factory = factory
131 131
132 132 self._bulk_methods = {
133 133 "affected_files": self.ctx_files,
134 134 "author": self.ctx_user,
135 135 "branch": self.ctx_branch,
136 136 "children": self.ctx_children,
137 137 "date": self.ctx_date,
138 138 "message": self.ctx_description,
139 139 "parents": self.ctx_parents,
140 140 "status": self.ctx_status,
141 141 "_file_paths": self.ctx_list,
142 142 }
143 143
144 144 @reraise_safe_exceptions
145 def discover_hg_version(self):
146 from mercurial import util
147 return util.version()
148
149 @reraise_safe_exceptions
145 150 def archive_repo(self, archive_path, mtime, file_info, kind):
146 151 if kind == "tgz":
147 152 archiver = archival.tarit(archive_path, mtime, "gz")
148 153 elif kind == "tbz2":
149 154 archiver = archival.tarit(archive_path, mtime, "bz2")
150 155 elif kind == 'zip':
151 156 archiver = archival.zipit(archive_path, mtime)
152 157 else:
153 158 raise exceptions.ArchiveException(
154 159 'Remote does not support: "%s".' % kind)
155 160
156 161 for f_path, f_mode, f_is_link, f_content in file_info:
157 162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 163 archiver.done()
159 164
160 165 @reraise_safe_exceptions
161 166 def bookmarks(self, wire):
162 167 repo = self._factory.repo(wire)
163 168 return dict(repo._bookmarks)
164 169
165 170 @reraise_safe_exceptions
166 171 def branches(self, wire, normal, closed):
167 172 repo = self._factory.repo(wire)
168 173 iter_branches = repo.branchmap().iterbranches()
169 174 bt = {}
170 175 for branch_name, _heads, tip, is_closed in iter_branches:
171 176 if normal and not is_closed:
172 177 bt[branch_name] = tip
173 178 if closed and is_closed:
174 179 bt[branch_name] = tip
175 180
176 181 return bt
177 182
178 183 @reraise_safe_exceptions
179 184 def bulk_request(self, wire, rev, pre_load):
180 185 result = {}
181 186 for attr in pre_load:
182 187 try:
183 188 method = self._bulk_methods[attr]
184 189 result[attr] = method(wire, rev)
185 190 except KeyError:
186 191 raise exceptions.VcsException(
187 192 'Unknown bulk attribute: "%s"' % attr)
188 193 return result
189 194
190 195 @reraise_safe_exceptions
191 196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 198 clone(baseui, source, dest, noupdate=not update_after_clone)
194 199
195 200 @reraise_safe_exceptions
196 201 def commitctx(
197 202 self, wire, message, parents, commit_time, commit_timezone,
198 203 user, files, extra, removed, updated):
199 204
200 205 def _filectxfn(_repo, memctx, path):
201 206 """
202 207 Marks given path as added/changed/removed in a given _repo. This is
203 208 for internal mercurial commit function.
204 209 """
205 210
206 211 # check if this path is removed
207 212 if path in removed:
208 213 # returning None is a way to mark node for removal
209 214 return None
210 215
211 216 # check if this path is added
212 217 for node in updated:
213 218 if node['path'] == path:
214 219 return memfilectx(
215 220 _repo,
216 221 path=node['path'],
217 222 data=node['content'],
218 223 islink=False,
219 224 isexec=bool(node['mode'] & stat.S_IXUSR),
220 225 copied=False,
221 226 memctx=memctx)
222 227
223 228 raise exceptions.AbortException(
224 229 "Given path haven't been marked as added, "
225 230 "changed or removed (%s)" % path)
226 231
227 232 repo = self._factory.repo(wire)
228 233
229 234 commit_ctx = memctx(
230 235 repo=repo,
231 236 parents=parents,
232 237 text=message,
233 238 files=files,
234 239 filectxfn=_filectxfn,
235 240 user=user,
236 241 date=(commit_time, commit_timezone),
237 242 extra=extra)
238 243
239 244 n = repo.commitctx(commit_ctx)
240 245 new_id = hex(n)
241 246
242 247 return new_id
243 248
244 249 @reraise_safe_exceptions
245 250 def ctx_branch(self, wire, revision):
246 251 repo = self._factory.repo(wire)
247 252 ctx = repo[revision]
248 253 return ctx.branch()
249 254
250 255 @reraise_safe_exceptions
251 256 def ctx_children(self, wire, revision):
252 257 repo = self._factory.repo(wire)
253 258 ctx = repo[revision]
254 259 return [child.rev() for child in ctx.children()]
255 260
256 261 @reraise_safe_exceptions
257 262 def ctx_date(self, wire, revision):
258 263 repo = self._factory.repo(wire)
259 264 ctx = repo[revision]
260 265 return ctx.date()
261 266
262 267 @reraise_safe_exceptions
263 268 def ctx_description(self, wire, revision):
264 269 repo = self._factory.repo(wire)
265 270 ctx = repo[revision]
266 271 return ctx.description()
267 272
268 273 @reraise_safe_exceptions
269 274 def ctx_diff(
270 275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 276 repo = self._factory.repo(wire)
272 277 ctx = repo[revision]
273 278 result = ctx.diff(
274 279 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 280 return list(result)
276 281
277 282 @reraise_safe_exceptions
278 283 def ctx_files(self, wire, revision):
279 284 repo = self._factory.repo(wire)
280 285 ctx = repo[revision]
281 286 return ctx.files()
282 287
283 288 @reraise_safe_exceptions
284 289 def ctx_list(self, path, revision):
285 290 repo = self._factory.repo(path)
286 291 ctx = repo[revision]
287 292 return list(ctx)
288 293
289 294 @reraise_safe_exceptions
290 295 def ctx_parents(self, wire, revision):
291 296 repo = self._factory.repo(wire)
292 297 ctx = repo[revision]
293 298 return [parent.rev() for parent in ctx.parents()]
294 299
295 300 @reraise_safe_exceptions
296 301 def ctx_substate(self, wire, revision):
297 302 repo = self._factory.repo(wire)
298 303 ctx = repo[revision]
299 304 return ctx.substate
300 305
301 306 @reraise_safe_exceptions
302 307 def ctx_status(self, wire, revision):
303 308 repo = self._factory.repo(wire)
304 309 ctx = repo[revision]
305 310 status = repo[ctx.p1().node()].status(other=ctx.node())
306 311 # object of status (odd, custom named tuple in mercurial) is not
307 312 # correctly serializable via Pyro, we make it a list, as the underling
308 313 # API expects this to be a list
309 314 return list(status)
310 315
311 316 @reraise_safe_exceptions
312 317 def ctx_user(self, wire, revision):
313 318 repo = self._factory.repo(wire)
314 319 ctx = repo[revision]
315 320 return ctx.user()
316 321
317 322 @reraise_safe_exceptions
318 323 def check_url(self, url, config):
319 log.info("Checking URL for remote cloning/import: %s", url)
320 324 _proto = None
321 325 if '+' in url[:url.find('://')]:
322 326 _proto = url[0:url.find('+')]
323 327 url = url[url.find('+') + 1:]
324 328 handlers = []
325 url_obj = hg_url(url)
329 url_obj = url_parser(url)
326 330 test_uri, authinfo = url_obj.authinfo()
327 331 url_obj.passwd = '*****'
332 url_obj.query = obfuscate_qs(url_obj.query)
333
328 334 cleaned_uri = str(url_obj)
335 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
329 336
330 337 if authinfo:
331 338 # create a password manager
332 339 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
333 340 passmgr.add_password(*authinfo)
334 341
335 342 handlers.extend((httpbasicauthhandler(passmgr),
336 343 httpdigestauthhandler(passmgr)))
337 344
338 345 o = urllib2.build_opener(*handlers)
339 346 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
340 347 ('Accept', 'application/mercurial-0.1')]
341 348
342 349 q = {"cmd": 'between'}
343 350 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
344 351 qs = '?%s' % urllib.urlencode(q)
345 352 cu = "%s%s" % (test_uri, qs)
346 353 req = urllib2.Request(cu, None, {})
347 354
348 355 try:
349 log.debug("Trying to open URL %s", url)
356 log.debug("Trying to open URL %s", cleaned_uri)
350 357 resp = o.open(req)
351 358 if resp.code != 200:
352 359 raise exceptions.URLError('Return Code is not 200')
353 360 except Exception as e:
354 log.warning("URL cannot be opened: %s", url, exc_info=True)
361 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
355 362 # means it cannot be cloned
356 363 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
357 364
358 365 # now check if it's a proper hg repo, but don't do it for svn
359 366 try:
360 367 if _proto == 'svn':
361 368 pass
362 369 else:
363 370 # check for pure hg repos
364 371 log.debug(
365 "Verifying if URL is a Mercurial repository: %s", url)
372 "Verifying if URL is a Mercurial repository: %s",
373 cleaned_uri)
366 374 httppeer(make_ui_from_config(config), url).lookup('tip')
367 375 except Exception as e:
368 log.warning("URL is not a valid Mercurial repository: %s", url)
376 log.warning("URL is not a valid Mercurial repository: %s",
377 cleaned_uri)
369 378 raise exceptions.URLError(
370 379 "url [%s] does not look like an hg repo org_exc: %s"
371 380 % (cleaned_uri, e))
372 381
373 log.info("URL is a valid Mercurial repository: %s", url)
382 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
374 383 return True
375 384
376 385 @reraise_safe_exceptions
377 386 def diff(
378 387 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
379 388 context):
380 389 repo = self._factory.repo(wire)
381 390
382 391 if file_filter:
383 392 filter = match(file_filter[0], '', [file_filter[1]])
384 393 else:
385 394 filter = file_filter
386 395 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
387 396
388 397 try:
389 398 return "".join(patch.diff(
390 399 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
391 400 except RepoLookupError:
392 401 raise exceptions.LookupException()
393 402
394 403 @reraise_safe_exceptions
395 404 def file_history(self, wire, revision, path, limit):
396 405 repo = self._factory.repo(wire)
397 406
398 407 ctx = repo[revision]
399 408 fctx = ctx.filectx(path)
400 409
401 410 def history_iter():
402 411 limit_rev = fctx.rev()
403 412 for obj in reversed(list(fctx.filelog())):
404 413 obj = fctx.filectx(obj)
405 414 if limit_rev >= obj.rev():
406 415 yield obj
407 416
408 417 history = []
409 418 for cnt, obj in enumerate(history_iter()):
410 419 if limit and cnt >= limit:
411 420 break
412 421 history.append(hex(obj.node()))
413 422
414 423 return [x for x in history]
415 424
416 425 @reraise_safe_exceptions
417 426 def file_history_untill(self, wire, revision, path, limit):
418 427 repo = self._factory.repo(wire)
419 428 ctx = repo[revision]
420 429 fctx = ctx.filectx(path)
421 430
422 431 file_log = list(fctx.filelog())
423 432 if limit:
424 433 # Limit to the last n items
425 434 file_log = file_log[-limit:]
426 435
427 436 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
428 437
429 438 @reraise_safe_exceptions
430 439 def fctx_annotate(self, wire, revision, path):
431 440 repo = self._factory.repo(wire)
432 441 ctx = repo[revision]
433 442 fctx = ctx.filectx(path)
434 443
435 444 result = []
436 445 for i, annotate_data in enumerate(fctx.annotate()):
437 446 ln_no = i + 1
438 447 sha = hex(annotate_data[0].node())
439 448 result.append((ln_no, sha, annotate_data[1]))
440 449 return result
441 450
442 451 @reraise_safe_exceptions
443 452 def fctx_data(self, wire, revision, path):
444 453 repo = self._factory.repo(wire)
445 454 ctx = repo[revision]
446 455 fctx = ctx.filectx(path)
447 456 return fctx.data()
448 457
449 458 @reraise_safe_exceptions
450 459 def fctx_flags(self, wire, revision, path):
451 460 repo = self._factory.repo(wire)
452 461 ctx = repo[revision]
453 462 fctx = ctx.filectx(path)
454 463 return fctx.flags()
455 464
456 465 @reraise_safe_exceptions
457 466 def fctx_size(self, wire, revision, path):
458 467 repo = self._factory.repo(wire)
459 468 ctx = repo[revision]
460 469 fctx = ctx.filectx(path)
461 470 return fctx.size()
462 471
463 472 @reraise_safe_exceptions
464 473 def get_all_commit_ids(self, wire, name):
465 474 repo = self._factory.repo(wire)
466 475 revs = repo.filtered(name).changelog.index
467 476 return map(lambda x: hex(x[7]), revs)[:-1]
468 477
469 478 @reraise_safe_exceptions
470 479 def get_config_value(self, wire, section, name, untrusted=False):
471 480 repo = self._factory.repo(wire)
472 481 return repo.ui.config(section, name, untrusted=untrusted)
473 482
474 483 @reraise_safe_exceptions
475 484 def get_config_bool(self, wire, section, name, untrusted=False):
476 485 repo = self._factory.repo(wire)
477 486 return repo.ui.configbool(section, name, untrusted=untrusted)
478 487
479 488 @reraise_safe_exceptions
480 489 def get_config_list(self, wire, section, name, untrusted=False):
481 490 repo = self._factory.repo(wire)
482 491 return repo.ui.configlist(section, name, untrusted=untrusted)
483 492
484 493 @reraise_safe_exceptions
485 494 def is_large_file(self, wire, path):
486 495 return largefiles.lfutil.isstandin(path)
487 496
488 497 @reraise_safe_exceptions
489 498 def in_store(self, wire, sha):
490 499 repo = self._factory.repo(wire)
491 500 return largefiles.lfutil.instore(repo, sha)
492 501
493 502 @reraise_safe_exceptions
494 503 def in_user_cache(self, wire, sha):
495 504 repo = self._factory.repo(wire)
496 505 return largefiles.lfutil.inusercache(repo.ui, sha)
497 506
498 507 @reraise_safe_exceptions
499 508 def store_path(self, wire, sha):
500 509 repo = self._factory.repo(wire)
501 510 return largefiles.lfutil.storepath(repo, sha)
502 511
503 512 @reraise_safe_exceptions
504 513 def link(self, wire, sha, path):
505 514 repo = self._factory.repo(wire)
506 515 largefiles.lfutil.link(
507 516 largefiles.lfutil.usercachepath(repo.ui, sha), path)
508 517
509 518 @reraise_safe_exceptions
510 519 def localrepository(self, wire, create=False):
511 520 self._factory.repo(wire, create=create)
512 521
513 522 @reraise_safe_exceptions
514 523 def lookup(self, wire, revision, both):
515 524 # TODO Paris: Ugly hack to "deserialize" long for msgpack
516 525 if isinstance(revision, float):
517 526 revision = long(revision)
518 527 repo = self._factory.repo(wire)
519 528 try:
520 529 ctx = repo[revision]
521 530 except RepoLookupError:
522 531 raise exceptions.LookupException(revision)
523 532 except LookupError as e:
524 533 raise exceptions.LookupException(e.name)
525 534
526 535 if not both:
527 536 return ctx.hex()
528 537
529 538 ctx = repo[ctx.hex()]
530 539 return ctx.hex(), ctx.rev()
531 540
532 541 @reraise_safe_exceptions
533 542 def pull(self, wire, url, commit_ids=None):
534 543 repo = self._factory.repo(wire)
535 544 remote = peer(repo, {}, url)
536 545 if commit_ids:
537 546 commit_ids = [bin(commit_id) for commit_id in commit_ids]
538 547
539 548 return exchange.pull(
540 549 repo, remote, heads=commit_ids, force=None).cgresult
541 550
542 551 @reraise_safe_exceptions
543 552 def revision(self, wire, rev):
544 553 repo = self._factory.repo(wire)
545 554 ctx = repo[rev]
546 555 return ctx.rev()
547 556
548 557 @reraise_safe_exceptions
549 558 def rev_range(self, wire, filter):
550 559 repo = self._factory.repo(wire)
551 560 revisions = [rev for rev in revrange(repo, filter)]
552 561 return revisions
553 562
554 563 @reraise_safe_exceptions
555 564 def rev_range_hash(self, wire, node):
556 565 repo = self._factory.repo(wire)
557 566
558 567 def get_revs(repo, rev_opt):
559 568 if rev_opt:
560 569 revs = revrange(repo, rev_opt)
561 570 if len(revs) == 0:
562 571 return (nullrev, nullrev)
563 572 return max(revs), min(revs)
564 573 else:
565 574 return len(repo) - 1, 0
566 575
567 576 stop, start = get_revs(repo, [node + ':'])
568 577 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
569 578 return revs
570 579
571 580 @reraise_safe_exceptions
572 581 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
573 582 other_path = kwargs.pop('other_path', None)
574 583
575 584 # case when we want to compare two independent repositories
576 585 if other_path and other_path != wire["path"]:
577 586 baseui = self._factory._create_config(wire["config"])
578 587 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
579 588 else:
580 589 repo = self._factory.repo(wire)
581 590 return list(repo.revs(rev_spec, *args))
582 591
583 592 @reraise_safe_exceptions
584 593 def strip(self, wire, revision, update, backup):
585 594 repo = self._factory.repo(wire)
586 595 ctx = repo[revision]
587 596 hgext_strip(
588 597 repo.baseui, repo, ctx.node(), update=update, backup=backup)
589 598
590 599 @reraise_safe_exceptions
591 600 def tag(self, wire, name, revision, message, local, user,
592 601 tag_time, tag_timezone):
593 602 repo = self._factory.repo(wire)
594 603 ctx = repo[revision]
595 604 node = ctx.node()
596 605
597 606 date = (tag_time, tag_timezone)
598 607 try:
599 608 repo.tag(name, node, message, local, user, date)
600 609 except Abort:
601 610 log.exception("Tag operation aborted")
602 611 raise exceptions.AbortException()
603 612
604 613 @reraise_safe_exceptions
605 614 def tags(self, wire):
606 615 repo = self._factory.repo(wire)
607 616 return repo.tags()
608 617
609 618 @reraise_safe_exceptions
610 619 def update(self, wire, node=None, clean=False):
611 620 repo = self._factory.repo(wire)
612 621 baseui = self._factory._create_config(wire['config'])
613 622 commands.update(baseui, repo, node=node, clean=clean)
614 623
615 624 @reraise_safe_exceptions
616 625 def identify(self, wire):
617 626 repo = self._factory.repo(wire)
618 627 baseui = self._factory._create_config(wire['config'])
619 628 output = io.BytesIO()
620 629 baseui.write = output.write
621 630 # This is required to get a full node id
622 631 baseui.debugflag = True
623 632 commands.identify(baseui, repo, id=True)
624 633
625 634 return output.getvalue()
626 635
627 636 @reraise_safe_exceptions
628 637 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
629 638 hooks=True):
630 639 repo = self._factory.repo(wire)
631 640 baseui = self._factory._create_config(wire['config'], hooks=hooks)
632 641
633 642 # Mercurial internally has a lot of logic that checks ONLY if an
634 643 # option is defined; we only pass options along when they are defined
635 644 opts = {}
636 645 if bookmark:
637 646 opts['bookmark'] = bookmark
638 647 if branch:
639 648 opts['branch'] = branch
640 649 if revision:
641 650 opts['rev'] = revision
642 651
643 652 commands.pull(baseui, repo, source, **opts)
644 653
645 654 @reraise_safe_exceptions
646 655 def heads(self, wire, branch=None):
647 656 repo = self._factory.repo(wire)
648 657 baseui = self._factory._create_config(wire['config'])
649 658 output = io.BytesIO()
650 659
651 660 def write(data, **unused_kwargs):
652 661 output.write(data)
653 662
654 663 baseui.write = write
655 664 if branch:
656 665 args = [branch]
657 666 else:
658 667 args = []
659 668 commands.heads(baseui, repo, template='{node} ', *args)
660 669
661 670 return output.getvalue()
662 671
663 672 @reraise_safe_exceptions
664 673 def ancestor(self, wire, revision1, revision2):
665 674 repo = self._factory.repo(wire)
666 675 baseui = self._factory._create_config(wire['config'])
667 676 output = io.BytesIO()
668 677 baseui.write = output.write
669 678 commands.debugancestor(baseui, repo, revision1, revision2)
670 679
671 680 return output.getvalue()
672 681
673 682 @reraise_safe_exceptions
674 683 def push(self, wire, revisions, dest_path, hooks=True,
675 684 push_branches=False):
676 685 repo = self._factory.repo(wire)
677 686 baseui = self._factory._create_config(wire['config'], hooks=hooks)
678 687 commands.push(baseui, repo, dest=dest_path, rev=revisions,
679 688 new_branch=push_branches)
680 689
681 690 @reraise_safe_exceptions
682 691 def merge(self, wire, revision):
683 692 repo = self._factory.repo(wire)
684 693 baseui = self._factory._create_config(wire['config'])
685 694 repo.ui.setconfig('ui', 'merge', 'internal:dump')
695
696 # In case sub repositories are used, mercurial prompts the user in
697 # case of merge conflicts or different sub repository sources. By
698 # setting the interactive flag to `False` mercurial doesn't prompt the
699 # user but instead uses a default value.
700 repo.ui.setconfig('ui', 'interactive', False)
701
686 702 commands.merge(baseui, repo, rev=revision)
687 703
688 704 @reraise_safe_exceptions
689 705 def commit(self, wire, message, username):
690 706 repo = self._factory.repo(wire)
691 707 baseui = self._factory._create_config(wire['config'])
692 708 repo.ui.setconfig('ui', 'username', username)
693 709 commands.commit(baseui, repo, message=message)
694 710
695 711 @reraise_safe_exceptions
696 712 def rebase(self, wire, source=None, dest=None, abort=False):
697 713 repo = self._factory.repo(wire)
698 714 baseui = self._factory._create_config(wire['config'])
699 715 repo.ui.setconfig('ui', 'merge', 'internal:dump')
700 716 rebase.rebase(
701 717 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
702 718
703 719 @reraise_safe_exceptions
704 720 def bookmark(self, wire, bookmark, revision=None):
705 721 repo = self._factory.repo(wire)
706 722 baseui = self._factory._create_config(wire['config'])
707 723 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,61 +1,62 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 import mercurial.demandimport
24 24 # patch demandimport, due to a bug in mercurial that always triggers
25 25 # demandimport.enable()
26 26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 39
39 40 from mercurial.commands import clone, nullid, pull
40 41 from mercurial.context import memctx, memfilectx
41 42 from mercurial.error import (
42 43 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
43 44 RequirementError)
44 45 from mercurial.hgweb import hgweb_mod
45 46 from mercurial.localrepo import localrepository
46 47 from mercurial.match import match
47 48 from mercurial.mdiff import diffopts
48 49 from mercurial.node import bin, hex
49 50 from mercurial.encoding import tolocal
50 51 from mercurial.discovery import findcommonoutgoing
51 52 from mercurial.hg import peer
52 53 from mercurial.httppeer import httppeer
53 54 from mercurial.util import url as hg_url
54 55 from mercurial.scmutil import revrange
55 56 from mercurial.node import nullrev
56 57 from mercurial import exchange
57 58 from hgext import largefiles
58 59
59 60 # those auth handlers are patched for a python 2.6.5 bug causing
60 61 # infinite looping when given invalid resources
61 62 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
@@ -1,60 +1,134 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Adjustments to Mercurial
20 20
21 21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 22 be applied without having to import the whole Mercurial machinery.
23 23
24 24 Imports are function local, so that just importing this module does not cause
25 25 side-effects other than these functions being defined.
26 26 """
27 27
28 28 import logging
29 29
30 30
31 31 def patch_largefiles_capabilities():
32 32 """
33 33 Patches the capabilities function in the largefiles extension.
34 34 """
35 35 from vcsserver import hgcompat
36 36 lfproto = hgcompat.largefiles.proto
37 37 wrapper = _dynamic_capabilities_wrapper(
38 38 lfproto, hgcompat.extensions.extensions)
39 39 lfproto.capabilities = wrapper
40 40
41 41
42 42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43 43
44 44 wrapped_capabilities = lfproto.capabilities
45 45 logger = logging.getLogger('vcsserver.hg')
46 46
47 47 def _dynamic_capabilities(repo, proto):
48 48 """
49 49 Adds dynamic behavior, so that the capability is only added if the
50 50 extension is enabled in the current ui object.
51 51 """
52 52 if 'largefiles' in dict(extensions(repo.ui)):
53 53 logger.debug('Extension largefiles enabled')
54 54 calc_capabilities = wrapped_capabilities
55 55 else:
56 56 logger.debug('Extension largefiles disabled')
57 57 calc_capabilities = lfproto.capabilitiesorig
58 58 return calc_capabilities(repo, proto)
59 59
60 60 return _dynamic_capabilities
61
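The wrapper above replaces `lfproto.capabilities` with a closure that decides at call time whether to advertise the largefiles capability or fall back to the original behaviour. Reduced to a toy namespace (nothing below is vcsserver or mercurial code, all names are invented), the wrap-and-delegate monkeypatch pattern looks like this:

# Toy illustration of the wrap-and-delegate monkeypatch used above.
class _Namespace(object):
    pass


toy_proto = _Namespace()
toy_proto.capabilities = lambda repo, proto: 'base-capabilities'


def patch_toy_capabilities(extension_enabled):
    wrapped = toy_proto.capabilities

    def dynamic_capabilities(repo, proto):
        caps = wrapped(repo, proto)
        # Only advertise the extra capability when the extension is on.
        if extension_enabled():
            caps += ' largefiles'
        return caps

    toy_proto.capabilities = dynamic_capabilities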
62
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
65 from hgcompat import subrepo
66 from exceptions import SubrepoMergeException
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
72
73 ``ctx`` is the context referring this subrepository in the
74 parent repository.
75
76 ``path`` is the path to this subrepository as seen from
77 innermost repository.
78 """
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
81 self._path = path
82
83 def storeclean(self, path):
84 """
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
87 """
88 return True
89
90 def dirty(self, ignoreupdate=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
95 return False
96
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
100 substate = subrepo.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
103
104 def remove(self):
105 """remove the subrepo
106
107 (should verify the dirstate is not dirty first)
108 """
109 pass
110
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
113 this state
114 """
115 pass
116
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
120
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
123
124 This may be a no-op on some systems.
125 """
126 pass
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
134 }
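Once `subrepo.types` is replaced as above, every subrepo Mercurial instantiates is a `NoOpSubrepo`: state checks report clean, get/push/remove do nothing, and only an actual merge escalates. Because `SubrepoMergeException` is a `functools.partial` over `_make_exception`, the raised object is a plain `Exception` tagged with `_vcs_kind == 'subrepo_merge_error'`. A hedged sketch of checking that behaviour, where the `noop_subrepo` instance is assumed to come from the patched mapping:

# Sketch: detect the blocked subrepo merge via the `_vcs_kind` marker.
def merge_is_blocked(noop_subrepo, state):
    try:
        noop_subrepo.merge(state)
    except Exception as exc:
        return getattr(exc, '_vcs_kind', None) == 'subrepo_merge_error'
    return False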
@@ -1,337 +1,376 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import base64
19 19 import locale
20 20 import logging
21 21 import uuid
22 22 import wsgiref.util
23 23 from itertools import chain
24 24
25 25 import msgpack
26 26 from beaker.cache import CacheManager
27 27 from beaker.util import parse_cache_config_options
28 28 from pyramid.config import Configurator
29 29 from pyramid.wsgi import wsgiapp
30 30
31 31 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
32 32 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
33 33 from vcsserver.echo_stub.echo_app import EchoApp
34 from vcsserver.exceptions import HTTPRepoLocked
34 35 from vcsserver.server import VcsServer
35 36
36 37 try:
37 38 from vcsserver.git import GitFactory, GitRemote
38 39 except ImportError:
39 40 GitFactory = None
40 41 GitRemote = None
41 42 try:
42 43 from vcsserver.hg import MercurialFactory, HgRemote
43 44 except ImportError:
44 45 MercurialFactory = None
45 46 HgRemote = None
46 47 try:
47 48 from vcsserver.svn import SubversionFactory, SvnRemote
48 49 except ImportError:
49 50 SubversionFactory = None
50 51 SvnRemote = None
51 52
52 53 log = logging.getLogger(__name__)
53 54
54 55
55 56 class VCS(object):
56 57 def __init__(self, locale=None, cache_config=None):
57 58 self.locale = locale
58 59 self.cache_config = cache_config
59 60 self._configure_locale()
60 61 self._initialize_cache()
61 62
62 63 if GitFactory and GitRemote:
63 64 git_repo_cache = self.cache.get_cache_region(
64 65 'git', region='repo_object')
65 66 git_factory = GitFactory(git_repo_cache)
66 67 self._git_remote = GitRemote(git_factory)
67 68 else:
68 69 log.info("Git client import failed")
69 70
70 71 if MercurialFactory and HgRemote:
71 72 hg_repo_cache = self.cache.get_cache_region(
72 73 'hg', region='repo_object')
73 74 hg_factory = MercurialFactory(hg_repo_cache)
74 75 self._hg_remote = HgRemote(hg_factory)
75 76 else:
76 77 log.info("Mercurial client import failed")
77 78
78 79 if SubversionFactory and SvnRemote:
79 80 svn_repo_cache = self.cache.get_cache_region(
80 81 'svn', region='repo_object')
81 82 svn_factory = SubversionFactory(svn_repo_cache)
82 83 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
83 84 else:
84 85 log.info("Subversion client import failed")
85 86
86 87 self._vcsserver = VcsServer()
87 88
88 89 def _initialize_cache(self):
89 90 cache_config = parse_cache_config_options(self.cache_config)
90 91 log.info('Initializing beaker cache: %s' % cache_config)
91 92 self.cache = CacheManager(**cache_config)
92 93
93 94 def _configure_locale(self):
94 95 if self.locale:
95 96 log.info('Setting locale: `LC_ALL` to %s' % self.locale)
96 97 else:
97 98 log.info(
98 99 'Configuring locale subsystem based on environment variables')
99 100 try:
100 101 # If self.locale is the empty string, then the locale
101 102 # module will use the environment variables. See the
102 103 # documentation of the package `locale`.
103 104 locale.setlocale(locale.LC_ALL, self.locale)
104 105
105 106 language_code, encoding = locale.getlocale()
106 107 log.info(
107 108 'Locale set to language code "%s" with encoding "%s".',
108 109 language_code, encoding)
109 110 except locale.Error:
110 111 log.exception(
111 112 'Cannot set locale, not configuring the locale system')
112 113
113 114
114 115 class WsgiProxy(object):
115 116 def __init__(self, wsgi):
116 117 self.wsgi = wsgi
117 118
118 119 def __call__(self, environ, start_response):
119 120 input_data = environ['wsgi.input'].read()
120 121 input_data = msgpack.unpackb(input_data)
121 122
122 123 error = None
123 124 try:
124 125 data, status, headers = self.wsgi.handle(
125 126 input_data['environment'], input_data['input_data'],
126 127 *input_data['args'], **input_data['kwargs'])
127 128 except Exception as e:
128 129 data, status, headers = [], None, None
129 130 error = {
130 131 'message': str(e),
131 132 '_vcs_kind': getattr(e, '_vcs_kind', None)
132 133 }
133 134
134 135 start_response(200, {})
135 136 return self._iterator(error, status, headers, data)
136 137
137 138 def _iterator(self, error, status, headers, data):
138 139 initial_data = [
139 140 error,
140 141 status,
141 142 headers,
142 143 ]
143 144
144 145 for d in chain(initial_data, data):
145 146 yield msgpack.packb(d)
146 147
147 148
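`WsgiProxy` streams msgpack frames in a fixed order: error, status, headers, then the raw data chunks. A client would unpack them in the same order; the sketch below assumes `frames` is the iterable of response chunks obtained from whatever HTTP transport is used (the helper name is made up).

# Sketch of consuming the stream produced by WsgiProxy._iterator above.
import msgpack


def read_proxy_response(frames):
    unpacked = [msgpack.unpackb(frame) for frame in frames]
    error, status, headers = unpacked[:3]
    data = unpacked[3:]
    if error is not None:
        raise RuntimeError('%s (kind: %s)' % (
            error['message'], error['_vcs_kind']))
    return status, headers, data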
148 149 class HTTPApplication(object):
149 150 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
150 151
151 152 remote_wsgi = remote_wsgi
152 153 _use_echo_app = False
153 154
154 155 def __init__(self, settings=None):
155 156 self.config = Configurator(settings=settings)
156 157 locale = settings.get('locale', 'en_US.UTF-8')
157 158 vcs = VCS(locale=locale, cache_config=settings)
158 159 self._remotes = {
159 160 'hg': vcs._hg_remote,
160 161 'git': vcs._git_remote,
161 162 'svn': vcs._svn_remote,
162 163 'server': vcs._vcsserver,
163 164 }
164 165 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
165 166 self._use_echo_app = True
166 167 log.warning("Using EchoApp for VCS operations.")
167 168 self.remote_wsgi = remote_wsgi_stub
168 169 self._configure_settings(settings)
169 170 self._configure()
170 171
171 172 def _configure_settings(self, app_settings):
172 173 """
173 174 Configure the settings module.
174 175 """
175 176 git_path = app_settings.get('git_path', None)
176 177 if git_path:
177 178 settings.GIT_EXECUTABLE = git_path
178 179
179 180 def _configure(self):
180 181 self.config.add_renderer(
181 182 name='msgpack',
182 183 factory=self._msgpack_renderer_factory)
183 184
185 self.config.add_route('service', '/_service')
184 186 self.config.add_route('status', '/status')
185 187 self.config.add_route('hg_proxy', '/proxy/hg')
186 188 self.config.add_route('git_proxy', '/proxy/git')
187 189 self.config.add_route('vcs', '/{backend}')
188 190 self.config.add_route('stream_git', '/stream/git/*repo_name')
189 191 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
190 192
191 193 self.config.add_view(
192 194 self.status_view, route_name='status', renderer='json')
195 self.config.add_view(
196 self.service_view, route_name='service', renderer='msgpack')
197
193 198 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
194 199 self.config.add_view(self.git_proxy(), route_name='git_proxy')
195 200 self.config.add_view(
196 201 self.vcs_view, route_name='vcs', renderer='msgpack')
197 202
198 203 self.config.add_view(self.hg_stream(), route_name='stream_hg')
199 204 self.config.add_view(self.git_stream(), route_name='stream_git')
205 self.config.add_view(
206 self.handle_vcs_exception, context=Exception,
207 custom_predicates=[self.is_vcs_exception])
200 208
201 209 def wsgi_app(self):
202 210 return self.config.make_wsgi_app()
203 211
204 212 def vcs_view(self, request):
205 213 remote = self._remotes[request.matchdict['backend']]
206 214 payload = msgpack.unpackb(request.body, use_list=True)
207 215 method = payload.get('method')
208 216 params = payload.get('params')
209 217 wire = params.get('wire')
210 218 args = params.get('args')
211 219 kwargs = params.get('kwargs')
212 220 if wire:
213 221 try:
214 222 wire['context'] = uuid.UUID(wire['context'])
215 223 except KeyError:
216 224 pass
217 225 args.insert(0, wire)
218 226
219 227 try:
220 228 resp = getattr(remote, method)(*args, **kwargs)
221 229 except Exception as e:
222 230 type_ = e.__class__.__name__
223 231 if type_ not in self.ALLOWED_EXCEPTIONS:
224 232 type_ = None
225 233
226 234 resp = {
227 235 'id': payload.get('id'),
228 236 'error': {
229 237 'message': e.message,
230 238 'type': type_
231 239 }
232 240 }
233 241 try:
234 242 resp['error']['_vcs_kind'] = e._vcs_kind
235 243 except AttributeError:
236 244 pass
237 245 else:
238 246 resp = {
239 247 'id': payload.get('id'),
240 248 'result': resp
241 249 }
242 250
243 251 return resp
244 252
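For reference, a hedged client-side sketch of the msgpack RPC payload that vcs_view unpacks; the method, wire, args and kwargs keys are the ones read above, while the host, port and repository path are assumptions.

    import uuid
    import msgpack
    import urllib2

    payload = msgpack.packb({
        'id': str(uuid.uuid4()),
        'method': 'is_path_valid_repository',
        'params': {
            'wire': {'path': '/srv/repos/example', 'context': str(uuid.uuid4())},
            'args': ['/srv/repos/example'],
            'kwargs': {},
        },
    })
    # '/svn' selects the backend through the '/{backend}' route registered above.
    request = urllib2.Request('http://localhost:9900/svn', data=payload)
    reply = msgpack.unpackb(urllib2.urlopen(request).read())
    # reply is either {'id': ..., 'result': ...} or {'id': ..., 'error': {...}}
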
245 253 def status_view(self, request):
246 254 return {'status': 'OK'}
247 255
256 def service_view(self, request):
257 import vcsserver
258 payload = msgpack.unpackb(request.body, use_list=True)
259 resp = {
260 'id': payload.get('id'),
261 'result': dict(
262 version=vcsserver.__version__,
263 config={},
264 payload=payload,
265 )
266 }
267 return resp
268
248 269 def _msgpack_renderer_factory(self, info):
249 270 def _render(value, system):
250 271 value = msgpack.packb(value)
251 272 request = system.get('request')
252 273 if request is not None:
253 274 response = request.response
254 275 ct = response.content_type
255 276 if ct == response.default_content_type:
256 277 response.content_type = 'application/x-msgpack'
257 278 return value
258 279 return _render
259 280
260 281 def hg_proxy(self):
261 282 @wsgiapp
262 283 def _hg_proxy(environ, start_response):
263 284 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
264 285 return app(environ, start_response)
265 286 return _hg_proxy
266 287
267 288 def git_proxy(self):
268 289 @wsgiapp
269 290 def _git_proxy(environ, start_response):
270 291 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
271 292 return app(environ, start_response)
272 293 return _git_proxy
273 294
274 295 def hg_stream(self):
275 296 if self._use_echo_app:
276 297 @wsgiapp
277 298 def _hg_stream(environ, start_response):
278 299 app = EchoApp('fake_path', 'fake_name', None)
279 300 return app(environ, start_response)
280 301 return _hg_stream
281 302 else:
282 303 @wsgiapp
283 304 def _hg_stream(environ, start_response):
284 305 repo_path = environ['HTTP_X_RC_REPO_PATH']
285 306 repo_name = environ['HTTP_X_RC_REPO_NAME']
286 307 packed_config = base64.b64decode(
287 308 environ['HTTP_X_RC_REPO_CONFIG'])
288 309 config = msgpack.unpackb(packed_config)
289 310 app = scm_app.create_hg_wsgi_app(
290 311 repo_path, repo_name, config)
291 312
292 313 # Consistent path information for hgweb
293 314 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
294 315 environ['REPO_NAME'] = repo_name
295 316 return app(environ, ResponseFilter(start_response))
296 317 return _hg_stream
297 318
298 319 def git_stream(self):
299 320 if self._use_echo_app:
300 321 @wsgiapp
301 322 def _git_stream(environ, start_response):
302 323 app = EchoApp('fake_path', 'fake_name', None)
303 324 return app(environ, start_response)
304 325 return _git_stream
305 326 else:
306 327 @wsgiapp
307 328 def _git_stream(environ, start_response):
308 329 repo_path = environ['HTTP_X_RC_REPO_PATH']
309 330 repo_name = environ['HTTP_X_RC_REPO_NAME']
310 331 packed_config = base64.b64decode(
311 332 environ['HTTP_X_RC_REPO_CONFIG'])
312 333 config = msgpack.unpackb(packed_config)
313 334
314 335 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
315 336 app = scm_app.create_git_wsgi_app(
316 337 repo_path, repo_name, config)
317 338 return app(environ, start_response)
318 339 return _git_stream
319 340
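The X-RC-* headers read by both stream handlers are produced on the calling side; below is a hedged sketch of that encoding. The repository path, name and config items are made up for illustration.

    import base64
    import msgpack

    repo_config = [('phases', 'publish', 'true')]   # illustrative config items
    environ_extras = {
        'HTTP_X_RC_REPO_PATH': '/srv/repos/example',
        'HTTP_X_RC_REPO_NAME': 'example',
        'HTTP_X_RC_REPO_CONFIG': base64.b64encode(msgpack.packb(repo_config)),
        'HTTP_X_RC_PATH_INFO': '/example',
    }
    # The handlers above b64decode + msgpack.unpackb HTTP_X_RC_REPO_CONFIG and
    # hand the result to scm_app.create_hg_wsgi_app / create_git_wsgi_app.
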
341 def is_vcs_exception(self, context, request):
342 """
343 View predicate that returns true if the context object is a VCS
344 exception.
345 """
346 return hasattr(context, '_vcs_kind')
347
348 def handle_vcs_exception(self, exception, request):
349 if exception._vcs_kind == 'repo_locked':
350 # Get custom repo-locked status code if present.
351 status_code = request.headers.get('X-RC-Locked-Status-Code')
352 return HTTPRepoLocked(
353 title=exception.message, status_code=status_code)
354
355 # Re-raise exception if we can not handle it.
356 raise exception
357
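A minimal sketch of the contract behind is_vcs_exception / handle_vcs_exception: any exception object carrying a _vcs_kind attribute is routed to the handler, and the repo_locked kind is answered with HTTPRepoLocked. The exception class below is illustrative, not part of the diff.

    class RepoLockedError(Exception):
        """Illustrative error, as a backend might raise it."""
        _vcs_kind = 'repo_locked'

    # Raised anywhere inside a view, this would match the custom predicate
    # above and be turned into an HTTPRepoLocked response, optionally using
    # the X-RC-Locked-Status-Code request header as the status code.
    # raise RepoLockedError('Repository is locked by another operation')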
320 358
321 359 class ResponseFilter(object):
322 360
323 361 def __init__(self, start_response):
324 362 self._start_response = start_response
325 363
326 364 def __call__(self, status, response_headers, exc_info=None):
327 365 headers = tuple(
328 366 (h, v) for h, v in response_headers
329 367 if not wsgiref.util.is_hop_by_hop(h))
330 368 return self._start_response(status, headers, exc_info)
331 369
332 370
333 371 def main(global_config, **settings):
334 372 if MercurialFactory:
335 373 hgpatches.patch_largefiles_capabilities()
374 hgpatches.patch_subrepo_type_mapping()
336 375 app = HTTPApplication(settings=settings)
337 376 return app.wsgi_app()
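A hypothetical way to build and serve the application with the main() factory above, outside of PasteDeploy; the settings keys mirror the ones read in this file, while the values, host and port are assumptions.

    from wsgiref.simple_server import make_server

    settings = {
        'locale': 'en_US.UTF-8',
        'dev.use_echo_app': 'false',
        'beaker.cache.regions': 'repo_object',
        'beaker.cache.repo_object.type': 'memory',
        'beaker.cache.repo_object.expire': '10',
    }
    application = main({}, **settings)
    make_server('127.0.0.1', 9900, application).serve_forever()
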
@@ -1,507 +1,508 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import atexit
19 19 import locale
20 20 import logging
21 21 import optparse
22 22 import os
23 23 import textwrap
24 24 import threading
25 25 import sys
26 26
27 27 import configobj
28 28 import Pyro4
29 29 from beaker.cache import CacheManager
30 30 from beaker.util import parse_cache_config_options
31 31
32 32 try:
33 33 from vcsserver.git import GitFactory, GitRemote
34 34 except ImportError:
35 35 GitFactory = None
36 36 GitRemote = None
37 37 try:
38 38 from vcsserver.hg import MercurialFactory, HgRemote
39 39 except ImportError:
40 40 MercurialFactory = None
41 41 HgRemote = None
42 42 try:
43 43 from vcsserver.svn import SubversionFactory, SvnRemote
44 44 except ImportError:
45 45 SubversionFactory = None
46 46 SvnRemote = None
47 47
48 48 from server import VcsServer
49 49 from vcsserver import hgpatches, remote_wsgi, settings
50 50 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
51 51
52 52 log = logging.getLogger(__name__)
53 53
54 54 HERE = os.path.dirname(os.path.abspath(__file__))
55 55 SERVER_RUNNING_FILE = None
56 56
57 57
58 58 # HOOKS - inspired by gunicorn #
59 59
60 60 def when_ready(server):
61 61 """
62 62 Called just after the server is started.
63 63 """
64 64
65 65 def _remove_server_running_file():
66 66 if os.path.isfile(SERVER_RUNNING_FILE):
67 67 os.remove(SERVER_RUNNING_FILE)
68 68
69 69 # write the PID of this process into the running file, if configured
70 70 if SERVER_RUNNING_FILE:
71 71 with open(SERVER_RUNNING_FILE, 'wb') as f:
72 72 f.write(str(os.getpid()))
73 73 # register cleanup of that file when server exits
74 74 atexit.register(_remove_server_running_file)
75 75
76 76
77 77 class LazyWriter(object):
78 78 """
79 79 File-like object that opens a file lazily when it is first written
80 80 to.
81 81 """
82 82
83 83 def __init__(self, filename, mode='w'):
84 84 self.filename = filename
85 85 self.fileobj = None
86 86 self.lock = threading.Lock()
87 87 self.mode = mode
88 88
89 89 def open(self):
90 90 if self.fileobj is None:
91 91 with self.lock:
92 92 self.fileobj = open(self.filename, self.mode)
93 93 return self.fileobj
94 94
95 95 def close(self):
96 96 fileobj = self.fileobj
97 97 if fileobj is not None:
98 98 fileobj.close()
99 99
100 100 def __del__(self):
101 101 self.close()
102 102
103 103 def write(self, text):
104 104 fileobj = self.open()
105 105 fileobj.write(text)
106 106 fileobj.flush()
107 107
108 108 def writelines(self, text):
109 109 fileobj = self.open()
110 110 fileobj.writelines(text)
111 111 fileobj.flush()
112 112
113 113 def flush(self):
114 114 self.open().flush()
115 115
116 116
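LazyWriter only opens its file on the first write, which is what _configure_logfile further down relies on when it swaps out stdout and stderr; a tiny illustrative usage (the log path is an assumption):

    import sys

    stdout_log = LazyWriter('/var/log/vcsserver.log', 'a')
    sys.stdout = stdout_log
    sys.stderr = stdout_log
    print('this line is appended to the log file on first write')
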
117 117 class Application(object):
118 118 """
119 119 Represents the vcs server application.
120 120
121 121 This object is responsible for initializing the application and all needed
122 122 libraries. After that it hooks together the different objects and provides
123 123 them a way to access things like configuration.
124 124 """
125 125
126 126 def __init__(
127 127 self, host, port=None, locale='', threadpool_size=None,
128 128 timeout=None, cache_config=None, remote_wsgi_=None):
129 129
130 130 self.host = host
131 131 self.port = int(port) or settings.PYRO_PORT
132 132 self.threadpool_size = (
133 133 int(threadpool_size) if threadpool_size else None)
134 134 self.locale = locale
135 135 self.timeout = timeout
136 136 self.cache_config = cache_config
137 137 self.remote_wsgi = remote_wsgi_ or remote_wsgi
138 138
139 139 def init(self):
140 140 """
141 141 Configure and hook together all relevant objects.
142 142 """
143 143 self._configure_locale()
144 144 self._configure_pyro()
145 145 self._initialize_cache()
146 146 self._create_daemon_and_remote_objects(host=self.host, port=self.port)
147 147
148 148 def run(self):
149 149 """
150 150 Start the main loop of the application.
151 151 """
152 152
153 153 if hasattr(os, 'getpid'):
154 154 log.info('Starting %s in PID %i.', __name__, os.getpid())
155 155 else:
156 156 log.info('Starting %s.', __name__)
157 157 if SERVER_RUNNING_FILE:
158 158 log.info('PID file written as %s', SERVER_RUNNING_FILE)
159 159 else:
160 160 log.info('No PID file written by default.')
161 161 when_ready(self)
162 162 try:
163 163 self._pyrodaemon.requestLoop(
164 164 loopCondition=lambda: not self._vcsserver._shutdown)
165 165 finally:
166 166 self._pyrodaemon.shutdown()
167 167
168 168 def _configure_locale(self):
169 169 if self.locale:
170 170 log.info('Setting locale `LC_ALL` to %s' % self.locale)
171 171 else:
172 172 log.info(
173 173 'Configuring locale subsystem based on environment variables')
174 174
175 175 try:
176 176 # If self.locale is the empty string, then the locale
177 177 # module will use the environment variables. See the
178 178 # documentation of the package `locale`.
179 179 locale.setlocale(locale.LC_ALL, self.locale)
180 180
181 181 language_code, encoding = locale.getlocale()
182 182 log.info(
183 183 'Locale set to language code "%s" with encoding "%s".',
184 184 language_code, encoding)
185 185 except locale.Error:
186 186 log.exception(
187 187 'Cannot set locale, not configuring the locale system')
188 188
189 189 def _configure_pyro(self):
190 190 if self.threadpool_size is not None:
191 191 log.info("Threadpool size set to %s", self.threadpool_size)
192 192 Pyro4.config.THREADPOOL_SIZE = self.threadpool_size
193 193 if self.timeout not in (None, 0, 0.0, '0'):
194 194 log.info("Timeout for RPC calls set to %s seconds", self.timeout)
195 195 Pyro4.config.COMMTIMEOUT = float(self.timeout)
196 196 Pyro4.config.SERIALIZER = 'pickle'
197 197 Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
198 198 Pyro4.config.SOCK_REUSE = True
199 199 # Uncomment the next line when you need to debug remote errors
200 200 # Pyro4.config.DETAILED_TRACEBACK = True
201 201
202 202 def _initialize_cache(self):
203 203 cache_config = parse_cache_config_options(self.cache_config)
204 204 log.info('Initializing beaker cache: %s' % cache_config)
205 205 self.cache = CacheManager(**cache_config)
206 206
207 207 def _create_daemon_and_remote_objects(self, host='localhost',
208 208 port=settings.PYRO_PORT):
209 209 daemon = Pyro4.Daemon(host=host, port=port)
210 210
211 211 self._vcsserver = VcsServer()
212 212 uri = daemon.register(
213 213 self._vcsserver, objectId=settings.PYRO_VCSSERVER)
214 214 log.info("Object registered = %s", uri)
215 215
216 216 if GitFactory and GitRemote:
217 217 git_repo_cache = self.cache.get_cache_region('git', region='repo_object')
218 218 git_factory = GitFactory(git_repo_cache)
219 219 self._git_remote = GitRemote(git_factory)
220 220 uri = daemon.register(self._git_remote, objectId=settings.PYRO_GIT)
221 221 log.info("Object registered = %s", uri)
222 222 else:
223 223 log.info("Git client import failed")
224 224
225 225 if MercurialFactory and HgRemote:
226 226 hg_repo_cache = self.cache.get_cache_region('hg', region='repo_object')
227 227 hg_factory = MercurialFactory(hg_repo_cache)
228 228 self._hg_remote = HgRemote(hg_factory)
229 229 uri = daemon.register(self._hg_remote, objectId=settings.PYRO_HG)
230 230 log.info("Object registered = %s", uri)
231 231 else:
232 232 log.info("Mercurial client import failed")
233 233
234 234 if SubversionFactory and SvnRemote:
235 235 svn_repo_cache = self.cache.get_cache_region('svn', region='repo_object')
236 236 svn_factory = SubversionFactory(svn_repo_cache)
237 237 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
238 238 uri = daemon.register(self._svn_remote, objectId=settings.PYRO_SVN)
239 239 log.info("Object registered = %s", uri)
240 240 else:
241 241 log.info("Subversion client import failed")
242 242
243 243 self._git_remote_wsgi = self.remote_wsgi.GitRemoteWsgi()
244 244 uri = daemon.register(self._git_remote_wsgi,
245 245 objectId=settings.PYRO_GIT_REMOTE_WSGI)
246 246 log.info("Object registered = %s", uri)
247 247
248 248 self._hg_remote_wsgi = self.remote_wsgi.HgRemoteWsgi()
249 249 uri = daemon.register(self._hg_remote_wsgi,
250 250 objectId=settings.PYRO_HG_REMOTE_WSGI)
251 251 log.info("Object registered = %s", uri)
252 252
253 253 self._pyrodaemon = daemon
254 254
255 255
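For comparison with the HTTP variant, a hedged sketch of how a client would reach the objects registered above over Pyro4; the host and the pickle serializer mirror the daemon configuration, the repository path is an assumption.

    import Pyro4
    from vcsserver import settings

    Pyro4.config.SERIALIZER = 'pickle'   # must match the server side
    uri = 'PYRO:%s@localhost:%s' % (settings.PYRO_GIT, settings.PYRO_PORT)
    git_remote = Pyro4.Proxy(uri)
    # Remote calls mirror the GitRemote API, the first argument being the
    # wire dict, e.g.:
    # git_remote.some_method({'path': '/srv/repos/example'}, ...)
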
256 256 class VcsServerCommand(object):
257 257
258 258 usage = '%prog'
259 259 description = """
260 260 Runs the VCS server
261 261 """
262 262 default_verbosity = 1
263 263
264 264 parser = optparse.OptionParser(
265 265 usage,
266 266 description=textwrap.dedent(description)
267 267 )
268 268 parser.add_option(
269 269 '--host',
270 270 type="str",
271 271 dest="host",
272 272 )
273 273 parser.add_option(
274 274 '--port',
275 275 type="int",
276 276 dest="port"
277 277 )
278 278 parser.add_option(
279 279 '--running-file',
280 280 dest='running_file',
281 281 metavar='RUNNING_FILE',
282 282 help="Create a running file after the server is initalized with "
283 283 "stored PID of process"
284 284 )
285 285 parser.add_option(
286 286 '--locale',
287 287 dest='locale',
288 288 help="Allows to set the locale, e.g. en_US.UTF-8",
289 289 default=""
290 290 )
291 291 parser.add_option(
292 292 '--log-file',
293 293 dest='log_file',
294 294 metavar='LOG_FILE',
295 295 help="Save output to the given log file (redirects stdout)"
296 296 )
297 297 parser.add_option(
298 298 '--log-level',
299 299 dest="log_level",
300 300 metavar="LOG_LEVEL",
301 301 help="use LOG_LEVEL to set log level "
302 302 "(debug,info,warning,error,critical)"
303 303 )
304 304 parser.add_option(
305 305 '--threadpool',
306 306 dest='threadpool_size',
307 307 type='int',
308 308 help="Set the size of the threadpool used to communicate with the "
309 309 "WSGI workers. This should be at least 6 times the number of "
310 310 "WSGI worker processes."
311 311 )
312 312 parser.add_option(
313 313 '--timeout',
314 314 dest='timeout',
315 315 type='float',
316 316 help="Set the timeout for RPC communication in seconds."
317 317 )
318 318 parser.add_option(
319 319 '--config',
320 320 dest='config_file',
321 321 type='string',
322 322 help="Configuration file for vcsserver."
323 323 )
324 324
325 325 def __init__(self, argv, quiet=False):
326 326 self.options, self.args = self.parser.parse_args(argv[1:])
327 327 if quiet:
328 328 self.options.verbose = 0
329 329
330 330 def _get_file_config(self):
331 331 ini_conf = {}
332 332 conf = configobj.ConfigObj(self.options.config_file)
333 333 if 'DEFAULT' in conf:
334 334 ini_conf = conf['DEFAULT']
335 335
336 336 return ini_conf
337 337
338 338 def _show_config(self, vcsserver_config):
339 339 order = [
340 340 'config_file',
341 341 'host',
342 342 'port',
343 343 'log_file',
344 344 'log_level',
345 345 'locale',
346 346 'threadpool_size',
347 347 'timeout',
348 348 'cache_config',
349 349 ]
350 350
351 351 def sorter(k):
352 352 return dict([(y, x) for x, y in enumerate(order)]).get(k)
353 353
354 354 _config = []
355 355 for k in sorted(vcsserver_config.keys(), key=sorter):
356 356 v = vcsserver_config[k]
358 358 # construct a padded key for display, e.g. '%-20s' % (key + ':')
358 358 k_formatted = ('%-'+str(len(max(order, key=len))+1)+'s') % (k+':')
359 359 _config.append(' * %s %s' % (k_formatted, v))
360 360 log.info('\n[vcsserver configuration]:\n'+'\n'.join(_config))
361 361
362 362 def _get_vcsserver_configuration(self):
363 363 _defaults = {
364 364 'config_file': None,
365 365 'git_path': 'git',
366 366 'host': 'localhost',
367 367 'port': settings.PYRO_PORT,
368 368 'log_file': None,
369 369 'log_level': 'debug',
370 370 'locale': None,
371 371 'threadpool_size': 16,
372 372 'timeout': None,
373 373
374 374 # Development support
375 375 'dev.use_echo_app': False,
376 376
378 378 # caches, beaker style config
378 378 'beaker.cache.regions': 'repo_object',
379 379 'beaker.cache.repo_object.expire': '10',
380 380 'beaker.cache.repo_object.type': 'memory',
381 381 }
382 382 config = {}
383 383 config.update(_defaults)
384 384 # overwrite defaults with one loaded from file
385 385 config.update(self._get_file_config())
386 386
387 387 # overwrite with self.option which has the top priority
388 388 for k, v in self.options.__dict__.items():
389 389 if v or v == 0:
390 390 config[k] = v
391 391
392 392 # clear all "extra" keys if they are somehow passed,
393 393 # we only want defaults, so any extra stuff from self.options is cleared
394 394 # except beaker stuff which needs to be dynamic
395 395 for k in [k for k in config.copy().keys() if not k.startswith('beaker.cache.')]:
396 396 if k not in _defaults:
397 397 del config[k]
398 398
399 399 # group together the cache into one key.
400 400 # Needed further for beaker lib configuration
401 401 _k = {}
402 402 for k in [k for k in config.copy() if k.startswith('beaker.cache.')]:
403 403 _k[k] = config.pop(k)
404 404 config['cache_config'] = _k
405 405
406 406 return config
407 407
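The file handed in via --config is read with configobj and only its [DEFAULT] section is used, so a hedged example matching the defaults above would look like this (all values illustrative):

    [DEFAULT]
    host = localhost
    port = 9900
    locale = en_US.UTF-8
    threadpool_size = 16
    timeout = 60
    beaker.cache.regions = repo_object
    beaker.cache.repo_object.type = memory
    beaker.cache.repo_object.expire = 10
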
408 408 def out(self, msg): # pragma: no cover
409 409 if self.options.verbose > 0:
410 410 print(msg)
411 411
412 412 def run(self): # pragma: no cover
413 413 vcsserver_config = self._get_vcsserver_configuration()
414 414
415 415 # Ensure the log file is writeable
416 416 if vcsserver_config['log_file']:
417 417 stdout_log = self._configure_logfile()
418 418 else:
419 419 stdout_log = None
420 420
421 421 # set PID file with running lock
422 422 if self.options.running_file:
423 423 global SERVER_RUNNING_FILE
424 424 SERVER_RUNNING_FILE = self.options.running_file
425 425
426 426 # configure basic logging, then optionally logging from the config file
427 427 self._configure_logging(level=vcsserver_config['log_level'],
428 428 stream=stdout_log)
429 429 if self.options.config_file:
430 430 if not os.path.isfile(self.options.config_file):
431 431 raise OSError('File %s does not exist' %
432 432 self.options.config_file)
433 433
434 434 self._configure_file_logging(self.options.config_file)
435 435
436 436 self._configure_settings(vcsserver_config)
437 437
438 438 # display current configuration of vcsserver
439 439 self._show_config(vcsserver_config)
440 440
441 441 if not vcsserver_config['dev.use_echo_app']:
442 442 remote_wsgi_mod = remote_wsgi
443 443 else:
444 444 log.warning("Using EchoApp for VCS endpoints.")
445 445 remote_wsgi_mod = remote_wsgi_stub
446 446
447 447 app = Application(
448 448 host=vcsserver_config['host'],
449 449 port=vcsserver_config['port'],
450 450 locale=vcsserver_config['locale'],
451 451 threadpool_size=vcsserver_config['threadpool_size'],
452 452 timeout=vcsserver_config['timeout'],
453 453 cache_config=vcsserver_config['cache_config'],
454 454 remote_wsgi_=remote_wsgi_mod)
455 455 app.init()
456 456 app.run()
457 457
458 458 def _configure_logging(self, level, stream=None):
459 459 _format = (
460 460 '%(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s')
461 461 levels = {
462 462 'debug': logging.DEBUG,
463 463 'info': logging.INFO,
464 464 'warning': logging.WARNING,
465 465 'error': logging.ERROR,
466 466 'critical': logging.CRITICAL,
467 467 }
468 468 try:
469 469 level = levels[level]
470 470 except KeyError:
471 471 raise AttributeError(
472 472 'Invalid log level, please use one of %s' % (levels.keys(),))
473 473 logging.basicConfig(format=_format, stream=stream, level=level)
474 474 logging.getLogger('Pyro4').setLevel(level)
475 475
476 476 def _configure_file_logging(self, config):
477 477 import logging.config
478 478 try:
479 479 logging.config.fileConfig(config)
480 480 except Exception as e:
481 481 log.warning('Failed to configure logging based on given '
482 482 'config file. Error: %s' % e)
483 483
484 484 def _configure_logfile(self):
485 485 try:
486 486 writeable_log_file = open(self.options.log_file, 'a')
487 487 except IOError as ioe:
488 488 msg = 'Error: Unable to write to log file: %s' % ioe
489 489 raise ValueError(msg)
490 490 writeable_log_file.close()
491 491 stdout_log = LazyWriter(self.options.log_file, 'a')
492 492 sys.stdout = stdout_log
493 493 sys.stderr = stdout_log
494 494 return stdout_log
495 495
496 496 def _configure_settings(self, config):
497 497 """
498 498 Configure the settings module based on the given `config`.
499 499 """
500 500 settings.GIT_EXECUTABLE = config['git_path']
501 501
502 502
503 503 def main(argv=sys.argv, quiet=False):
504 504 if MercurialFactory:
505 505 hgpatches.patch_largefiles_capabilities()
506 hgpatches.patch_subrepo_type_mapping()
506 507 command = VcsServerCommand(argv, quiet=quiet)
507 508 return command.run()
@@ -1,591 +1,627 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 from __future__ import absolute_import
19 19
20 20 from urllib2 import URLError
21 21 import logging
22 22 import posixpath as vcspath
23 23 import StringIO
24 24 import subprocess
25 25 import urllib
26 26
27 27 import svn.client
28 28 import svn.core
29 29 import svn.delta
30 30 import svn.diff
31 31 import svn.fs
32 32 import svn.repos
33 33
34 34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 36 from vcsserver.base import RepoFactory
36 37
37 38
38 39 log = logging.getLogger(__name__)
39 40
40 41
41 42 # Set of svn compatible version flags.
42 43 # Compare with subversion/svnadmin/svnadmin.c
43 44 svn_compatible_versions = set([
44 45 'pre-1.4-compatible',
45 46 'pre-1.5-compatible',
46 47 'pre-1.6-compatible',
47 48 'pre-1.8-compatible',
48 49 ])
49 50
50 51
52 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
55 try:
56 return func(*args, **kwargs)
57 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
59 log.exception("Unhandled exception in hg remote call")
60 raise_from_original(exceptions.UnhandledException)
61 raise
62 return wrapper
63
64
65 def raise_from_original(new_type):
66 """
67 Raise a new exception type with original args and traceback.
68 """
69 _, original, traceback = sys.exc_info()
70 try:
71 raise new_type(*original.args), None, traceback
72 finally:
73 del traceback
74
75
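A self-contained sketch (Python 2, like the rest of this code base) of the shielding pattern introduced above: unexpected errors are re-raised as a neutral exception while the original traceback is preserved through sys.exc_info(), and errors that already carry a _vcs_kind marker pass through unchanged. All names below are illustrative stand-ins, not part of the diff.

    import sys

    class UnhandledDemoException(Exception):
        _vcs_kind = 'exception'

    def shield(func):
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                if not hasattr(e, '_vcs_kind'):
                    # swap the exception type, keep args and traceback
                    _, original, tb = sys.exc_info()
                    raise UnhandledDemoException(*original.args), None, tb
                raise
        return wrapper

    @shield
    def demo_call():
        raise RuntimeError('low level svn failure')
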
51 76 class SubversionFactory(RepoFactory):
52 77
53 78 def _create_repo(self, wire, create, compatible_version):
54 79 path = svn.core.svn_path_canonicalize(wire['path'])
55 80 if create:
56 81 fs_config = {}
57 82 if compatible_version:
58 83 if compatible_version not in svn_compatible_versions:
59 84 raise Exception('Unknown SVN compatible version "{}"'
60 85 .format(compatible_version))
61 86 log.debug('Create SVN repo with compatible version "%s"',
62 87 compatible_version)
63 88 fs_config[compatible_version] = '1'
64 89 repo = svn.repos.create(path, "", "", None, fs_config)
65 90 else:
66 91 repo = svn.repos.open(path)
67 92 return repo
68 93
69 94 def repo(self, wire, create=False, compatible_version=None):
70 95 def create_new_repo():
71 96 return self._create_repo(wire, create, compatible_version)
72 97
73 98 return self._repo(wire, create_new_repo)
74 99
75 100
76 101
77 102 NODE_TYPE_MAPPING = {
78 103 svn.core.svn_node_file: 'file',
79 104 svn.core.svn_node_dir: 'dir',
80 105 }
81 106
82 107
83 108 class SvnRemote(object):
84 109
85 110 def __init__(self, factory, hg_factory=None):
86 111 self._factory = factory
87 112 # TODO: Remove once we do not use internal Mercurial objects anymore
88 113 # for subversion
89 114 self._hg_factory = hg_factory
90 115
116 @reraise_safe_exceptions
117 def discover_svn_version(self):
118 try:
119 import svn.core
120 svn_ver = svn.core.SVN_VERSION
121 except ImportError:
122 svn_ver = None
123 return svn_ver
124
91 125 def check_url(self, url, config_items):
92 126 # this can throw exception if not installed, but we detect this
93 127 from hgsubversion import svnrepo
94 128
95 129 baseui = self._hg_factory._create_config(config_items)
96 130 # the uuid function returns a valid UUID only for a proper repo,
97 131 # otherwise it throws an exception
98 132 try:
99 133 svnrepo.svnremoterepo(baseui, url).svn.uuid
100 134 except:
101 135 log.debug("Invalid svn url: %s", url)
102 136 raise URLError(
103 137 '"%s" is not a valid Subversion source url.' % (url, ))
104 138 return True
105 139
106 140 def is_path_valid_repository(self, wire, path):
107 141 try:
108 142 svn.repos.open(path)
109 143 except svn.core.SubversionException:
110 144 log.debug("Invalid Subversion path %s", path)
111 145 return False
112 146 return True
113 147
114 148 def lookup(self, wire, revision):
115 149 if revision not in [-1, None, 'HEAD']:
116 150 raise NotImplementedError
117 151 repo = self._factory.repo(wire)
118 152 fs_ptr = svn.repos.fs(repo)
119 153 head = svn.fs.youngest_rev(fs_ptr)
120 154 return head
121 155
122 156 def lookup_interval(self, wire, start_ts, end_ts):
123 157 repo = self._factory.repo(wire)
124 158 fsobj = svn.repos.fs(repo)
125 159 start_rev = None
126 160 end_rev = None
127 161 if start_ts:
128 162 start_ts_svn = apr_time_t(start_ts)
129 163 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
130 164 else:
131 165 start_rev = 1
132 166 if end_ts:
133 167 end_ts_svn = apr_time_t(end_ts)
134 168 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
135 169 else:
136 170 end_rev = svn.fs.youngest_rev(fsobj)
137 171 return start_rev, end_rev
138 172
139 173 def revision_properties(self, wire, revision):
140 174 repo = self._factory.repo(wire)
141 175 fs_ptr = svn.repos.fs(repo)
142 176 return svn.fs.revision_proplist(fs_ptr, revision)
143 177
144 178 def revision_changes(self, wire, revision):
145 179
146 180 repo = self._factory.repo(wire)
147 181 fsobj = svn.repos.fs(repo)
148 182 rev_root = svn.fs.revision_root(fsobj, revision)
149 183
150 184 editor = svn.repos.ChangeCollector(fsobj, rev_root)
151 185 editor_ptr, editor_baton = svn.delta.make_editor(editor)
152 186 base_dir = ""
153 187 send_deltas = False
154 188 svn.repos.replay2(
155 189 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
156 190 editor_ptr, editor_baton, None)
157 191
158 192 added = []
159 193 changed = []
160 194 removed = []
161 195
162 196 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
163 197 for path, change in editor.changes.iteritems():
164 198 # TODO: Decide what to do with directory nodes. Subversion can add
165 199 # empty directories.
200
166 201 if change.item_kind == svn.core.svn_node_dir:
167 202 continue
168 if change.action == svn.repos.CHANGE_ACTION_ADD:
203 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
169 204 added.append(path)
170 elif change.action == svn.repos.CHANGE_ACTION_MODIFY:
205 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
206 svn.repos.CHANGE_ACTION_REPLACE]:
171 207 changed.append(path)
172 elif change.action == svn.repos.CHANGE_ACTION_DELETE:
208 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
173 209 removed.append(path)
174 210 else:
175 211 raise NotImplementedError(
176 212 "Action %s not supported on path %s" % (
177 213 change.action, path))
178 214
179 215 changes = {
180 216 'added': added,
181 217 'changed': changed,
182 218 'removed': removed,
183 219 }
184 220 return changes
185 221
186 222 def node_history(self, wire, path, revision, limit):
187 223 cross_copies = False
188 224 repo = self._factory.repo(wire)
189 225 fsobj = svn.repos.fs(repo)
190 226 rev_root = svn.fs.revision_root(fsobj, revision)
191 227
192 228 history_revisions = []
193 229 history = svn.fs.node_history(rev_root, path)
194 230 history = svn.fs.history_prev(history, cross_copies)
195 231 while history:
196 232 __, node_revision = svn.fs.history_location(history)
197 233 history_revisions.append(node_revision)
198 234 if limit and len(history_revisions) >= limit:
199 235 break
200 236 history = svn.fs.history_prev(history, cross_copies)
201 237 return history_revisions
202 238
203 239 def node_properties(self, wire, path, revision):
204 240 repo = self._factory.repo(wire)
205 241 fsobj = svn.repos.fs(repo)
206 242 rev_root = svn.fs.revision_root(fsobj, revision)
207 243 return svn.fs.node_proplist(rev_root, path)
208 244
209 245 def file_annotate(self, wire, path, revision):
210 246 abs_path = 'file://' + urllib.pathname2url(
211 247 vcspath.join(wire['path'], path))
212 248 file_uri = svn.core.svn_path_canonicalize(abs_path)
213 249
214 250 start_rev = svn_opt_revision_value_t(0)
215 251 peg_rev = svn_opt_revision_value_t(revision)
216 252 end_rev = peg_rev
217 253
218 254 annotations = []
219 255
220 256 def receiver(line_no, revision, author, date, line, pool):
221 257 annotations.append((line_no, revision, line))
222 258
223 259 # TODO: Cannot use blame5, missing typemap function in the swig code
224 260 try:
225 261 svn.client.blame2(
226 262 file_uri, peg_rev, start_rev, end_rev,
227 263 receiver, svn.client.create_context())
228 264 except svn.core.SubversionException as exc:
229 265 log.exception("Error during blame operation.")
230 266 raise Exception(
231 267 "Blame not supported or file does not exist at path %s. "
232 268 "Error %s." % (path, exc))
233 269
234 270 return annotations
235 271
236 272 def get_node_type(self, wire, path, rev=None):
237 273 repo = self._factory.repo(wire)
238 274 fs_ptr = svn.repos.fs(repo)
239 275 if rev is None:
240 276 rev = svn.fs.youngest_rev(fs_ptr)
241 277 root = svn.fs.revision_root(fs_ptr, rev)
242 278 node = svn.fs.check_path(root, path)
243 279 return NODE_TYPE_MAPPING.get(node, None)
244 280
245 281 def get_nodes(self, wire, path, revision=None):
246 282 repo = self._factory.repo(wire)
247 283 fsobj = svn.repos.fs(repo)
248 284 if revision is None:
249 285 revision = svn.fs.youngest_rev(fsobj)
250 286 root = svn.fs.revision_root(fsobj, revision)
251 287 entries = svn.fs.dir_entries(root, path)
252 288 result = []
253 289 for entry_path, entry_info in entries.iteritems():
254 290 result.append(
255 291 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
256 292 return result
257 293
258 294 def get_file_content(self, wire, path, rev=None):
259 295 repo = self._factory.repo(wire)
260 296 fsobj = svn.repos.fs(repo)
261 297 if rev is None:
262 298 rev = svn.fs.youngest_revision(fsobj)
263 299 root = svn.fs.revision_root(fsobj, rev)
264 300 content = svn.core.Stream(svn.fs.file_contents(root, path))
265 301 return content.read()
266 302
267 303 def get_file_size(self, wire, path, revision=None):
268 304 repo = self._factory.repo(wire)
269 305 fsobj = svn.repos.fs(repo)
270 306 if revision is None:
271 307 revision = svn.fs.youngest_revision(fsobj)
272 308 root = svn.fs.revision_root(fsobj, revision)
273 309 size = svn.fs.file_length(root, path)
274 310 return size
275 311
276 312 def create_repository(self, wire, compatible_version=None):
277 313 log.info('Creating Subversion repository in path "%s"', wire['path'])
278 314 self._factory.repo(wire, create=True,
279 315 compatible_version=compatible_version)
280 316
281 317 def import_remote_repository(self, wire, src_url):
282 318 repo_path = wire['path']
283 319 if not self.is_path_valid_repository(wire, repo_path):
284 320 raise Exception(
285 321 "Path %s is not a valid Subversion repository." % repo_path)
286 322 # TODO: johbo: URL checks ?
287 323 rdump = subprocess.Popen(
288 324 ['svnrdump', 'dump', '--non-interactive', src_url],
289 325 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
290 326 load = subprocess.Popen(
291 327 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
292 328
293 329 # TODO: johbo: This can be a very long operation, might be better
294 330 # to track some kind of status and provide an api to check if the
295 331 # import is done.
296 332 rdump.wait()
297 333 load.wait()
298 334
299 335 if rdump.returncode != 0:
300 336 errors = rdump.stderr.read()
301 337 log.error('svnrdump dump failed: statuscode %s: message: %s',
302 338 rdump.returncode, errors)
303 339 reason = 'UNKNOWN'
304 340 if 'svnrdump: E230001:' in errors:
305 341 reason = 'INVALID_CERTIFICATE'
306 342 raise Exception(
307 343 'Failed to dump the remote repository from %s.' % src_url,
308 344 reason)
309 345 if load.returncode != 0:
310 346 raise Exception(
311 347 'Failed to load the dump of remote repository from %s.' %
312 348 (src_url, ))
313 349
314 350 def commit(self, wire, message, author, timestamp, updated, removed):
315 351 assert isinstance(message, str)
316 352 assert isinstance(author, str)
317 353
318 354 repo = self._factory.repo(wire)
319 355 fsobj = svn.repos.fs(repo)
320 356
321 357 rev = svn.fs.youngest_rev(fsobj)
322 358 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
323 359 txn_root = svn.fs.txn_root(txn)
324 360
325 361 for node in updated:
326 362 TxnNodeProcessor(node, txn_root).update()
327 363 for node in removed:
328 364 TxnNodeProcessor(node, txn_root).remove()
329 365
330 366 commit_id = svn.repos.fs_commit_txn(repo, txn)
331 367
332 368 if timestamp:
333 369 apr_time = apr_time_t(timestamp)
334 370 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
335 371 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
336 372
337 373 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
338 374 return commit_id
339 375
340 376 def diff(self, wire, rev1, rev2, path1=None, path2=None,
341 377 ignore_whitespace=False, context=3):
342 378 wire.update(cache=False)
343 379 repo = self._factory.repo(wire)
344 380 diff_creator = SvnDiffer(
345 381 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
346 382 return diff_creator.generate_diff()
347 383
348 384
349 385 class SvnDiffer(object):
350 386 """
351 387 Utility to create diffs based on difflib and the Subversion api
352 388 """
353 389
354 390 binary_content = False
355 391
356 392 def __init__(
357 393 self, repo, src_rev, src_path, tgt_rev, tgt_path,
358 394 ignore_whitespace, context):
359 395 self.repo = repo
360 396 self.ignore_whitespace = ignore_whitespace
361 397 self.context = context
362 398
363 399 fsobj = svn.repos.fs(repo)
364 400
365 401 self.tgt_rev = tgt_rev
366 402 self.tgt_path = tgt_path or ''
367 403 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
368 404 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
369 405
370 406 self.src_rev = src_rev
371 407 self.src_path = src_path or self.tgt_path
372 408 self.src_root = svn.fs.revision_root(fsobj, src_rev)
373 409 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
374 410
375 411 self._validate()
376 412
377 413 def _validate(self):
378 414 if (self.tgt_kind != svn.core.svn_node_none and
379 415 self.src_kind != svn.core.svn_node_none and
380 416 self.src_kind != self.tgt_kind):
381 417 # TODO: johbo: proper error handling
382 418 raise Exception(
383 419 "Source and target are not compatible for diff generation. "
384 420 "Source type: %s, target type: %s" %
385 421 (self.src_kind, self.tgt_kind))
386 422
387 423 def generate_diff(self):
388 424 buf = StringIO.StringIO()
389 425 if self.tgt_kind == svn.core.svn_node_dir:
390 426 self._generate_dir_diff(buf)
391 427 else:
392 428 self._generate_file_diff(buf)
393 429 return buf.getvalue()
394 430
395 431 def _generate_dir_diff(self, buf):
396 432 editor = DiffChangeEditor()
397 433 editor_ptr, editor_baton = svn.delta.make_editor(editor)
398 434 svn.repos.dir_delta2(
399 435 self.src_root,
400 436 self.src_path,
401 437 '', # src_entry
402 438 self.tgt_root,
403 439 self.tgt_path,
404 440 editor_ptr, editor_baton,
405 441 authorization_callback_allow_all,
406 442 False, # text_deltas
407 443 svn.core.svn_depth_infinity, # depth
408 444 False, # entry_props
409 445 False, # ignore_ancestry
410 446 )
411 447
412 448 for path, __, change in sorted(editor.changes):
413 449 self._generate_node_diff(
414 450 buf, change, path, self.tgt_path, path, self.src_path)
415 451
416 452 def _generate_file_diff(self, buf):
417 453 change = None
418 454 if self.src_kind == svn.core.svn_node_none:
419 455 change = "add"
420 456 elif self.tgt_kind == svn.core.svn_node_none:
421 457 change = "delete"
422 458 tgt_base, tgt_path = vcspath.split(self.tgt_path)
423 459 src_base, src_path = vcspath.split(self.src_path)
424 460 self._generate_node_diff(
425 461 buf, change, tgt_path, tgt_base, src_path, src_base)
426 462
427 463 def _generate_node_diff(
428 464 self, buf, change, tgt_path, tgt_base, src_path, src_base):
429 465 tgt_full_path = vcspath.join(tgt_base, tgt_path)
430 466 src_full_path = vcspath.join(src_base, src_path)
431 467
432 468 self.binary_content = False
433 469 mime_type = self._get_mime_type(tgt_full_path)
434 470 if mime_type and not mime_type.startswith('text'):
435 471 self.binary_content = True
436 472 buf.write("=" * 67 + '\n')
437 473 buf.write("Cannot display: file marked as a binary type.\n")
438 474 buf.write("svn:mime-type = %s\n" % mime_type)
439 475 buf.write("Index: %s\n" % (tgt_path, ))
440 476 buf.write("=" * 67 + '\n')
441 477 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
442 478 'tgt_path': tgt_path})
443 479
444 480 if change == 'add':
445 481 # TODO: johbo: SVN is missing a zero here compared to git
446 482 buf.write("new file mode 10644\n")
447 483 buf.write("--- /dev/null\t(revision 0)\n")
448 484 src_lines = []
449 485 else:
450 486 if change == 'delete':
451 487 buf.write("deleted file mode 10644\n")
452 488 buf.write("--- a/%s\t(revision %s)\n" % (
453 489 src_path, self.src_rev))
454 490 src_lines = self._svn_readlines(self.src_root, src_full_path)
455 491
456 492 if change == 'delete':
457 493 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
458 494 tgt_lines = []
459 495 else:
460 496 buf.write("+++ b/%s\t(revision %s)\n" % (
461 497 tgt_path, self.tgt_rev))
462 498 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
463 499
464 500 if not self.binary_content:
465 501 udiff = svn_diff.unified_diff(
466 502 src_lines, tgt_lines, context=self.context,
467 503 ignore_blank_lines=self.ignore_whitespace,
468 504 ignore_case=False,
469 505 ignore_space_changes=self.ignore_whitespace)
470 506 buf.writelines(udiff)
471 507
472 508 def _get_mime_type(self, path):
473 509 try:
474 510 mime_type = svn.fs.node_prop(
475 511 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
476 512 except svn.core.SubversionException:
477 513 mime_type = svn.fs.node_prop(
478 514 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
479 515 return mime_type
480 516
481 517 def _svn_readlines(self, fs_root, node_path):
482 518 if self.binary_content:
483 519 return []
484 520 node_kind = svn.fs.check_path(fs_root, node_path)
485 521 if node_kind not in (
486 522 svn.core.svn_node_file, svn.core.svn_node_symlink):
487 523 return []
488 524 content = svn.core.Stream(
489 525 svn.fs.file_contents(fs_root, node_path)).read()
490 526 return content.splitlines(True)
491 527
492 528
493 529 class DiffChangeEditor(svn.delta.Editor):
494 530 """
495 531 Records changes between two given revisions
496 532 """
497 533
498 534 def __init__(self):
499 535 self.changes = []
500 536
501 537 def delete_entry(self, path, revision, parent_baton, pool=None):
502 538 self.changes.append((path, None, 'delete'))
503 539
504 540 def add_file(
505 541 self, path, parent_baton, copyfrom_path, copyfrom_revision,
506 542 file_pool=None):
507 543 self.changes.append((path, 'file', 'add'))
508 544
509 545 def open_file(self, path, parent_baton, base_revision, file_pool=None):
510 546 self.changes.append((path, 'file', 'change'))
511 547
512 548
513 549 def authorization_callback_allow_all(root, path, pool):
514 550 return True
515 551
516 552
517 553 class TxnNodeProcessor(object):
518 554 """
519 555 Utility to process the change of one node within a transaction root.
520 556
521 557 It encapsulates the knowledge of how to add, update or remove
522 558 a node for a given transaction root. The purpose is to support the method
523 559 `SvnRemote.commit`.
524 560 """
525 561
526 562 def __init__(self, node, txn_root):
527 563 assert isinstance(node['path'], str)
528 564
529 565 self.node = node
530 566 self.txn_root = txn_root
531 567
532 568 def update(self):
533 569 self._ensure_parent_dirs()
534 570 self._add_file_if_node_does_not_exist()
535 571 self._update_file_content()
536 572 self._update_file_properties()
537 573
538 574 def remove(self):
539 575 svn.fs.delete(self.txn_root, self.node['path'])
540 576 # TODO: Clean up directory if empty
541 577
542 578 def _ensure_parent_dirs(self):
543 579 curdir = vcspath.dirname(self.node['path'])
544 580 dirs_to_create = []
545 581 while not self._svn_path_exists(curdir):
546 582 dirs_to_create.append(curdir)
547 583 curdir = vcspath.dirname(curdir)
548 584
549 585 for curdir in reversed(dirs_to_create):
550 586 log.debug('Creating missing directory "%s"', curdir)
551 587 svn.fs.make_dir(self.txn_root, curdir)
552 588
553 589 def _svn_path_exists(self, path):
554 590 path_status = svn.fs.check_path(self.txn_root, path)
555 591 return path_status != svn.core.svn_node_none
556 592
557 593 def _add_file_if_node_does_not_exist(self):
558 594 kind = svn.fs.check_path(self.txn_root, self.node['path'])
559 595 if kind == svn.core.svn_node_none:
560 596 svn.fs.make_file(self.txn_root, self.node['path'])
561 597
562 598 def _update_file_content(self):
563 599 assert isinstance(self.node['content'], str)
564 600 handler, baton = svn.fs.apply_textdelta(
565 601 self.txn_root, self.node['path'], None, None)
566 602 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
567 603
568 604 def _update_file_properties(self):
569 605 properties = self.node.get('properties', {})
570 606 for key, value in properties.iteritems():
571 607 svn.fs.change_node_prop(
572 608 self.txn_root, self.node['path'], key, value)
573 609
574 610
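A hedged sketch of the node mapping that TxnNodeProcessor (and therefore SvnRemote.commit) consumes; only the path, content and properties keys are taken from the code above, while the wire dict, the repository path and the property values are assumptions.

    wire = {'path': '/srv/repos/example-svn-repo'}
    updated = [{
        'path': 'trunk/README.txt',
        'content': 'hello subversion\n',
        'properties': {'svn:eol-style': 'native'},
    }]
    removed = [{'path': 'trunk/obsolete.txt'}]

    # remote = SvnRemote(SubversionFactory(repo_cache))
    # commit_id = remote.commit(
    #     wire, message='update docs', author='editor',
    #     timestamp=None, updated=updated, removed=removed)
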
575 611 def apr_time_t(timestamp):
576 612 """
577 613 Convert a Python timestamp into APR timestamp type apr_time_t
578 614 """
579 615 return timestamp * 1E6
580 616
581 617
582 618 def svn_opt_revision_value_t(num):
583 619 """
584 620 Put `num` into a `svn_opt_revision_value_t` structure.
585 621 """
586 622 value = svn.core.svn_opt_revision_value_t()
587 623 value.number = num
588 624 revision = svn.core.svn_opt_revision_t()
589 625 revision.kind = svn.core.svn_opt_revision_number
590 626 revision.value = value
591 627 return revision