release: Merge default into stable for release preparation
marcink
r108:a1a58274 merge stable
@@ -0,0 +1,63 @@
diff -rup subversion-1.9.4-orig/subversion/include/svn_auth.h subversion-1.9.4/subversion/include/svn_auth.h
--- subversion-1.9.4-orig/subversion/include/svn_auth.h 2015-02-13 12:17:40.000000000 +0100
+++ subversion-1.9.4/subversion/include/svn_auth.h 2016-09-21 12:55:27.000000000 +0200
@@ -943,7 +943,7 @@ svn_auth_get_windows_ssl_server_trust_pr

#endif /* WIN32 && !__MINGW32__ || DOXYGEN */

-#if defined(DARWIN) || defined(DOXYGEN)
+#if defined(SVN_HAVE_KEYCHAIN_SERVICES) || defined(DOXYGEN)
/**
* Set @a *provider to an authentication provider of type @c
* svn_auth_cred_simple_t that gets/sets information from the user's
@@ -984,7 +984,7 @@ void
svn_auth_get_keychain_ssl_client_cert_pw_provider(
svn_auth_provider_object_t **provider,
apr_pool_t *pool);
-#endif /* DARWIN || DOXYGEN */
+#endif /* SVN_HAVE_KEYCHAIN_SERVICES || DOXYGEN */

/* Note that the gnome keyring unlock prompt related items below must be
* declared for all platforms in order to allow SWIG interfaces to be
diff -rup subversion-1.9.4-orig/subversion/libsvn_subr/auth.h subversion-1.9.4/subversion/libsvn_subr/auth.h
--- subversion-1.9.4-orig/subversion/libsvn_subr/auth.h 2015-08-27 06:00:31.000000000 +0200
+++ subversion-1.9.4/subversion/libsvn_subr/auth.h 2016-09-21 12:56:20.000000000 +0200
@@ -103,7 +103,7 @@ svn_auth__get_windows_ssl_server_trust_p
apr_pool_t *pool);
#endif /* WIN32 && !__MINGW32__ || DOXYGEN */

-#if defined(DARWIN) || defined(DOXYGEN)
+#if defined(SVN_HAVE_KEYCHAIN_SERVICES) || defined(DOXYGEN)
/**
* Set @a *provider to an authentication provider of type @c
* svn_auth_cred_simple_t that gets/sets information from the user's
@@ -134,7 +134,7 @@ void
svn_auth__get_keychain_ssl_client_cert_pw_provider(
svn_auth_provider_object_t **provider,
apr_pool_t *pool);
-#endif /* DARWIN || DOXYGEN */
+#endif /* SVN_HAVE_KEYCHAIN_SERVICES || DOXYGEN */

#if !defined(WIN32) || defined(DOXYGEN)
/**
diff -rup subversion-1.9.4-orig/subversion/libsvn_subr/deprecated.c subversion-1.9.4/subversion/libsvn_subr/deprecated.c
--- subversion-1.9.4-orig/subversion/libsvn_subr/deprecated.c 2015-08-27 06:00:31.000000000 +0200
+++ subversion-1.9.4/subversion/libsvn_subr/deprecated.c 2016-09-21 12:57:08.000000000 +0200
@@ -1479,7 +1479,7 @@ svn_auth_get_windows_ssl_server_trust_pr
#endif /* WIN32 && !__MINGW32__ */

/*** From macos_keychain.c ***/
-#if defined(DARWIN)
+#if defined(SVN_HAVE_KEYCHAIN_SERVICES)
void
svn_auth_get_keychain_simple_provider(svn_auth_provider_object_t **provider,
apr_pool_t *pool)
@@ -1494,7 +1494,7 @@ svn_auth_get_keychain_ssl_client_cert_pw
{
svn_auth__get_keychain_ssl_client_cert_pw_provider(provider, pool);
}
-#endif /* DARWIN */
+#endif /* SVN_HAVE_KEYCHAIN_SERVICES */

#if !defined(WIN32)
void
@@ -1,6 +1,6 @@
 [bumpversion]
-current_version = 4.4.2
+current_version = 4.5.0
 message = release: Bump version {current_version} to {new_version}
 
 [bumpversion:file:vcsserver/VERSION]
 
@@ -1,16 +1,14 @@
 [DEFAULT]
 done = false
 
 [task:bump_version]
 done = true
 
 [task:fixes_on_stable]
-done = true
 
 [task:pip2nix_generated]
-done = true
 
 [release]
-state = prepared
-version = 4.4.2
+state = in_progress
+version = 4.5.0
 
@@ -1,111 +1,111 @@
 ################################################################################
 # RhodeCode VCSServer with HTTP Backend - configuration #
 # #
 ################################################################################
 
-[app:main]
-use = egg:rhodecode-vcsserver
-
-pyramid.default_locale_name = en
-pyramid.includes =
-
-# default locale used by VCS systems
-locale = en_US.UTF-8
-
-# cache regions, please don't change
-beaker.cache.regions = repo_object
-beaker.cache.repo_object.type = memorylru
-beaker.cache.repo_object.max_items = 100
-# cache auto-expires after N seconds
-beaker.cache.repo_object.expire = 300
-beaker.cache.repo_object.enabled = true
 
 [server:main]
 ## COMMON ##
 host = 127.0.0.1
 port = 9900
 
 
 ##########################
 ## GUNICORN WSGI SERVER ##
 ##########################
-## run with gunicorn --log-config <inifile.ini> --paste <inifile.ini>
+## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
 use = egg:gunicorn#main
 ## Sets the number of process workers. You must set `instance_id = *`
 ## when this option is set to more than one worker, recommended
 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
 ## The `instance_id = *` must be set in the [app:main] section below
 workers = 2
 ## process name
 proc_name = rhodecode_vcsserver
 ## type of worker class, one of sync, gevent
 ## recommended for bigger setup is using of of other than sync one
 worker_class = sync
 ## The maximum number of simultaneous clients. Valid only for Gevent
 #worker_connections = 10
 ## max number of requests that worker will handle before being gracefully
 ## restarted, could prevent memory leaks
 max_requests = 1000
 max_requests_jitter = 30
 ## amount of time a worker can spend with handling a request before it
 ## gets killed and restarted. Set to 6hrs
 timeout = 21600
 
 
+[app:main]
+use = egg:rhodecode-vcsserver
+
+pyramid.default_locale_name = en
+pyramid.includes =
+
+## default locale used by VCS systems
+locale = en_US.UTF-8
+
+# cache regions, please don't change
+beaker.cache.regions = repo_object
+beaker.cache.repo_object.type = memorylru
+beaker.cache.repo_object.max_items = 100
+# cache auto-expires after N seconds
+beaker.cache.repo_object.expire = 300
+beaker.cache.repo_object.enabled = true
 
 
 ################################
 ### LOGGING CONFIGURATION ####
 ################################
 [loggers]
 keys = root, vcsserver, pyro4, beaker
 
 [handlers]
 keys = console
 
 [formatters]
 keys = generic
 
 #############
 ## LOGGERS ##
 #############
 [logger_root]
 level = NOTSET
 handlers = console
 
 [logger_vcsserver]
 level = DEBUG
 handlers =
 qualname = vcsserver
 propagate = 1
 
 [logger_beaker]
 level = DEBUG
 handlers =
 qualname = beaker
 propagate = 1
 
 [logger_pyro4]
 level = DEBUG
 handlers =
 qualname = Pyro4
 propagate = 1
 
 
 ##############
 ## HANDLERS ##
 ##############
 
 [handler_console]
 class = StreamHandler
 args = (sys.stderr,)
 level = DEBUG
 formatter = generic
 
 ################
 ## FORMATTERS ##
 ################
 
 [formatter_generic]
 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %Y-%m-%d %H:%M:%S
\ No newline at end of file
+datefmt = %Y-%m-%d %H:%M:%S
@@ -1,146 +1,138 @@
 # Nix environment for the community edition
 #
 # This shall be as lean as possible, just producing the rhodecode-vcsserver
 # derivation. For advanced tweaks to pimp up the development environment we use
 # "shell.nix" so that it does not have to clutter this file.
 
 { pkgs ? (import <nixpkgs> {})
 , pythonPackages ? "python27Packages"
 , pythonExternalOverrides ? self: super: {}
 , doCheck ? true
 }:
 
 let pkgs_ = pkgs; in
 
 let
 pkgs = pkgs_.overridePackages (self: super: {
 # Override subversion derivation to
 # - activate python bindings
-# - set version to 1.8
-subversion = super.subversion18.override {
+subversion = let
+subversionWithPython = super.subversion.override {
 httpSupport = true;
 pythonBindings = true;
 python = self.python27Packages.python;
 };
+in pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
+patches = (oldAttrs.patches or []) ++
+pkgs.lib.optionals pkgs.stdenv.isDarwin [
+# johbo: "import svn.client" fails on darwin currently.
+./pkgs/subversion-1.9.4-darwin.patch
+];
+});
 });
 
 inherit (pkgs.lib) fix extends;
 
 basePythonPackages = with builtins; if isAttrs pythonPackages
 then pythonPackages
 else getAttr pythonPackages pkgs;
 
 elem = builtins.elem;
 basename = path: with pkgs.lib; last (splitString "/" path);
 startsWith = prefix: full: let
 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
 in actualPrefix == prefix;
 
 src-filter = path: type: with pkgs.lib;
 let
 ext = last (splitString "." path);
 in
 !elem (basename path) [
 ".git" ".hg" "__pycache__" ".eggs" "node_modules"
 "build" "data" "tmp"] &&
 !elem ext ["egg-info" "pyc"] &&
 !startsWith "result" path;
 
 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
 
 pythonGeneratedPackages = self: basePythonPackages.override (a: {
 inherit self;
 })
 // (scopedImport {
 self = self;
 super = basePythonPackages;
 inherit pkgs;
 inherit (pkgs) fetchurl fetchgit;
 } ./pkgs/python-packages.nix);
 
 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
 inherit
 basePythonPackages
 pkgs;
 };
 
 version = builtins.readFile ./vcsserver/VERSION;
 
 pythonLocalOverrides = self: super: {
 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
 inherit
 doCheck
 version;
 name = "rhodecode-vcsserver-${version}";
 releaseName = "RhodeCodeVCSServer-${version}";
 src = rhodecode-vcsserver-src;
 
 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
 pkgs.git
 pkgs.subversion
 ]);
 
 # TODO: johbo: Make a nicer way to expose the parts. Maybe
 # pkgs/default.nix?
 passthru = {
 pythonPackages = self;
 };
 
-# Somewhat snappier setup of the development environment
-# TODO: move into shell.nix
-# TODO: think of supporting a stable path again, so that multiple shells
-# can share it.
-shellHook = ''
-# Set locale
-export LC_ALL="en_US.UTF-8"
-
-tmp_path=$(mktemp -d)
-export PATH="$tmp_path/bin:$PATH"
-export PYTHONPATH="$tmp_path/${self.python.sitePackages}:$PYTHONPATH"
-mkdir -p $tmp_path/${self.python.sitePackages}
-python setup.py develop --prefix $tmp_path --allow-hosts ""
-'';
-
 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
 preCheck = ''
 export PATH="$out/bin:$PATH"
 '';
 
 postInstall = ''
 echo "Writing meta information for rccontrol to nix-support/rccontrol"
 mkdir -p $out/nix-support/rccontrol
 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
 echo "DONE: Meta information for rccontrol written"
 
 ln -s ${self.pyramid}/bin/* $out/bin/
 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
 
 # Symlink version control utilities
 #
 # We ensure that always the correct version is available as a symlink.
 # So that users calling them via the profile path will always use the
 # correct version.
 ln -s ${pkgs.git}/bin/git $out/bin
 ln -s ${self.mercurial}/bin/hg $out/bin
 ln -s ${pkgs.subversion}/bin/svn* $out/bin
 
 for file in $out/bin/*; do
 wrapProgram $file \
 --set PATH $PATH \
 --set PYTHONPATH $PYTHONPATH \
 --set PYTHONHASHSEED random
 done
 '';
 
 });
 };
 
 # Apply all overrides and fix the final package set
 myPythonPackages =
 (fix
 (extends pythonExternalOverrides
 (extends pythonLocalOverrides
 (extends pythonOverrides
 pythonGeneratedPackages))));
 
 in myPythonPackages.rhodecode-vcsserver
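Editor's note: default.nix composes the generated package set with three overlays through `fix` and `extends` ("Apply all overrides and fix the final package set" above). A minimal, self-contained sketch of that pattern, with made-up attribute names, shows how an overlay sees both `super` (the set so far) and `self` (the final, fully overridden set):

# Sketch only; `fix` and `extends` are the nixpkgs lib functions used above.
let
  pkgs = import <nixpkgs> {};
  inherit (pkgs.lib) fix extends;

  # Base set: `message` refers to `self.greeting`, i.e. the final value.
  base = self: { greeting = "hello"; message = "${self.greeting} world"; };
  # Overlay: tweaks `greeting` based on what `super` (the set so far) provided.
  overlay = self: super: { greeting = super.greeting + ", vcsserver"; };
in (fix (extends overlay base)).message
# evaluates to "hello, vcsserver world"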
@@ -1,56 +1,57 @@
 # Overrides for the generated python-packages.nix
 #
 # This function is intended to be used as an extension to the generated file
 # python-packages.nix. The main objective is to add needed dependencies of C
 # libraries and tweak the build instructions where needed.
 
 { pkgs, basePythonPackages }:
 
 let
 sed = "sed -i";
 in
 
 self: super: {
 
 subvertpy = super.subvertpy.override (attrs: {
-SVN_PREFIX = "${pkgs.subversion}";
+# TODO: johbo: Remove the "or" once we drop 16.03 support
+SVN_PREFIX = "${pkgs.subversion.dev or pkgs.subversion}";
 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
 pkgs.aprutil
 pkgs.subversion
 ];
 preBuild = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
 ${sed} -e "s/'gcc'/'clang'/" setup.py
 '';
 });
 
 mercurial = super.mercurial.override (attrs: {
 propagatedBuildInputs = attrs.propagatedBuildInputs ++ [
 self.python.modules.curses
 ] ++ pkgs.lib.optional pkgs.stdenv.isDarwin
 pkgs.darwin.apple_sdk.frameworks.ApplicationServices;
 });
 
 pyramid = super.pyramid.override (attrs: {
 postFixup = ''
 wrapPythonPrograms
 # TODO: johbo: "wrapPython" adds this magic line which
 # confuses pserve.
 ${sed} '/import sys; sys.argv/d' $out/bin/.pserve-wrapped
 '';
 });
 
 Pyro4 = super.Pyro4.override (attrs: {
 # TODO: Was not able to generate this version, needs further
 # investigation.
 name = "Pyro4-4.35";
 src = pkgs.fetchurl {
 url = "https://pypi.python.org/packages/source/P/Pyro4/Pyro4-4.35.src.tar.gz";
 md5 = "cbe6cb855f086a0f092ca075005855f3";
 };
 });
 
 # Avoid that setuptools is replaced, this leads to trouble
 # with buildPythonPackage.
 setuptools = basePythonPackages.setuptools;
 
 }
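Editor's note: the new SVN_PREFIX value relies on Nix's `or` fallback on attribute selection. If the subversion derivation exposes a separate `dev` output it is used, otherwise the whole derivation is, which is what keeps the expression working on the older NixOS 16.03 mentioned in the TODO. A tiny illustrative sketch (attribute values are made up):

# Sketch only: `attrset.attr or fallback` yields the fallback when `attr` is missing.
let
  withDev    = { out = "/nix/store/...-subversion"; dev = "/nix/store/...-subversion-dev"; };
  withoutDev = { out = "/nix/store/...-subversion"; };
in {
  a = withDev.dev or withDev.out;       # picks the dev output
  b = withoutDev.dev or withoutDev.out; # falls back to the main output
}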
@@ -1,471 +1,694 @@
+# Generated by pip2nix 0.4.0.dev1
+# See https://github.com/johbo/pip2nix
+
 {
 Beaker = super.buildPythonPackage {
 name = "Beaker-1.7.0";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
 md5 = "386be3f7fe427358881eee4622b428b3";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal ];
 };
 };
 Jinja2 = super.buildPythonPackage {
 name = "Jinja2-2.8";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [MarkupSafe];
 src = fetchurl {
 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
 md5 = "edb51693fe22c53cee5403775c71a99e";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal ];
 };
 };
 Mako = super.buildPythonPackage {
-name = "Mako-1.0.4";
+name = "Mako-1.0.6";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [MarkupSafe];
 src = fetchurl {
-url = "https://pypi.python.org/packages/7a/ae/925434246ee90b42e8ef57d3b30a0ab7caf9a2de3e449b876c56dcb48155/Mako-1.0.4.tar.gz";
-md5 = "c5fc31a323dd4990683d2f2da02d4e20";
+url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
+md5 = "a28e22a339080316b2acc352b9ee631c";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 MarkupSafe = super.buildPythonPackage {
 name = "MarkupSafe-0.23";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal ];
 };
 };
 PasteDeploy = super.buildPythonPackage {
 name = "PasteDeploy-1.5.2";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
 md5 = "352b7205c78c8de4987578d19431af3b";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 Pyro4 = super.buildPythonPackage {
 name = "Pyro4-4.41";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [serpent];
 src = fetchurl {
 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 WebOb = super.buildPythonPackage {
 name = "WebOb-1.3.1";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
 md5 = "20918251c5726956ba8fef22d1556177";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 WebTest = super.buildPythonPackage {
 name = "WebTest-1.4.3";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [WebOb];
 src = fetchurl {
 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
 md5 = "631ce728bed92c681a4020a36adbc353";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
+backports.shutil-get-terminal-size = super.buildPythonPackage {
+name = "backports.shutil-get-terminal-size-1.0.0";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
+md5 = "03267762480bd86b50580dc19dff3c66";
+};
+meta = {
+license = [ pkgs.lib.licenses.mit ];
+};
+};
 configobj = super.buildPythonPackage {
 name = "configobj-5.0.6";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [six];
 src = fetchurl {
 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal ];
 };
 };
+decorator = super.buildPythonPackage {
+name = "decorator-4.0.10";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/13/8a/4eed41e338e8dcc13ca41c94b142d4d20c0de684ee5065523fee406ce76f/decorator-4.0.10.tar.gz";
+md5 = "434b57fdc3230c500716c5aff8896100";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
+};
+};
 dulwich = super.buildPythonPackage {
 name = "dulwich-0.13.0";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
 md5 = "6dede0626657c2bd08f48ca1221eea91";
 };
 meta = {
 license = [ pkgs.lib.licenses.gpl2Plus ];
 };
 };
+enum34 = super.buildPythonPackage {
+name = "enum34-1.1.6";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
+md5 = "5f13a0841a61f7fc295c514490d120d0";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal ];
+};
+};
 greenlet = super.buildPythonPackage {
 name = "greenlet-0.4.7";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/7a/9f/a1a0d9bdf3203ae1502c5a8434fe89d323599d78a106985bc327351a69d4/greenlet-0.4.7.zip";
 md5 = "c2333a8ff30fa75c5d5ec0e67b461086";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 gunicorn = super.buildPythonPackage {
 name = "gunicorn-19.6.0";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
 md5 = "338e5e8a83ea0f0625f768dba4597530";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 hgsubversion = super.buildPythonPackage {
 name = "hgsubversion-1.8.6";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [mercurial subvertpy];
 src = fetchurl {
 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
 md5 = "9310cb266031cf8d0779885782a84a5b";
 };
 meta = {
 license = [ pkgs.lib.licenses.gpl1 ];
 };
 };
 infrae.cache = super.buildPythonPackage {
 name = "infrae.cache-1.0.1";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [Beaker repoze.lru];
 src = fetchurl {
 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
 md5 = "b09076a766747e6ed2a755cc62088e32";
 };
 meta = {
 license = [ pkgs.lib.licenses.zpt21 ];
 };
 };
+ipdb = super.buildPythonPackage {
+name = "ipdb-0.10.1";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [ipython setuptools];
+src = fetchurl {
+url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
+md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal ];
+};
+};
+ipython = super.buildPythonPackage {
+name = "ipython-5.1.0";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
+src = fetchurl {
+url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
+md5 = "47c8122420f65b58784cb4b9b4af35e3";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal ];
+};
+};
+ipython-genutils = super.buildPythonPackage {
+name = "ipython-genutils-0.1.0";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
+md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal ];
+};
+};
 mercurial = super.buildPythonPackage {
 name = "mercurial-3.8.4";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/bc/16/b66eef0b70ee2b4ebb8e76622fe21bbed834606dd8c1bd30d6936ebf6f45/mercurial-3.8.4.tar.gz";
 md5 = "cec2c3db688cb87142809089c6ae13e9";
 };
 meta = {
 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
 };
 };
 mock = super.buildPythonPackage {
 name = "mock-1.0.1";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
 md5 = "869f08d003c289a97c1a6610faf5e913";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal ];
 };
 };
 msgpack-python = super.buildPythonPackage {
 name = "msgpack-python-0.4.6";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/15/ce/ff2840885789ef8035f66cd506ea05bdb228340307d5e71a7b1e3f82224c/msgpack-python-0.4.6.tar.gz";
 md5 = "8b317669314cf1bc881716cccdaccb30";
 };
 meta = {
 license = [ pkgs.lib.licenses.asl20 ];
 };
 };
+pathlib2 = super.buildPythonPackage {
+name = "pathlib2-2.1.0";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [six];
+src = fetchurl {
+url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
+md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
+};
+meta = {
+license = [ pkgs.lib.licenses.mit ];
+};
+};
+pexpect = super.buildPythonPackage {
+name = "pexpect-4.2.1";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [ptyprocess];
+src = fetchurl {
+url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
+md5 = "3694410001a99dff83f0b500a1ca1c95";
+};
+meta = {
+license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
+};
+};
+pickleshare = super.buildPythonPackage {
+name = "pickleshare-0.7.4";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [pathlib2];
+src = fetchurl {
+url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
+md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
+};
+meta = {
+license = [ pkgs.lib.licenses.mit ];
+};
+};
+prompt-toolkit = super.buildPythonPackage {
+name = "prompt-toolkit-1.0.9";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [six wcwidth];
+src = fetchurl {
+url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
+md5 = "a39f91a54308fb7446b1a421c11f227c";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal ];
+};
+};
+ptyprocess = super.buildPythonPackage {
+name = "ptyprocess-0.5.1";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
+md5 = "94e537122914cc9ec9c1eadcd36e73a1";
+};
+meta = {
+license = [ ];
+};
+};
 py = super.buildPythonPackage {
 name = "py-1.4.29";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/2a/bc/a1a4a332ac10069b8e5e25136a35e08a03f01fd6ab03d819889d79a1fd65/py-1.4.29.tar.gz";
 md5 = "c28e0accba523a29b35a48bb703fb96c";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
+pygments = super.buildPythonPackage {
+name = "pygments-2.1.3";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/b8/67/ab177979be1c81bc99c8d0592ef22d547e70bb4c6815c383286ed5dec504/Pygments-2.1.3.tar.gz";
+md5 = "ed3fba2467c8afcda4d317e4ef2c6150";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal ];
+};
+};
 pyramid = super.buildPythonPackage {
 name = "pyramid-1.6.1";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
 src = fetchurl {
 url = "https://pypi.python.org/packages/30/b3/fcc4a2a4800cbf21989e00454b5828cf1f7fe35c63e0810b350e56d4c475/pyramid-1.6.1.tar.gz";
 md5 = "b18688ff3cc33efdbb098a35b45dd122";
 };
 meta = {
 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
 };
 };
 pyramid-jinja2 = super.buildPythonPackage {
 name = "pyramid-jinja2-2.5";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
 src = fetchurl {
 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
 md5 = "07cb6547204ac5e6f0b22a954ccee928";
 };
 meta = {
 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
 };
 };
 pyramid-mako = super.buildPythonPackage {
 name = "pyramid-mako-1.0.2";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [pyramid Mako];
 src = fetchurl {
 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
 };
 meta = {
 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
 };
 };
 pytest = super.buildPythonPackage {
 name = "pytest-2.8.5";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [py];
 src = fetchurl {
 url = "https://pypi.python.org/packages/b1/3d/d7ea9b0c51e0cacded856e49859f0a13452747491e842c236bbab3714afe/pytest-2.8.5.zip";
 md5 = "8493b06f700862f1294298d6c1b715a9";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 repoze.lru = super.buildPythonPackage {
 name = "repoze.lru-0.6";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
 };
 meta = {
 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
 };
 };
 rhodecode-vcsserver = super.buildPythonPackage {
-name = "rhodecode-vcsserver-4.4.2";
-buildInputs = with self; [mock pytest WebTest];
+name = "rhodecode-vcsserver-4.5.0";
+buildInputs = with self; [mock pytest pytest-sugar WebTest];
 doCheck = true;
 propagatedBuildInputs = with self; [configobj dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid Pyro4 simplejson subprocess32 waitress WebOb];
 src = ./.;
 meta = {
 license = [ pkgs.lib.licenses.gpl3 { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
 };
 };
 serpent = super.buildPythonPackage {
 name = "serpent-1.12";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/3b/19/1e0e83b47c09edaef8398655088036e7e67386b5c48770218ebb339fbbd5/serpent-1.12.tar.gz";
 md5 = "05869ac7b062828b34f8f927f0457b65";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 setuptools = super.buildPythonPackage {
 name = "setuptools-20.8.1";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/c4/19/c1bdc88b53da654df43770f941079dbab4e4788c2dcb5658fb86259894c7/setuptools-20.8.1.zip";
 md5 = "fe58a5cac0df20bb83942b252a4b0543";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
+simplegeneric = super.buildPythonPackage {
+name = "simplegeneric-0.8.1";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
+md5 = "f9c1fab00fd981be588fc32759f474e3";
+};
+meta = {
+license = [ pkgs.lib.licenses.zpt21 ];
+};
+};
 simplejson = super.buildPythonPackage {
 name = "simplejson-3.7.2";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
 md5 = "a5fc7d05d4cb38492285553def5d4b46";
 };
 meta = {
-license = [ pkgs.lib.licenses.mit pkgs.lib.licenses.afl21 ];
+license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
 };
 };
 six = super.buildPythonPackage {
 name = "six-1.9.0";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
 md5 = "476881ef4012262dfc8adc645ee786c4";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 subprocess32 = super.buildPythonPackage {
 name = "subprocess32-3.2.6";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
 md5 = "754c5ab9f533e764f931136974b618f1";
 };
 meta = {
 license = [ pkgs.lib.licenses.psfl ];
 };
 };
 subvertpy = super.buildPythonPackage {
 name = "subvertpy-0.9.3";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz";
 md5 = "7b745a47128050ea5a73efcd913ec1cf";
 };
 meta = {
 license = [ pkgs.lib.licenses.lgpl21Plus ];
 };
 };
+traitlets = super.buildPythonPackage {
+name = "traitlets-4.3.1";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
+src = fetchurl {
+url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
+md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
+};
+meta = {
+license = [ pkgs.lib.licenses.bsdOriginal ];
+};
+};
 translationstring = super.buildPythonPackage {
 name = "translationstring-1.3";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
 };
 meta = {
 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
 };
 };
 venusian = super.buildPythonPackage {
 name = "venusian-1.0";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
 md5 = "dccf2eafb7113759d60c86faf5538756";
 };
 meta = {
 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
 };
 };
 waitress = super.buildPythonPackage {
 name = "waitress-0.8.9";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [setuptools];
 src = fetchurl {
 url = "https://pypi.python.org/packages/ee/65/fc9dee74a909a1187ca51e4f15ad9c4d35476e4ab5813f73421505c48053/waitress-0.8.9.tar.gz";
 md5 = "da3f2e62b3676be5dd630703a68e2a04";
 };
 meta = {
 license = [ pkgs.lib.licenses.zpt21 ];
 };
 };
+wcwidth = super.buildPythonPackage {
+name = "wcwidth-0.1.7";
+buildInputs = with self; [];
+doCheck = false;
+propagatedBuildInputs = with self; [];
+src = fetchurl {
+url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
+md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
+};
+meta = {
+license = [ pkgs.lib.licenses.mit ];
+};
+};
 wheel = super.buildPythonPackage {
 name = "wheel-0.29.0";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [];
 src = fetchurl {
 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
 md5 = "555a67e4507cedee23a0deb9651e452f";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 zope.deprecation = super.buildPythonPackage {
 name = "zope.deprecation-4.1.1";
 buildInputs = with self; [];
 doCheck = false;
 propagatedBuildInputs = with self; [setuptools];
446 src = fetchurl {
644 src = fetchurl {
447 url = "https://pypi.python.org/packages/c5/c9/e760f131fcde817da6c186a3f4952b8f206b7eeb269bb6f0836c715c5f20/zope.deprecation-4.1.1.tar.gz";
645 url = "https://pypi.python.org/packages/c5/c9/e760f131fcde817da6c186a3f4952b8f206b7eeb269bb6f0836c715c5f20/zope.deprecation-4.1.1.tar.gz";
448 md5 = "ce261b9384066f7e13b63525778430cb";
646 md5 = "ce261b9384066f7e13b63525778430cb";
449 };
647 };
450 meta = {
648 meta = {
451 license = [ pkgs.lib.licenses.zpt21 ];
649 license = [ pkgs.lib.licenses.zpt21 ];
452 };
650 };
453 };
651 };
454 zope.interface = super.buildPythonPackage {
652 zope.interface = super.buildPythonPackage {
455 name = "zope.interface-4.1.3";
653 name = "zope.interface-4.1.3";
456 buildInputs = with self; [];
654 buildInputs = with self; [];
457 doCheck = false;
655 doCheck = false;
458 propagatedBuildInputs = with self; [setuptools];
656 propagatedBuildInputs = with self; [setuptools];
459 src = fetchurl {
657 src = fetchurl {
460 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
658 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
461 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
659 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
462 };
660 };
463 meta = {
661 meta = {
464 license = [ pkgs.lib.licenses.zpt21 ];
662 license = [ pkgs.lib.licenses.zpt21 ];
465 };
663 };
466 };
664 };
467
665
468 ### Test requirements
666 ### Test requirements
469
667
470
668 pytest-sugar = super.buildPythonPackage {
669 name = "pytest-sugar-0.7.1";
670 buildInputs = with self; [];
671 doCheck = false;
672 propagatedBuildInputs = with self; [pytest termcolor];
673 src = fetchurl {
674 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
675 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
676 };
677 meta = {
678 license = [ pkgs.lib.licenses.bsdOriginal ];
679 };
680 };
681 termcolor = super.buildPythonPackage {
682 name = "termcolor-1.1.0";
683 buildInputs = with self; [];
684 doCheck = false;
685 propagatedBuildInputs = with self; [];
686 src = fetchurl {
687 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
688 md5 = "043e89644f8909d462fbbfa511c768df";
689 };
690 meta = {
691 license = [ pkgs.lib.licenses.mit ];
692 };
693 };
471 }
694 }
@@ -1,34 +1,35 b''
1 Beaker==1.7.0
1 Beaker==1.7.0
2 configobj==5.0.6
2 configobj==5.0.6
3 dulwich==0.13.0
3 dulwich==0.13.0
4 hgsubversion==1.8.6
4 hgsubversion==1.8.6
5 infrae.cache==1.0.1
5 infrae.cache==1.0.1
6 ipdb==0.10.1
6 mercurial==3.8.4
7 mercurial==3.8.4
7 msgpack-python==0.4.6
8 msgpack-python==0.4.6
8 py==1.4.29
9 py==1.4.29
9 pyramid==1.6.1
10 pyramid==1.6.1
10 pyramid-jinja2==2.5
11 pyramid-jinja2==2.5
11 pyramid-mako==1.0.2
12 pyramid-mako==1.0.2
12 Pyro4==4.41
13 Pyro4==4.41
13 pytest==2.8.5
14 pytest==2.8.5
14 repoze.lru==0.6
15 repoze.lru==0.6
15 serpent==1.12
16 serpent==1.12
16 setuptools==20.8.1
17 setuptools==20.8.1
17 simplejson==3.7.2
18 simplejson==3.7.2
18 subprocess32==3.2.6
19 subprocess32==3.2.6
19 # TODO: johbo: This version is not in source on PyPI currently,
20 # TODO: johbo: This version is not in source on PyPI currently,
20 # change back once this or a future version is available
21 # change back once this or a future version is available
21 https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz#md5=7b745a47128050ea5a73efcd913ec1cf
22 https://github.com/jelmer/subvertpy/archive/subvertpy-0.9.3.tar.gz#md5=7b745a47128050ea5a73efcd913ec1cf
22 six==1.9.0
23 six==1.9.0
23 translationstring==1.3
24 translationstring==1.3
24 waitress==0.8.9
25 waitress==0.8.9
25 WebOb==1.3.1
26 WebOb==1.3.1
26 wheel==0.29.0
27 wheel==0.29.0
27 zope.deprecation==4.1.1
28 zope.deprecation==4.1.1
28 zope.interface==4.1.3
29 zope.interface==4.1.3
29 greenlet==0.4.7
30 greenlet==0.4.7
30 gunicorn==19.6.0
31 gunicorn==19.6.0
31
32
32 # Test related requirements
33 # Test related requirements
33 mock==1.0.1
34 mock==1.0.1
34 WebTest==1.4.3
35 WebTest==1.4.3
@@ -1,102 +1,103 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from setuptools import setup, find_packages
18 from setuptools import setup, find_packages
19 from setuptools.command.test import test as TestCommand
19 from setuptools.command.test import test as TestCommand
20 from codecs import open
20 from codecs import open
21 from os import path
21 from os import path
22 import pkgutil
22 import pkgutil
23 import sys
23 import sys
24
24
25
25
26 here = path.abspath(path.dirname(__file__))
26 here = path.abspath(path.dirname(__file__))
27
27
28 with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
28 with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
29 long_description = f.read()
29 long_description = f.read()
30
30
31
31
32 def get_version():
32 def get_version():
33 version = pkgutil.get_data('vcsserver', 'VERSION')
33 version = pkgutil.get_data('vcsserver', 'VERSION')
34 return version.strip()
34 return version.strip()
35
35
36
36
37 class PyTest(TestCommand):
37 class PyTest(TestCommand):
38 user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
38 user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
39
39
40 def initialize_options(self):
40 def initialize_options(self):
41 TestCommand.initialize_options(self)
41 TestCommand.initialize_options(self)
42 self.pytest_args = []
42 self.pytest_args = []
43
43
44 def finalize_options(self):
44 def finalize_options(self):
45 TestCommand.finalize_options(self)
45 TestCommand.finalize_options(self)
46 self.test_args = []
46 self.test_args = []
47 self.test_suite = True
47 self.test_suite = True
48
48
49 def run_tests(self):
49 def run_tests(self):
50 # import here, cause outside the eggs aren't loaded
50 # import here, cause outside the eggs aren't loaded
51 import pytest
51 import pytest
52 errno = pytest.main(self.pytest_args)
52 errno = pytest.main(self.pytest_args)
53 sys.exit(errno)
53 sys.exit(errno)
54
54
55
55
56 setup(
56 setup(
57 name='rhodecode-vcsserver',
57 name='rhodecode-vcsserver',
58 version=get_version(),
58 version=get_version(),
59 description='Version Control System Server',
59 description='Version Control System Server',
60 long_description=long_description,
60 long_description=long_description,
61 url='http://www.rhodecode.com',
61 url='http://www.rhodecode.com',
62 author='RhodeCode GmbH',
62 author='RhodeCode GmbH',
63 author_email='marcin@rhodecode.com',
63 author_email='marcin@rhodecode.com',
64 cmdclass={'test': PyTest},
64 cmdclass={'test': PyTest},
65 license='GPLv3',
65 license='GPLv3',
66 classifiers=[
66 classifiers=[
67 'Development Status :: 5 - Production/Stable',
67 'Development Status :: 5 - Production/Stable',
68 'Intended Audience :: Developers',
68 'Intended Audience :: Developers',
69 'Topic :: Software Development :: Version Control',
69 'Topic :: Software Development :: Version Control',
70 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
70 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
71 'Programming Language :: Python :: 2.7',
71 'Programming Language :: Python :: 2.7',
72 ],
72 ],
73 packages=find_packages(),
73 packages=find_packages(),
74 tests_require=[
74 tests_require=[
75 'mock',
75 'mock',
76 'pytest',
76 'pytest',
77 'pytest-sugar',
77 'WebTest',
78 'WebTest',
78 ],
79 ],
79 install_requires=[
80 install_requires=[
80 'configobj',
81 'configobj',
81 'dulwich',
82 'dulwich',
82 'hgsubversion',
83 'hgsubversion',
83 'infrae.cache',
84 'infrae.cache',
84 'mercurial',
85 'mercurial',
85 'msgpack-python',
86 'msgpack-python',
86 'pyramid',
87 'pyramid',
87 'Pyro4',
88 'Pyro4',
88 'simplejson',
89 'simplejson',
89 'subprocess32',
90 'subprocess32',
90 'waitress',
91 'waitress',
91 'WebOb',
92 'WebOb',
92 ],
93 ],
93 package_data={
94 package_data={
94 'vcsserver': ['VERSION'],
95 'vcsserver': ['VERSION'],
95 },
96 },
96 entry_points={
97 entry_points={
97 'console_scripts': [
98 'console_scripts': [
98 'vcsserver=vcsserver.main:main',
99 'vcsserver=vcsserver.main:main',
99 ],
100 ],
100 'paste.app_factory': ['main=vcsserver.http_main:main']
101 'paste.app_factory': ['main=vcsserver.http_main:main']
101 },
102 },
102 )
103 )
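
Editorial aside: the PyTest command defined in setup.py above simply forwards the --pytest-args option to pytest.main and exits with its return code; the newly listed pytest-sugar test requirement is a reporting plugin and does not change that flow. A hedged sketch of the equivalent call (the -k expression below is only an example):

    # Rough equivalent of `python setup.py test` with forwarded pytest args.
    import sys
    import pytest

    errno = pytest.main(['-k', 'test_obfuscate_qs'])  # run a selected test
    sys.exit(errno)
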
@@ -1,18 +1,41 b''
1 { pkgs ? import <nixpkgs> {}
1 { pkgs ? import <nixpkgs> {}
2 , doCheck ? false
2 , doCheck ? false
3 }:
3 }:
4
4
5 let
5 let
6
6 vcsserver = import ./default.nix {
7 vcsserver = import ./default.nix {
7 inherit
8 inherit pkgs doCheck;
8 doCheck
9 pkgs;
10 };
9 };
11
10
11 vcs-pythonPackages = vcsserver.pythonPackages;
12
12 in vcsserver.override (attrs: {
13 in vcsserver.override (attrs: {
13
14
14 # Avoid that we dump any sources into the store when entering the shell and
15 # Avoid that we dump any sources into the store when entering the shell and
15 # make development a little bit more convenient.
16 # make development a little bit more convenient.
16 src = null;
17 src = null;
17
18
19 buildInputs =
20 attrs.buildInputs ++
21 (with vcs-pythonPackages; [
22 ipdb
23 ]);
24
25 # Somewhat snappier setup of the development environment
26 # TODO: think of supporting a stable path again, so that multiple shells
27 # can share it.
28 postShellHook = ''
29 # Set locale
30 export LC_ALL="en_US.UTF-8"
31
32 # Custom prompt to distinguish from other dev envs.
33 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
34
35 tmp_path=$(mktemp -d)
36 export PATH="$tmp_path/bin:$PATH"
37 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
38 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
39 python setup.py develop --prefix $tmp_path --allow-hosts ""
40 '';
18 })
41 })
@@ -1,36 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19
20
20 from vcsserver import main
21 from vcsserver import main
22 from vcsserver.base import obfuscate_qs
21
23
22
24
23 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
25 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
24 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
25 def test_applies_largefiles_patch(patch_largefiles_capabilities):
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
26 main.main([])
28 main.main([])
27 patch_largefiles_capabilities.assert_called_once_with()
29 patch_largefiles_capabilities.assert_called_once_with()
28
30
29
31
30 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
32 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
31 @mock.patch('vcsserver.main.MercurialFactory', None)
33 @mock.patch('vcsserver.main.MercurialFactory', None)
32 @mock.patch(
34 @mock.patch(
33 'vcsserver.hgpatches.patch_largefiles_capabilities',
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
34 mock.Mock(side_effect=Exception("Must not be called")))
36 mock.Mock(side_effect=Exception("Must not be called")))
35 def test_applies_largefiles_patch_only_if_mercurial_is_available():
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
36 main.main([])
38 main.main([])
39
40
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
46 'a&b&c&query&foo=bar&auth_token=*****'),
47 ('', ''),
48 (None, None),
49 ('foo=bar', 'foo=bar'),
50 ('auth_token=secret', 'auth_token=*****'),
51 ('auth_token=secret&api_key=secret2',
52 'auth_token=*****&api_key=*****'),
53 ('auth_token=secret&api_key=secret2&param=value',
54 'auth_token=*****&api_key=*****&param=value'),
55 ])
56 def test_obfuscate_qs(given, expected):
57 assert expected == obfuscate_qs(given)
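
Editorial aside: the parametrized cases above document the behaviour of the new obfuscate_qs helper added to vcsserver.base in this changeset; auth_token and api_key values are masked while other parameters, empty strings and None pass through. A minimal usage sketch, assuming the vcsserver package is importable in a Python 2 environment like the rest of the code base (the sample query strings are illustrative):

    # Hedged sketch; mirrors the expectations encoded in test_obfuscate_qs.
    from vcsserver.base import obfuscate_qs

    assert obfuscate_qs(None) is None
    assert obfuscate_qs('') == ''
    assert obfuscate_qs('foo=bar') == 'foo=bar'
    assert obfuscate_qs('auth_token=secret&foo=bar') == 'auth_token=*****&foo=bar'
    assert obfuscate_qs('auth_token=t&api_key=k') == 'auth_token=*****&api_key=*****'
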
@@ -1,1 +1,1 b''
1 4.4.2 \ No newline at end of file
1 4.5.0 \ No newline at end of file
@@ -1,71 +1,85 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19
19 import urlparse
20
20
21 log = logging.getLogger(__name__)
21 log = logging.getLogger(__name__)
22
22
23
23
24 class RepoFactory(object):
24 class RepoFactory(object):
25 """
25 """
26 Utility to create instances of repository
26 Utility to create instances of repository
27
27
28 It provides internal caching of the `repo` object based on
28 It provides internal caching of the `repo` object based on
29 the :term:`call context`.
29 the :term:`call context`.
30 """
30 """
31
31
32 def __init__(self, repo_cache):
32 def __init__(self, repo_cache):
33 self._cache = repo_cache
33 self._cache = repo_cache
34
34
35 def _create_config(self, path, config):
35 def _create_config(self, path, config):
36 config = {}
36 config = {}
37 return config
37 return config
38
38
39 def _create_repo(self, wire, create):
39 def _create_repo(self, wire, create):
40 raise NotImplementedError()
40 raise NotImplementedError()
41
41
42 def repo(self, wire, create=False):
42 def repo(self, wire, create=False):
43 """
43 """
44 Get a repository instance for the given path.
44 Get a repository instance for the given path.
45
45
46 Uses internally the low level beaker API since the decorators introduce
46 Uses internally the low level beaker API since the decorators introduce
47 significant overhead.
47 significant overhead.
48 """
48 """
49 def create_new_repo():
49 def create_new_repo():
50 return self._create_repo(wire, create)
50 return self._create_repo(wire, create)
51
51
52 return self._repo(wire, create_new_repo)
52 return self._repo(wire, create_new_repo)
53
53
54 def _repo(self, wire, createfunc):
54 def _repo(self, wire, createfunc):
55 context = wire.get('context', None)
55 context = wire.get('context', None)
56 cache = wire.get('cache', True)
56 cache = wire.get('cache', True)
57 log.debug(
57 log.debug(
58 'GET %s@%s with cache:%s. Context: %s',
58 'GET %s@%s with cache:%s. Context: %s',
59 self.__class__.__name__, wire['path'], cache, context)
59 self.__class__.__name__, wire['path'], cache, context)
60
60
61 if context and cache:
61 if context and cache:
62 cache_key = (context, wire['path'])
62 cache_key = (context, wire['path'])
63 log.debug(
63 log.debug(
64 'FETCH %s@%s repo object from cache. Context: %s',
64 'FETCH %s@%s repo object from cache. Context: %s',
65 self.__class__.__name__, wire['path'], context)
65 self.__class__.__name__, wire['path'], context)
66 return self._cache.get(key=cache_key, createfunc=createfunc)
66 return self._cache.get(key=cache_key, createfunc=createfunc)
67 else:
67 else:
68 log.debug(
68 log.debug(
69 'INIT %s@%s repo object based on wire %s. Context: %s',
69 'INIT %s@%s repo object based on wire %s. Context: %s',
70 self.__class__.__name__, wire['path'], wire, context)
70 self.__class__.__name__, wire['path'], wire, context)
71 return createfunc()
71 return createfunc()
72
73
74 def obfuscate_qs(query_string):
75 if query_string is None:
76 return None
77
78 parsed = []
79 for k, v in urlparse.parse_qsl(query_string, keep_blank_values=True):
80 if k in ['auth_token', 'api_key']:
81 v = "*****"
82 parsed.append((k, v))
83
84 return '&'.join('{}{}'.format(
85 k, '={}'.format(v) if v else '') for k, v in parsed)
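
Editorial aside: as the docstrings above note, RepoFactory caches repo objects per (context, path) through the low level beaker-style get(key, createfunc=...) call. A minimal sketch under that assumption, using an illustrative in-memory stand-in instead of a real beaker cache region (FakeRepoCache, DummyFactory and the wire values below are made up for the example):

    from vcsserver.base import RepoFactory

    class FakeRepoCache(object):
        """Illustrative stand-in for the beaker cache RepoFactory expects."""
        def __init__(self):
            self._store = {}

        def get(self, key, createfunc):
            # Create the value once, then serve it from the store.
            if key not in self._store:
                self._store[key] = createfunc()
            return self._store[key]

    class DummyFactory(RepoFactory):
        def _create_repo(self, wire, create):
            return object()  # a real backend factory would open the repository

    factory = DummyFactory(FakeRepoCache())
    wire = {'path': '/srv/repos/example.git', 'context': 'call-1', 'cache': True}
    # Same (context, path) key: the second call is served from the cache.
    assert factory.repo(wire) is factory.repo(wire)
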
@@ -1,56 +1,70 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Special exception handling over the wire.
19 Special exception handling over the wire.
20
20
21 Since we cannot assume that our client is able to import our exception classes,
21 Since we cannot assume that our client is able to import our exception classes,
22 this module provides a "wrapping" mechanism to raise plain exceptions
22 this module provides a "wrapping" mechanism to raise plain exceptions
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
23 which contain an extra attribute `_vcs_kind` to allow a client to distinguish
24 different error conditions.
24 different error conditions.
25 """
25 """
26
26
27 import functools
27 import functools
28 from pyramid.httpexceptions import HTTPLocked
28
29
29
30
30 def _make_exception(kind, *args):
31 def _make_exception(kind, *args):
31 """
32 """
32 Prepares a base `Exception` instance to be sent over the wire.
33 Prepares a base `Exception` instance to be sent over the wire.
33
34
34 To give our caller a hint what this is about, it will attach an attribute
35 To give our caller a hint what this is about, it will attach an attribute
35 `_vcs_kind` to the exception.
36 `_vcs_kind` to the exception.
36 """
37 """
37 exc = Exception(*args)
38 exc = Exception(*args)
38 exc._vcs_kind = kind
39 exc._vcs_kind = kind
39 return exc
40 return exc
40
41
41
42
42 AbortException = functools.partial(_make_exception, 'abort')
43 AbortException = functools.partial(_make_exception, 'abort')
43
44
44 ArchiveException = functools.partial(_make_exception, 'archive')
45 ArchiveException = functools.partial(_make_exception, 'archive')
45
46
46 LookupException = functools.partial(_make_exception, 'lookup')
47 LookupException = functools.partial(_make_exception, 'lookup')
47
48
48 VcsException = functools.partial(_make_exception, 'error')
49 VcsException = functools.partial(_make_exception, 'error')
49
50
50 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
51 RepositoryLockedException = functools.partial(_make_exception, 'repo_locked')
51
52
52 RequirementException = functools.partial(_make_exception, 'requirement')
53 RequirementException = functools.partial(_make_exception, 'requirement')
53
54
54 UnhandledException = functools.partial(_make_exception, 'unhandled')
55 UnhandledException = functools.partial(_make_exception, 'unhandled')
55
56
56 URLError = functools.partial(_make_exception, 'url_error')
57 URLError = functools.partial(_make_exception, 'url_error')
58
59 SubrepoMergeException = functools.partial(_make_exception, 'subrepo_merge_error')
60
61
62 class HTTPRepoLocked(HTTPLocked):
63 """
64 Subclass of HTTPLocked response that allows setting the title and status
65 code via constructor arguments.
66 """
67 def __init__(self, title, status_code=None, **kwargs):
68 self.code = status_code or HTTPLocked.code
69 self.title = title
70 super(HTTPRepoLocked, self).__init__(**kwargs)
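
Editorial aside: the module docstring above describes the wrapping mechanism, so callers that cannot import vcsserver's exception classes are expected to dispatch on the _vcs_kind attribute instead. A hedged illustration of that pattern (the message text and the handling branch are examples, not part of this changeset):

    from vcsserver import exceptions

    try:
        raise exceptions.RepositoryLockedException('repository is locked')
    except Exception as e:
        if getattr(e, '_vcs_kind', None) == 'repo_locked':
            # e.g. translate into a 423 response, such as HTTPRepoLocked above
            pass
        else:
            raise
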
@@ -1,573 +1,580 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import urllib
23 import urllib
24 import urllib2
24 import urllib2
25 from functools import wraps
25 from functools import wraps
26
26
27 from dulwich import index, objects
27 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
29 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
31 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
32 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
34 from dulwich.server import update_server_info
35
35
36 from vcsserver import exceptions, settings, subprocessio
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory
38 from vcsserver.base import RepoFactory, obfuscate_qs
39 from vcsserver.hgcompat import (
39 from vcsserver.hgcompat import (
40 hg_url, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41
41
42
42
43 DIR_STAT = stat.S_IFDIR
43 DIR_STAT = stat.S_IFDIR
44 FILE_MODE = stat.S_IFMT
44 FILE_MODE = stat.S_IFMT
45 GIT_LINK = objects.S_IFGITLINK
45 GIT_LINK = objects.S_IFGITLINK
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 def reraise_safe_exceptions(func):
50 def reraise_safe_exceptions(func):
51 """Converts Dulwich exceptions to something neutral."""
51 """Converts Dulwich exceptions to something neutral."""
52 @wraps(func)
52 @wraps(func)
53 def wrapper(*args, **kwargs):
53 def wrapper(*args, **kwargs):
54 try:
54 try:
55 return func(*args, **kwargs)
55 return func(*args, **kwargs)
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 ObjectMissing) as e:
57 ObjectMissing) as e:
58 raise exceptions.LookupException(e.message)
58 raise exceptions.LookupException(e.message)
59 except (HangupException, UnexpectedCommandError) as e:
59 except (HangupException, UnexpectedCommandError) as e:
60 raise exceptions.VcsException(e.message)
60 raise exceptions.VcsException(e.message)
61 return wrapper
61 return wrapper
62
62
63
63
64 class Repo(DulwichRepo):
64 class Repo(DulwichRepo):
65 """
65 """
66 A wrapper for dulwich Repo class.
66 A wrapper for dulwich Repo class.
67
67
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
68 Since dulwich is sometimes keeping .idx file descriptors open, it leads to
69 "Too many open files" error. We need to close all opened file descriptors
69 "Too many open files" error. We need to close all opened file descriptors
70 once the repo object is destroyed.
70 once the repo object is destroyed.
71
71
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
72 TODO: mikhail: please check if we need this wrapper after updating dulwich
73 to 0.12.0 +
73 to 0.12.0 +
74 """
74 """
75 def __del__(self):
75 def __del__(self):
76 if hasattr(self, 'object_store'):
76 if hasattr(self, 'object_store'):
77 self.close()
77 self.close()
78
78
79
79
80 class GitFactory(RepoFactory):
80 class GitFactory(RepoFactory):
81
81
82 def _create_repo(self, wire, create):
82 def _create_repo(self, wire, create):
83 repo_path = str_to_dulwich(wire['path'])
83 repo_path = str_to_dulwich(wire['path'])
84 return Repo(repo_path)
84 return Repo(repo_path)
85
85
86
86
87 class GitRemote(object):
87 class GitRemote(object):
88
88
89 def __init__(self, factory):
89 def __init__(self, factory):
90 self._factory = factory
90 self._factory = factory
91
91
92 self._bulk_methods = {
92 self._bulk_methods = {
93 "author": self.commit_attribute,
93 "author": self.commit_attribute,
94 "date": self.get_object_attrs,
94 "date": self.get_object_attrs,
95 "message": self.commit_attribute,
95 "message": self.commit_attribute,
96 "parents": self.commit_attribute,
96 "parents": self.commit_attribute,
97 "_commit": self.revision,
97 "_commit": self.revision,
98 }
98 }
99
99
100 def _assign_ref(self, wire, ref, commit_id):
100 def _assign_ref(self, wire, ref, commit_id):
101 repo = self._factory.repo(wire)
101 repo = self._factory.repo(wire)
102 repo[ref] = commit_id
102 repo[ref] = commit_id
103
103
104 @reraise_safe_exceptions
104 @reraise_safe_exceptions
105 def add_object(self, wire, content):
105 def add_object(self, wire, content):
106 repo = self._factory.repo(wire)
106 repo = self._factory.repo(wire)
107 blob = objects.Blob()
107 blob = objects.Blob()
108 blob.set_raw_string(content)
108 blob.set_raw_string(content)
109 repo.object_store.add_object(blob)
109 repo.object_store.add_object(blob)
110 return blob.id
110 return blob.id
111
111
112 @reraise_safe_exceptions
112 @reraise_safe_exceptions
113 def assert_correct_path(self, wire):
113 def assert_correct_path(self, wire):
114 try:
114 try:
115 self._factory.repo(wire)
115 self._factory.repo(wire)
116 except NotGitRepository as e:
116 except NotGitRepository as e:
117 # Exception can contain unicode which we convert
117 # Exception can contain unicode which we convert
118 raise exceptions.AbortException(repr(e))
118 raise exceptions.AbortException(repr(e))
119
119
120 @reraise_safe_exceptions
120 @reraise_safe_exceptions
121 def bare(self, wire):
121 def bare(self, wire):
122 repo = self._factory.repo(wire)
122 repo = self._factory.repo(wire)
123 return repo.bare
123 return repo.bare
124
124
125 @reraise_safe_exceptions
125 @reraise_safe_exceptions
126 def blob_as_pretty_string(self, wire, sha):
126 def blob_as_pretty_string(self, wire, sha):
127 repo = self._factory.repo(wire)
127 repo = self._factory.repo(wire)
128 return repo[sha].as_pretty_string()
128 return repo[sha].as_pretty_string()
129
129
130 @reraise_safe_exceptions
130 @reraise_safe_exceptions
131 def blob_raw_length(self, wire, sha):
131 def blob_raw_length(self, wire, sha):
132 repo = self._factory.repo(wire)
132 repo = self._factory.repo(wire)
133 blob = repo[sha]
133 blob = repo[sha]
134 return blob.raw_length()
134 return blob.raw_length()
135
135
136 @reraise_safe_exceptions
136 @reraise_safe_exceptions
137 def bulk_request(self, wire, rev, pre_load):
137 def bulk_request(self, wire, rev, pre_load):
138 result = {}
138 result = {}
139 for attr in pre_load:
139 for attr in pre_load:
140 try:
140 try:
141 method = self._bulk_methods[attr]
141 method = self._bulk_methods[attr]
142 args = [wire, rev]
142 args = [wire, rev]
143 if attr == "date":
143 if attr == "date":
144 args.extend(["commit_time", "commit_timezone"])
144 args.extend(["commit_time", "commit_timezone"])
145 elif attr in ["author", "message", "parents"]:
145 elif attr in ["author", "message", "parents"]:
146 args.append(attr)
146 args.append(attr)
147 result[attr] = method(*args)
147 result[attr] = method(*args)
148 except KeyError:
148 except KeyError:
149 raise exceptions.VcsException(
149 raise exceptions.VcsException(
150 "Unknown bulk attribute: %s" % attr)
150 "Unknown bulk attribute: %s" % attr)
151 return result
151 return result
152
152
153 def _build_opener(self, url):
153 def _build_opener(self, url):
154 handlers = []
154 handlers = []
155 url_obj = hg_url(url)
155 url_obj = url_parser(url)
156 _, authinfo = url_obj.authinfo()
156 _, authinfo = url_obj.authinfo()
157
157
158 if authinfo:
158 if authinfo:
159 # create a password manager
159 # create a password manager
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
160 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
161 passmgr.add_password(*authinfo)
161 passmgr.add_password(*authinfo)
162
162
163 handlers.extend((httpbasicauthhandler(passmgr),
163 handlers.extend((httpbasicauthhandler(passmgr),
164 httpdigestauthhandler(passmgr)))
164 httpdigestauthhandler(passmgr)))
165
165
166 return urllib2.build_opener(*handlers)
166 return urllib2.build_opener(*handlers)
167
167
168 @reraise_safe_exceptions
168 @reraise_safe_exceptions
169 def check_url(self, url, config):
169 def check_url(self, url, config):
170 url_obj = hg_url(url)
170 url_obj = url_parser(url)
171 test_uri, _ = url_obj.authinfo()
171 test_uri, _ = url_obj.authinfo()
172 url_obj.passwd = '*****'
172 url_obj.passwd = '*****'
173 url_obj.query = obfuscate_qs(url_obj.query)
173 cleaned_uri = str(url_obj)
174 cleaned_uri = str(url_obj)
175 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
174
176
175 if not test_uri.endswith('info/refs'):
177 if not test_uri.endswith('info/refs'):
176 test_uri = test_uri.rstrip('/') + '/info/refs'
178 test_uri = test_uri.rstrip('/') + '/info/refs'
177
179
178 o = self._build_opener(url)
180 o = self._build_opener(url)
179 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
181 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
180
182
181 q = {"service": 'git-upload-pack'}
183 q = {"service": 'git-upload-pack'}
182 qs = '?%s' % urllib.urlencode(q)
184 qs = '?%s' % urllib.urlencode(q)
183 cu = "%s%s" % (test_uri, qs)
185 cu = "%s%s" % (test_uri, qs)
184 req = urllib2.Request(cu, None, {})
186 req = urllib2.Request(cu, None, {})
185
187
186 try:
188 try:
189 log.debug("Trying to open URL %s", cleaned_uri)
187 resp = o.open(req)
190 resp = o.open(req)
188 if resp.code != 200:
191 if resp.code != 200:
189 raise Exception('Return Code is not 200')
192 raise exceptions.URLError('Return Code is not 200')
190 except Exception as e:
193 except Exception as e:
194 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
191 # means it cannot be cloned
195 # means it cannot be cloned
192 raise urllib2.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
196 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
193
197
194 # now detect if it's proper git repo
198 # now detect if it's proper git repo
195 gitdata = resp.read()
199 gitdata = resp.read()
196 if 'service=git-upload-pack' in gitdata:
200 if 'service=git-upload-pack' in gitdata:
197 pass
201 pass
198 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
202 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
199 # old style git can return some other format !
203 # old style git can return some other format !
200 pass
204 pass
201 else:
205 else:
202 raise urllib2.URLError(
206 raise exceptions.URLError(
203 "url [%s] does not look like a git" % (cleaned_uri,))
207 "url [%s] does not look like a git" % (cleaned_uri,))
204
208
205 return True
209 return True
206
210
207 @reraise_safe_exceptions
211 @reraise_safe_exceptions
208 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
212 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
209 remote_refs = self.fetch(wire, url, apply_refs=False)
213 remote_refs = self.fetch(wire, url, apply_refs=False)
210 repo = self._factory.repo(wire)
214 repo = self._factory.repo(wire)
211 if isinstance(valid_refs, list):
215 if isinstance(valid_refs, list):
212 valid_refs = tuple(valid_refs)
216 valid_refs = tuple(valid_refs)
213
217
214 for k in remote_refs:
218 for k in remote_refs:
215 # only parse heads/tags and skip so called deferred tags
219 # only parse heads/tags and skip so called deferred tags
216 if k.startswith(valid_refs) and not k.endswith(deferred):
220 if k.startswith(valid_refs) and not k.endswith(deferred):
217 repo[k] = remote_refs[k]
221 repo[k] = remote_refs[k]
218
222
219 if update_after_clone:
223 if update_after_clone:
220 # we want to checkout HEAD
224 # we want to checkout HEAD
221 repo["HEAD"] = remote_refs["HEAD"]
225 repo["HEAD"] = remote_refs["HEAD"]
222 index.build_index_from_tree(repo.path, repo.index_path(),
226 index.build_index_from_tree(repo.path, repo.index_path(),
223 repo.object_store, repo["HEAD"].tree)
227 repo.object_store, repo["HEAD"].tree)
224
228
225 # TODO: this is quite complex, check if that can be simplified
229 # TODO: this is quite complex, check if that can be simplified
226 @reraise_safe_exceptions
230 @reraise_safe_exceptions
227 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
231 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
228 repo = self._factory.repo(wire)
232 repo = self._factory.repo(wire)
229 object_store = repo.object_store
233 object_store = repo.object_store
230
234
231 # Create tree and populates it with blobs
235 # Create tree and populates it with blobs
232 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
236 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
233
237
234 for node in updated:
238 for node in updated:
235 # Compute subdirs if needed
239 # Compute subdirs if needed
236 dirpath, nodename = vcspath.split(node['path'])
240 dirpath, nodename = vcspath.split(node['path'])
237 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
241 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
238 parent = commit_tree
242 parent = commit_tree
239 ancestors = [('', parent)]
243 ancestors = [('', parent)]
240
244
241 # Tries to dig for the deepest existing tree
245 # Tries to dig for the deepest existing tree
242 while dirnames:
246 while dirnames:
243 curdir = dirnames.pop(0)
247 curdir = dirnames.pop(0)
244 try:
248 try:
245 dir_id = parent[curdir][1]
249 dir_id = parent[curdir][1]
246 except KeyError:
250 except KeyError:
247 # put curdir back into dirnames and stops
251 # put curdir back into dirnames and stops
248 dirnames.insert(0, curdir)
252 dirnames.insert(0, curdir)
249 break
253 break
250 else:
254 else:
251 # If found, updates parent
255 # If found, updates parent
252 parent = repo[dir_id]
256 parent = repo[dir_id]
253 ancestors.append((curdir, parent))
257 ancestors.append((curdir, parent))
254 # Now parent is deepest existing tree and we need to create
258 # Now parent is deepest existing tree and we need to create
255 # subtrees for dirnames (in reverse order)
259 # subtrees for dirnames (in reverse order)
256 # [this only applies for nodes from added]
260 # [this only applies for nodes from added]
257 new_trees = []
261 new_trees = []
258
262
259 blob = objects.Blob.from_string(node['content'])
263 blob = objects.Blob.from_string(node['content'])
260
264
261 if dirnames:
265 if dirnames:
262 # If there are trees which should be created we need to build
266 # If there are trees which should be created we need to build
263 # them now (in reverse order)
267 # them now (in reverse order)
264 reversed_dirnames = list(reversed(dirnames))
268 reversed_dirnames = list(reversed(dirnames))
265 curtree = objects.Tree()
269 curtree = objects.Tree()
266 curtree[node['node_path']] = node['mode'], blob.id
270 curtree[node['node_path']] = node['mode'], blob.id
267 new_trees.append(curtree)
271 new_trees.append(curtree)
268 for dirname in reversed_dirnames[:-1]:
272 for dirname in reversed_dirnames[:-1]:
269 newtree = objects.Tree()
273 newtree = objects.Tree()
270 newtree[dirname] = (DIR_STAT, curtree.id)
274 newtree[dirname] = (DIR_STAT, curtree.id)
271 new_trees.append(newtree)
275 new_trees.append(newtree)
272 curtree = newtree
276 curtree = newtree
273 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
277 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
274 else:
278 else:
275 parent.add(
279 parent.add(
276 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
280 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
277
281
278 new_trees.append(parent)
282 new_trees.append(parent)
279 # Update ancestors
283 # Update ancestors
280 reversed_ancestors = reversed(
284 reversed_ancestors = reversed(
281 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
285 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
282 for parent, tree, path in reversed_ancestors:
286 for parent, tree, path in reversed_ancestors:
283 parent[path] = (DIR_STAT, tree.id)
287 parent[path] = (DIR_STAT, tree.id)
284 object_store.add_object(tree)
288 object_store.add_object(tree)
285
289
286 object_store.add_object(blob)
290 object_store.add_object(blob)
287 for tree in new_trees:
291 for tree in new_trees:
288 object_store.add_object(tree)
292 object_store.add_object(tree)
289
293
290 for node_path in removed:
294 for node_path in removed:
291 paths = node_path.split('/')
295 paths = node_path.split('/')
292 tree = commit_tree
296 tree = commit_tree
293 trees = [tree]
297 trees = [tree]
294 # Traverse deep into the forest...
298 # Traverse deep into the forest...
295 for path in paths:
299 for path in paths:
296 try:
300 try:
297 obj = repo[tree[path][1]]
301 obj = repo[tree[path][1]]
298 if isinstance(obj, objects.Tree):
302 if isinstance(obj, objects.Tree):
299 trees.append(obj)
303 trees.append(obj)
300 tree = obj
304 tree = obj
301 except KeyError:
305 except KeyError:
302 break
306 break
303 # Cut down the blob and all rotten trees on the way back...
307 # Cut down the blob and all rotten trees on the way back...
304 for path, tree in reversed(zip(paths, trees)):
308 for path, tree in reversed(zip(paths, trees)):
305 del tree[path]
309 del tree[path]
306 if tree:
310 if tree:
307 # This tree still has elements - don't remove it or any
311 # This tree still has elements - don't remove it or any
308 # of its parents
312 # of its parents
309 break
313 break
310
314
311 object_store.add_object(commit_tree)
315 object_store.add_object(commit_tree)
312
316
313 # Create commit
317 # Create commit
314 commit = objects.Commit()
318 commit = objects.Commit()
315 commit.tree = commit_tree.id
319 commit.tree = commit_tree.id
316 for k, v in commit_data.iteritems():
320 for k, v in commit_data.iteritems():
317 setattr(commit, k, v)
321 setattr(commit, k, v)
318 object_store.add_object(commit)
322 object_store.add_object(commit)
319
323
320 ref = 'refs/heads/%s' % branch
324 ref = 'refs/heads/%s' % branch
321 repo.refs[ref] = commit.id
325 repo.refs[ref] = commit.id
322
326
323 return commit.id
327 return commit.id
324
328
325 @reraise_safe_exceptions
329 @reraise_safe_exceptions
326 def fetch(self, wire, url, apply_refs=True, refs=None):
330 def fetch(self, wire, url, apply_refs=True, refs=None):
327 if url != 'default' and '://' not in url:
331 if url != 'default' and '://' not in url:
328 client = LocalGitClient(url)
332 client = LocalGitClient(url)
329 else:
333 else:
330 url_obj = hg_url(url)
334 url_obj = url_parser(url)
331 o = self._build_opener(url)
335 o = self._build_opener(url)
332 url, _ = url_obj.authinfo()
336 url, _ = url_obj.authinfo()
333 client = HttpGitClient(base_url=url, opener=o)
337 client = HttpGitClient(base_url=url, opener=o)
334 repo = self._factory.repo(wire)
338 repo = self._factory.repo(wire)
335
339
336 determine_wants = repo.object_store.determine_wants_all
340 determine_wants = repo.object_store.determine_wants_all
337 if refs:
341 if refs:
338 def determine_wants_requested(references):
342 def determine_wants_requested(references):
339 return [references[r] for r in references if r in refs]
343 return [references[r] for r in references if r in refs]
340 determine_wants = determine_wants_requested
344 determine_wants = determine_wants_requested
341
345
342 try:
346 try:
343 remote_refs = client.fetch(
347 remote_refs = client.fetch(
344 path=url, target=repo, determine_wants=determine_wants)
348 path=url, target=repo, determine_wants=determine_wants)
345 except NotGitRepository:
349 except NotGitRepository:
346 log.warning(
350 log.warning(
347 'Trying to fetch from "%s" failed, not a Git repository.', url)
351 'Trying to fetch from "%s" failed, not a Git repository.', url)
348 raise exceptions.AbortException()
352 raise exceptions.AbortException()
349
353
350 # mikhail: client.fetch() returns all the remote refs, but fetches only
354 # mikhail: client.fetch() returns all the remote refs, but fetches only
351 # refs filtered by `determine_wants` function. We need to filter result
355 # refs filtered by `determine_wants` function. We need to filter result
352 # as well
356 # as well
353 if refs:
357 if refs:
354 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
358 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
355
359
356 if apply_refs:
360 if apply_refs:
357 # TODO: johbo: Needs proper test coverage with a git repository
361 # TODO: johbo: Needs proper test coverage with a git repository
358 # that contains a tag object, so that we would end up with
362 # that contains a tag object, so that we would end up with
359 # a peeled ref at this point.
363 # a peeled ref at this point.
360 PEELED_REF_MARKER = '^{}'
364 PEELED_REF_MARKER = '^{}'
361 for k in remote_refs:
365 for k in remote_refs:
362 if k.endswith(PEELED_REF_MARKER):
366 if k.endswith(PEELED_REF_MARKER):
363 log.info("Skipping peeled reference %s", k)
367 log.info("Skipping peeled reference %s", k)
364 continue
368 continue
365 repo[k] = remote_refs[k]
369 repo[k] = remote_refs[k]
366
370
367 if refs:
371 if refs:
368 # mikhail: explicitly set the head to the last ref.
372 # mikhail: explicitly set the head to the last ref.
369 repo['HEAD'] = remote_refs[refs[-1]]
373 repo['HEAD'] = remote_refs[refs[-1]]
370
374
371 # TODO: mikhail: should we return remote_refs here to be
375 # TODO: mikhail: should we return remote_refs here to be
372 # consistent?
376 # consistent?
373 else:
377 else:
374 return remote_refs
378 return remote_refs
375
379
376 @reraise_safe_exceptions
380 @reraise_safe_exceptions
377 def get_remote_refs(self, wire, url):
381 def get_remote_refs(self, wire, url):
378 repo = Repo(url)
382 repo = Repo(url)
379 return repo.get_refs()
383 return repo.get_refs()
380
384
381 @reraise_safe_exceptions
385 @reraise_safe_exceptions
382 def get_description(self, wire):
386 def get_description(self, wire):
383 repo = self._factory.repo(wire)
387 repo = self._factory.repo(wire)
384 return repo.get_description()
388 return repo.get_description()
385
389
386 @reraise_safe_exceptions
390 @reraise_safe_exceptions
387 def get_file_history(self, wire, file_path, commit_id, limit):
391 def get_file_history(self, wire, file_path, commit_id, limit):
388 repo = self._factory.repo(wire)
392 repo = self._factory.repo(wire)
389 include = [commit_id]
393 include = [commit_id]
390 paths = [file_path]
394 paths = [file_path]
391
395
392 walker = repo.get_walker(include, paths=paths, max_entries=limit)
396 walker = repo.get_walker(include, paths=paths, max_entries=limit)
393 return [x.commit.id for x in walker]
397 return [x.commit.id for x in walker]
394
398
395 @reraise_safe_exceptions
399 @reraise_safe_exceptions
396 def get_missing_revs(self, wire, rev1, rev2, path2):
400 def get_missing_revs(self, wire, rev1, rev2, path2):
397 repo = self._factory.repo(wire)
401 repo = self._factory.repo(wire)
398 LocalGitClient(thin_packs=False).fetch(path2, repo)
402 LocalGitClient(thin_packs=False).fetch(path2, repo)
399
403
400 wire_remote = wire.copy()
404 wire_remote = wire.copy()
401 wire_remote['path'] = path2
405 wire_remote['path'] = path2
402 repo_remote = self._factory.repo(wire_remote)
406 repo_remote = self._factory.repo(wire_remote)
403 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
407 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
404
408
405 revs = [
409 revs = [
406 x.commit.id
410 x.commit.id
407 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
411 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
408 return revs
412 return revs
409
413
410 @reraise_safe_exceptions
414 @reraise_safe_exceptions
411 def get_object(self, wire, sha):
415 def get_object(self, wire, sha):
412 repo = self._factory.repo(wire)
416 repo = self._factory.repo(wire)
413 obj = repo.get_object(sha)
417 obj = repo.get_object(sha)
414 commit_id = obj.id
418 commit_id = obj.id
415
419
416 if isinstance(obj, Tag):
420 if isinstance(obj, Tag):
417 commit_id = obj.object[1]
421 commit_id = obj.object[1]
418
422
419 return {
423 return {
420 'id': obj.id,
424 'id': obj.id,
421 'type': obj.type_name,
425 'type': obj.type_name,
422 'commit_id': commit_id
426 'commit_id': commit_id
423 }
427 }
424
428
425 @reraise_safe_exceptions
429 @reraise_safe_exceptions
426 def get_object_attrs(self, wire, sha, *attrs):
430 def get_object_attrs(self, wire, sha, *attrs):
427 repo = self._factory.repo(wire)
431 repo = self._factory.repo(wire)
428 obj = repo.get_object(sha)
432 obj = repo.get_object(sha)
429 return list(getattr(obj, a) for a in attrs)
433 return list(getattr(obj, a) for a in attrs)
430
434
431 @reraise_safe_exceptions
435 @reraise_safe_exceptions
432 def get_refs(self, wire):
436 def get_refs(self, wire):
433 repo = self._factory.repo(wire)
437 repo = self._factory.repo(wire)
434 result = {}
438 result = {}
435 for ref, sha in repo.refs.as_dict().items():
439 for ref, sha in repo.refs.as_dict().items():
436 peeled_sha = repo.get_peeled(ref)
440 peeled_sha = repo.get_peeled(ref)
437 result[ref] = peeled_sha
441 result[ref] = peeled_sha
438 return result
442 return result
439
443
440 @reraise_safe_exceptions
444 @reraise_safe_exceptions
441 def get_refs_path(self, wire):
445 def get_refs_path(self, wire):
442 repo = self._factory.repo(wire)
446 repo = self._factory.repo(wire)
443 return repo.refs.path
447 return repo.refs.path
444
448
445 @reraise_safe_exceptions
449 @reraise_safe_exceptions
446 def head(self, wire):
450 def head(self, wire):
447 repo = self._factory.repo(wire)
451 repo = self._factory.repo(wire)
448 return repo.head()
452 return repo.head()
449
453
450 @reraise_safe_exceptions
454 @reraise_safe_exceptions
451 def init(self, wire):
455 def init(self, wire):
452 repo_path = str_to_dulwich(wire['path'])
456 repo_path = str_to_dulwich(wire['path'])
453 self.repo = Repo.init(repo_path)
457 self.repo = Repo.init(repo_path)
454
458
455 @reraise_safe_exceptions
459 @reraise_safe_exceptions
456 def init_bare(self, wire):
460 def init_bare(self, wire):
457 repo_path = str_to_dulwich(wire['path'])
461 repo_path = str_to_dulwich(wire['path'])
458 self.repo = Repo.init_bare(repo_path)
462 self.repo = Repo.init_bare(repo_path)
459
463
460 @reraise_safe_exceptions
464 @reraise_safe_exceptions
461 def revision(self, wire, rev):
465 def revision(self, wire, rev):
462 repo = self._factory.repo(wire)
466 repo = self._factory.repo(wire)
463 obj = repo[rev]
467 obj = repo[rev]
464 obj_data = {
468 obj_data = {
465 'id': obj.id,
469 'id': obj.id,
466 }
470 }
467 try:
471 try:
468 obj_data['tree'] = obj.tree
472 obj_data['tree'] = obj.tree
469 except AttributeError:
473 except AttributeError:
470 pass
474 pass
471 return obj_data
475 return obj_data
472
476
473 @reraise_safe_exceptions
477 @reraise_safe_exceptions
474 def commit_attribute(self, wire, rev, attr):
478 def commit_attribute(self, wire, rev, attr):
475 repo = self._factory.repo(wire)
479 repo = self._factory.repo(wire)
476 obj = repo[rev]
480 obj = repo[rev]
477 return getattr(obj, attr)
481 return getattr(obj, attr)
478
482
479 @reraise_safe_exceptions
483 @reraise_safe_exceptions
480 def set_refs(self, wire, key, value):
484 def set_refs(self, wire, key, value):
481 repo = self._factory.repo(wire)
485 repo = self._factory.repo(wire)
482 repo.refs[key] = value
486 repo.refs[key] = value
483
487
484 @reraise_safe_exceptions
488 @reraise_safe_exceptions
485 def remove_ref(self, wire, key):
489 def remove_ref(self, wire, key):
486 repo = self._factory.repo(wire)
490 repo = self._factory.repo(wire)
487 del repo.refs[key]
491 del repo.refs[key]
488
492
489 @reraise_safe_exceptions
493 @reraise_safe_exceptions
490 def tree_changes(self, wire, source_id, target_id):
494 def tree_changes(self, wire, source_id, target_id):
491 repo = self._factory.repo(wire)
495 repo = self._factory.repo(wire)
492 source = repo[source_id].tree if source_id else None
496 source = repo[source_id].tree if source_id else None
493 target = repo[target_id].tree
497 target = repo[target_id].tree
494 result = repo.object_store.tree_changes(source, target)
498 result = repo.object_store.tree_changes(source, target)
495 return list(result)
499 return list(result)
496
500
497 @reraise_safe_exceptions
501 @reraise_safe_exceptions
498 def tree_items(self, wire, tree_id):
502 def tree_items(self, wire, tree_id):
499 repo = self._factory.repo(wire)
503 repo = self._factory.repo(wire)
500 tree = repo[tree_id]
504 tree = repo[tree_id]
501
505
502 result = []
506 result = []
503 for item in tree.iteritems():
507 for item in tree.iteritems():
504 item_sha = item.sha
508 item_sha = item.sha
505 item_mode = item.mode
509 item_mode = item.mode
506
510
507 if FILE_MODE(item_mode) == GIT_LINK:
511 if FILE_MODE(item_mode) == GIT_LINK:
508 item_type = "link"
512 item_type = "link"
509 else:
513 else:
510 item_type = repo[item_sha].type_name
514 item_type = repo[item_sha].type_name
511
515
512 result.append((item.path, item_mode, item_sha, item_type))
516 result.append((item.path, item_mode, item_sha, item_type))
513 return result
517 return result
514
518
515 @reraise_safe_exceptions
519 @reraise_safe_exceptions
516 def update_server_info(self, wire):
520 def update_server_info(self, wire):
517 repo = self._factory.repo(wire)
521 repo = self._factory.repo(wire)
518 update_server_info(repo)
522 update_server_info(repo)
519
523
520 @reraise_safe_exceptions
524 @reraise_safe_exceptions
521 def discover_git_version(self):
525 def discover_git_version(self):
522 stdout, _ = self.run_git_command(
526 stdout, _ = self.run_git_command(
523 {}, ['--version'], _bare=True, _safe=True)
527 {}, ['--version'], _bare=True, _safe=True)
524 return stdout
528 prefix = 'git version'
529 if stdout.startswith(prefix):
530 stdout = stdout[len(prefix):]
531 return stdout.strip()
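A small illustration of the prefix-stripping step above; the sample output string is an assumed example of what `git --version` typically prints, not something taken from this changeset.

    # illustrative only -- assumed `git --version` output:
    raw = 'git version 2.7.4\n'
    prefix = 'git version'
    if raw.startswith(prefix):
        raw = raw[len(prefix):]
    version = raw.strip()   # -> '2.7.4'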
525
532
526 @reraise_safe_exceptions
533 @reraise_safe_exceptions
527 def run_git_command(self, wire, cmd, **opts):
534 def run_git_command(self, wire, cmd, **opts):
528 path = wire.get('path', None)
535 path = wire.get('path', None)
529
536
530 if path and os.path.isdir(path):
537 if path and os.path.isdir(path):
531 opts['cwd'] = path
538 opts['cwd'] = path
532
539
533 if '_bare' in opts:
540 if '_bare' in opts:
534 _copts = []
541 _copts = []
535 del opts['_bare']
542 del opts['_bare']
536 else:
543 else:
537 _copts = ['-c', 'core.quotepath=false', ]
544 _copts = ['-c', 'core.quotepath=false', ]
538 safe_call = False
545 safe_call = False
539 if '_safe' in opts:
546 if '_safe' in opts:
540 # no exc on failure
547 # no exc on failure
541 del opts['_safe']
548 del opts['_safe']
542 safe_call = True
549 safe_call = True
543
550
544 gitenv = os.environ.copy()
551 gitenv = os.environ.copy()
545 gitenv.update(opts.pop('extra_env', {}))
552 gitenv.update(opts.pop('extra_env', {}))
546 # need to clean up GIT_DIR!
553 # need to clean up GIT_DIR!
546 # need to clean up GIT_DIR!
547 if 'GIT_DIR' in gitenv:
554 if 'GIT_DIR' in gitenv:
548 del gitenv['GIT_DIR']
555 del gitenv['GIT_DIR']
549 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
556 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
550
557
551 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
558 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
552
559
553 try:
560 try:
554 _opts = {'env': gitenv, 'shell': False}
561 _opts = {'env': gitenv, 'shell': False}
555 _opts.update(opts)
562 _opts.update(opts)
556 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
563 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
557
564
558 return ''.join(p), ''.join(p.error)
565 return ''.join(p), ''.join(p.error)
559 except (EnvironmentError, OSError) as err:
566 except (EnvironmentError, OSError) as err:
560 tb_err = ("Couldn't run git command (%s).\n"
567 tb_err = ("Couldn't run git command (%s).\n"
561 "Original error was:%s\n" % (cmd, err))
568 "Original error was:%s\n" % (cmd, err))
562 log.exception(tb_err)
569 log.exception(tb_err)
563 if safe_call:
570 if safe_call:
564 return '', err
571 return '', err
565 else:
572 else:
566 raise exceptions.VcsException(tb_err)
573 raise exceptions.VcsException(tb_err)
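A hedged usage sketch of run_git_command; the wire path and the `remote` object are placeholders, not values from this changeset. The underscore options are consumed by the method itself and never reach the subprocess.

    # hypothetical call site
    wire = {'path': '/srv/repos/example.git'}
    stdout, stderr = remote.run_git_command(
        wire, ['rev-parse', 'HEAD'],
        _bare=True,   # skip the default '-c core.quotepath=false' options
        _safe=True)   # on failure return ('', err) instead of raising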
567
574
568
575
569 def str_to_dulwich(value):
576 def str_to_dulwich(value):
570 """
577 """
571 Dulwich 0.10.1a requires `unicode` objects to be passed in.
578 Dulwich 0.10.1a requires `unicode` objects to be passed in.
572 """
579 """
573 return value.decode(settings.WIRE_ENCODING)
580 return value.decode(settings.WIRE_ENCODING)
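A minimal sketch of the helper above, assuming WIRE_ENCODING is 'utf-8'; the path is a hypothetical example.

    repo_path = str_to_dulwich('/srv/repos/example')
    # -> u'/srv/repos/example', the `unicode` object Dulwich 0.10.1a expects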
@@ -1,707 +1,723 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import sys
21 import sys
22 import urllib
22 import urllib
23 import urllib2
23 import urllib2
24
24
25 from hgext import largefiles, rebase
25 from hgext import largefiles, rebase
26 from hgext.strip import strip as hgext_strip
26 from hgext.strip import strip as hgext_strip
27 from mercurial import commands
27 from mercurial import commands
28 from mercurial import unionrepo
28 from mercurial import unionrepo
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory
31 from vcsserver.base import RepoFactory, obfuscate_qs
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex, hg_url,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 httpbasicauthhandler, httpdigestauthhandler, httppeer, localrepository,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 match, memctx, exchange, memfilectx, nullrev, patch, peer, revrange, ui,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 Abort, LookupError, RepoError, RepoLookupError, InterventionRequired,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 RequirementError)
37 InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 # force mercurial to only use 1 thread, otherwise it may try to set a
56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
57 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
58 baseui.setconfig('worker', 'numcpus', 1)
59
59
60 # If there is no config for the largefiles extension, we explicitly disable
60 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from the repository's hgrc file. Recent
61 # it here. This overrides settings from the repository's hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from largefile
62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # repo.
63 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
64 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
65 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
66 baseui.setconfig('extensions', 'largefiles', '!')
67
67
68 return baseui
68 return baseui
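A hedged sketch of how the factory above could be fed; the config tuples are hypothetical examples of what might arrive over the wire, not values from this changeset.

    repo_config = [
        ('extensions', 'rebase', ''),
        ('phases', 'publish', 'false'),
    ]
    baseui = make_ui_from_config(repo_config)
    # largefiles was not configured above, so it ends up explicitly disabled:
    assert baseui.config('extensions', 'largefiles') == '!'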
69
69
70
70
71 def reraise_safe_exceptions(func):
71 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
72 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
73 def wrapper(*args, **kwargs):
74 try:
74 try:
75 return func(*args, **kwargs)
75 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
76 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
77 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
78 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
79 raise_from_original(exceptions.LookupException)
80 except RequirementError:
80 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
81 raise_from_original(exceptions.RequirementException)
82 except RepoError:
82 except RepoError:
83 raise_from_original(exceptions.VcsException)
83 raise_from_original(exceptions.VcsException)
84 except LookupError:
84 except LookupError:
85 raise_from_original(exceptions.LookupException)
85 raise_from_original(exceptions.LookupException)
86 except Exception as e:
86 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
87 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
88 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
89 raise_from_original(exceptions.UnhandledException)
90 raise
90 raise
91 return wrapper
91 return wrapper
92
92
93
93
94 def raise_from_original(new_type):
94 def raise_from_original(new_type):
95 """
95 """
96 Raise a new exception type with original args and traceback.
96 Raise a new exception type with original args and traceback.
97 """
97 """
98 _, original, traceback = sys.exc_info()
98 _, original, traceback = sys.exc_info()
99 try:
99 try:
100 raise new_type(*original.args), None, traceback
100 raise new_type(*original.args), None, traceback
101 finally:
101 finally:
102 del traceback
102 del traceback
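A hedged sketch of how raise_from_original is meant to be used inside an except block; the surrounding call is illustrative, while the exception classes are the ones imported in this module.

    try:
        repo = self._factory.repo(wire)
    except RepoError:
        # re-raise as the neutral VcsException, keeping the original
        # args and traceback (Python 2 three-argument raise)
        raise_from_original(exceptions.VcsException)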
103
103
104
104
105 class MercurialFactory(RepoFactory):
105 class MercurialFactory(RepoFactory):
106
106
107 def _create_config(self, config, hooks=True):
107 def _create_config(self, config, hooks=True):
108 if not hooks:
108 if not hooks:
109 hooks_to_clean = frozenset((
109 hooks_to_clean = frozenset((
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
110 'changegroup.repo_size', 'preoutgoing.pre_pull',
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
111 'outgoing.pull_logger', 'prechangegroup.pre_push'))
112 new_config = []
112 new_config = []
113 for section, option, value in config:
113 for section, option, value in config:
114 if section == 'hooks' and option in hooks_to_clean:
114 if section == 'hooks' and option in hooks_to_clean:
115 continue
115 continue
116 new_config.append((section, option, value))
116 new_config.append((section, option, value))
117 config = new_config
117 config = new_config
118
118
119 baseui = make_ui_from_config(config)
119 baseui = make_ui_from_config(config)
120 return baseui
120 return baseui
121
121
122 def _create_repo(self, wire, create):
122 def _create_repo(self, wire, create):
123 baseui = self._create_config(wire["config"])
123 baseui = self._create_config(wire["config"])
124 return localrepository(baseui, wire["path"], create)
124 return localrepository(baseui, wire["path"], create)
125
125
126
126
127 class HgRemote(object):
127 class HgRemote(object):
128
128
129 def __init__(self, factory):
129 def __init__(self, factory):
130 self._factory = factory
130 self._factory = factory
131
131
132 self._bulk_methods = {
132 self._bulk_methods = {
133 "affected_files": self.ctx_files,
133 "affected_files": self.ctx_files,
134 "author": self.ctx_user,
134 "author": self.ctx_user,
135 "branch": self.ctx_branch,
135 "branch": self.ctx_branch,
136 "children": self.ctx_children,
136 "children": self.ctx_children,
137 "date": self.ctx_date,
137 "date": self.ctx_date,
138 "message": self.ctx_description,
138 "message": self.ctx_description,
139 "parents": self.ctx_parents,
139 "parents": self.ctx_parents,
140 "status": self.ctx_status,
140 "status": self.ctx_status,
141 "_file_paths": self.ctx_list,
141 "_file_paths": self.ctx_list,
142 }
142 }
143
143
144 @reraise_safe_exceptions
144 @reraise_safe_exceptions
145 def discover_hg_version(self):
146 from mercurial import util
147 return util.version()
148
149 @reraise_safe_exceptions
145 def archive_repo(self, archive_path, mtime, file_info, kind):
150 def archive_repo(self, archive_path, mtime, file_info, kind):
146 if kind == "tgz":
151 if kind == "tgz":
147 archiver = archival.tarit(archive_path, mtime, "gz")
152 archiver = archival.tarit(archive_path, mtime, "gz")
148 elif kind == "tbz2":
153 elif kind == "tbz2":
149 archiver = archival.tarit(archive_path, mtime, "bz2")
154 archiver = archival.tarit(archive_path, mtime, "bz2")
150 elif kind == 'zip':
155 elif kind == 'zip':
151 archiver = archival.zipit(archive_path, mtime)
156 archiver = archival.zipit(archive_path, mtime)
152 else:
157 else:
153 raise exceptions.ArchiveException(
158 raise exceptions.ArchiveException(
154 'Remote does not support: "%s".' % kind)
159 'Remote does not support: "%s".' % kind)
155
160
156 for f_path, f_mode, f_is_link, f_content in file_info:
161 for f_path, f_mode, f_is_link, f_content in file_info:
157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
162 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 archiver.done()
163 archiver.done()
159
164
160 @reraise_safe_exceptions
165 @reraise_safe_exceptions
161 def bookmarks(self, wire):
166 def bookmarks(self, wire):
162 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
163 return dict(repo._bookmarks)
168 return dict(repo._bookmarks)
164
169
165 @reraise_safe_exceptions
170 @reraise_safe_exceptions
166 def branches(self, wire, normal, closed):
171 def branches(self, wire, normal, closed):
167 repo = self._factory.repo(wire)
172 repo = self._factory.repo(wire)
168 iter_branches = repo.branchmap().iterbranches()
173 iter_branches = repo.branchmap().iterbranches()
169 bt = {}
174 bt = {}
170 for branch_name, _heads, tip, is_closed in iter_branches:
175 for branch_name, _heads, tip, is_closed in iter_branches:
171 if normal and not is_closed:
176 if normal and not is_closed:
172 bt[branch_name] = tip
177 bt[branch_name] = tip
173 if closed and is_closed:
178 if closed and is_closed:
174 bt[branch_name] = tip
179 bt[branch_name] = tip
175
180
176 return bt
181 return bt
177
182
178 @reraise_safe_exceptions
183 @reraise_safe_exceptions
179 def bulk_request(self, wire, rev, pre_load):
184 def bulk_request(self, wire, rev, pre_load):
180 result = {}
185 result = {}
181 for attr in pre_load:
186 for attr in pre_load:
182 try:
187 try:
183 method = self._bulk_methods[attr]
188 method = self._bulk_methods[attr]
184 result[attr] = method(wire, rev)
189 result[attr] = method(wire, rev)
185 except KeyError:
190 except KeyError:
186 raise exceptions.VcsException(
191 raise exceptions.VcsException(
187 'Unknown bulk attribute: "%s"' % attr)
192 'Unknown bulk attribute: "%s"' % attr)
188 return result
193 return result
189
194
190 @reraise_safe_exceptions
195 @reraise_safe_exceptions
191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
196 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
197 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 clone(baseui, source, dest, noupdate=not update_after_clone)
198 clone(baseui, source, dest, noupdate=not update_after_clone)
194
199
195 @reraise_safe_exceptions
200 @reraise_safe_exceptions
196 def commitctx(
201 def commitctx(
197 self, wire, message, parents, commit_time, commit_timezone,
202 self, wire, message, parents, commit_time, commit_timezone,
198 user, files, extra, removed, updated):
203 user, files, extra, removed, updated):
199
204
200 def _filectxfn(_repo, memctx, path):
205 def _filectxfn(_repo, memctx, path):
201 """
206 """
202 Marks the given path as added/changed/removed in the given _repo. This
207 Marks the given path as added/changed/removed in the given _repo. This
203 is used by mercurial's internal commit function.
208 is used by mercurial's internal commit function.
204 """
209 """
205
210
206 # check if this path is removed
211 # check if this path is removed
207 if path in removed:
212 if path in removed:
208 # returning None is a way to mark node for removal
213 # returning None is a way to mark node for removal
209 return None
214 return None
210
215
211 # check if this path is added
216 # check if this path is added
212 for node in updated:
217 for node in updated:
213 if node['path'] == path:
218 if node['path'] == path:
214 return memfilectx(
219 return memfilectx(
215 _repo,
220 _repo,
216 path=node['path'],
221 path=node['path'],
217 data=node['content'],
222 data=node['content'],
218 islink=False,
223 islink=False,
219 isexec=bool(node['mode'] & stat.S_IXUSR),
224 isexec=bool(node['mode'] & stat.S_IXUSR),
220 copied=False,
225 copied=False,
221 memctx=memctx)
226 memctx=memctx)
222
227
223 raise exceptions.AbortException(
228 raise exceptions.AbortException(
224 "Given path hasn't been marked as added, "
229 "Given path hasn't been marked as added, "
225 "changed or removed (%s)" % path)
230 "changed or removed (%s)" % path)
226
231
227 repo = self._factory.repo(wire)
232 repo = self._factory.repo(wire)
228
233
229 commit_ctx = memctx(
234 commit_ctx = memctx(
230 repo=repo,
235 repo=repo,
231 parents=parents,
236 parents=parents,
232 text=message,
237 text=message,
233 files=files,
238 files=files,
234 filectxfn=_filectxfn,
239 filectxfn=_filectxfn,
235 user=user,
240 user=user,
236 date=(commit_time, commit_timezone),
241 date=(commit_time, commit_timezone),
237 extra=extra)
242 extra=extra)
238
243
239 n = repo.commitctx(commit_ctx)
244 n = repo.commitctx(commit_ctx)
240 new_id = hex(n)
245 new_id = hex(n)
241
246
242 return new_id
247 return new_id
243
248
244 @reraise_safe_exceptions
249 @reraise_safe_exceptions
245 def ctx_branch(self, wire, revision):
250 def ctx_branch(self, wire, revision):
246 repo = self._factory.repo(wire)
251 repo = self._factory.repo(wire)
247 ctx = repo[revision]
252 ctx = repo[revision]
248 return ctx.branch()
253 return ctx.branch()
249
254
250 @reraise_safe_exceptions
255 @reraise_safe_exceptions
251 def ctx_children(self, wire, revision):
256 def ctx_children(self, wire, revision):
252 repo = self._factory.repo(wire)
257 repo = self._factory.repo(wire)
253 ctx = repo[revision]
258 ctx = repo[revision]
254 return [child.rev() for child in ctx.children()]
259 return [child.rev() for child in ctx.children()]
255
260
256 @reraise_safe_exceptions
261 @reraise_safe_exceptions
257 def ctx_date(self, wire, revision):
262 def ctx_date(self, wire, revision):
258 repo = self._factory.repo(wire)
263 repo = self._factory.repo(wire)
259 ctx = repo[revision]
264 ctx = repo[revision]
260 return ctx.date()
265 return ctx.date()
261
266
262 @reraise_safe_exceptions
267 @reraise_safe_exceptions
263 def ctx_description(self, wire, revision):
268 def ctx_description(self, wire, revision):
264 repo = self._factory.repo(wire)
269 repo = self._factory.repo(wire)
265 ctx = repo[revision]
270 ctx = repo[revision]
266 return ctx.description()
271 return ctx.description()
267
272
268 @reraise_safe_exceptions
273 @reraise_safe_exceptions
269 def ctx_diff(
274 def ctx_diff(
270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
275 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 repo = self._factory.repo(wire)
276 repo = self._factory.repo(wire)
272 ctx = repo[revision]
277 ctx = repo[revision]
273 result = ctx.diff(
278 result = ctx.diff(
274 git=git, ignore_whitespace=ignore_whitespace, context=context)
279 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 return list(result)
280 return list(result)
276
281
277 @reraise_safe_exceptions
282 @reraise_safe_exceptions
278 def ctx_files(self, wire, revision):
283 def ctx_files(self, wire, revision):
279 repo = self._factory.repo(wire)
284 repo = self._factory.repo(wire)
280 ctx = repo[revision]
285 ctx = repo[revision]
281 return ctx.files()
286 return ctx.files()
282
287
283 @reraise_safe_exceptions
288 @reraise_safe_exceptions
284 def ctx_list(self, path, revision):
289 def ctx_list(self, path, revision):
285 repo = self._factory.repo(path)
290 repo = self._factory.repo(path)
286 ctx = repo[revision]
291 ctx = repo[revision]
287 return list(ctx)
292 return list(ctx)
288
293
289 @reraise_safe_exceptions
294 @reraise_safe_exceptions
290 def ctx_parents(self, wire, revision):
295 def ctx_parents(self, wire, revision):
291 repo = self._factory.repo(wire)
296 repo = self._factory.repo(wire)
292 ctx = repo[revision]
297 ctx = repo[revision]
293 return [parent.rev() for parent in ctx.parents()]
298 return [parent.rev() for parent in ctx.parents()]
294
299
295 @reraise_safe_exceptions
300 @reraise_safe_exceptions
296 def ctx_substate(self, wire, revision):
301 def ctx_substate(self, wire, revision):
297 repo = self._factory.repo(wire)
302 repo = self._factory.repo(wire)
298 ctx = repo[revision]
303 ctx = repo[revision]
299 return ctx.substate
304 return ctx.substate
300
305
301 @reraise_safe_exceptions
306 @reraise_safe_exceptions
302 def ctx_status(self, wire, revision):
307 def ctx_status(self, wire, revision):
303 repo = self._factory.repo(wire)
308 repo = self._factory.repo(wire)
304 ctx = repo[revision]
309 ctx = repo[revision]
305 status = repo[ctx.p1().node()].status(other=ctx.node())
310 status = repo[ctx.p1().node()].status(other=ctx.node())
306 # the status object (an odd, custom named tuple in mercurial) is not
311 # the status object (an odd, custom named tuple in mercurial) is not
307 # correctly serializable via Pyro, so we make it a list, as the underlying
312 # correctly serializable via Pyro, so we make it a list, as the underlying
308 # API expects this to be a list
313 # API expects this to be a list
309 return list(status)
314 return list(status)
310
315
311 @reraise_safe_exceptions
316 @reraise_safe_exceptions
312 def ctx_user(self, wire, revision):
317 def ctx_user(self, wire, revision):
313 repo = self._factory.repo(wire)
318 repo = self._factory.repo(wire)
314 ctx = repo[revision]
319 ctx = repo[revision]
315 return ctx.user()
320 return ctx.user()
316
321
317 @reraise_safe_exceptions
322 @reraise_safe_exceptions
318 def check_url(self, url, config):
323 def check_url(self, url, config):
319 log.info("Checking URL for remote cloning/import: %s", url)
320 _proto = None
324 _proto = None
321 if '+' in url[:url.find('://')]:
325 if '+' in url[:url.find('://')]:
322 _proto = url[0:url.find('+')]
326 _proto = url[0:url.find('+')]
323 url = url[url.find('+') + 1:]
327 url = url[url.find('+') + 1:]
324 handlers = []
328 handlers = []
325 url_obj = hg_url(url)
329 url_obj = url_parser(url)
326 test_uri, authinfo = url_obj.authinfo()
330 test_uri, authinfo = url_obj.authinfo()
327 url_obj.passwd = '*****'
331 url_obj.passwd = '*****'
332 url_obj.query = obfuscate_qs(url_obj.query)
333
328 cleaned_uri = str(url_obj)
334 cleaned_uri = str(url_obj)
335 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
329
336
330 if authinfo:
337 if authinfo:
331 # create a password manager
338 # create a password manager
332 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
339 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
333 passmgr.add_password(*authinfo)
340 passmgr.add_password(*authinfo)
334
341
335 handlers.extend((httpbasicauthhandler(passmgr),
342 handlers.extend((httpbasicauthhandler(passmgr),
336 httpdigestauthhandler(passmgr)))
343 httpdigestauthhandler(passmgr)))
337
344
338 o = urllib2.build_opener(*handlers)
345 o = urllib2.build_opener(*handlers)
339 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
346 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
340 ('Accept', 'application/mercurial-0.1')]
347 ('Accept', 'application/mercurial-0.1')]
341
348
342 q = {"cmd": 'between'}
349 q = {"cmd": 'between'}
343 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
350 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
344 qs = '?%s' % urllib.urlencode(q)
351 qs = '?%s' % urllib.urlencode(q)
345 cu = "%s%s" % (test_uri, qs)
352 cu = "%s%s" % (test_uri, qs)
346 req = urllib2.Request(cu, None, {})
353 req = urllib2.Request(cu, None, {})
347
354
348 try:
355 try:
349 log.debug("Trying to open URL %s", url)
356 log.debug("Trying to open URL %s", cleaned_uri)
350 resp = o.open(req)
357 resp = o.open(req)
351 if resp.code != 200:
358 if resp.code != 200:
352 raise exceptions.URLError('Return Code is not 200')
359 raise exceptions.URLError('Return Code is not 200')
353 except Exception as e:
360 except Exception as e:
354 log.warning("URL cannot be opened: %s", url, exc_info=True)
361 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
355 # means it cannot be cloned
362 # means it cannot be cloned
356 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
363 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
357
364
358 # now check if it's a proper hg repo, but don't do it for svn
365 # now check if it's a proper hg repo, but don't do it for svn
359 try:
366 try:
360 if _proto == 'svn':
367 if _proto == 'svn':
361 pass
368 pass
362 else:
369 else:
363 # check for pure hg repos
370 # check for pure hg repos
364 log.debug(
371 log.debug(
365 "Verifying if URL is a Mercurial repository: %s", url)
372 "Verifying if URL is a Mercurial repository: %s",
373 cleaned_uri)
366 httppeer(make_ui_from_config(config), url).lookup('tip')
374 httppeer(make_ui_from_config(config), url).lookup('tip')
367 except Exception as e:
375 except Exception as e:
368 log.warning("URL is not a valid Mercurial repository: %s", url)
376 log.warning("URL is not a valid Mercurial repository: %s",
377 cleaned_uri)
369 raise exceptions.URLError(
378 raise exceptions.URLError(
370 "url [%s] does not look like an hg repo org_exc: %s"
379 "url [%s] does not look like an hg repo org_exc: %s"
371 % (cleaned_uri, e))
380 % (cleaned_uri, e))
372
381
373 log.info("URL is a valid Mercurial repository: %s", url)
382 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
374 return True
383 return True
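The logging changes above scrub credentials before anything is written to the log: the password becomes `*****` and the query string goes through `obfuscate_qs` from `vcsserver.base`. That helper's body is not part of this changeset; the following is only a sketch of what such an obfuscation step could look like, under the assumption that it masks sensitive parameter values.

    import urllib
    import urlparse

    def obfuscate_qs_sketch(query_string):
        # mask values of sensitive-looking parameters, keep the rest readable
        pairs = urlparse.parse_qsl(query_string or '', keep_blank_values=True)
        cleaned = [(k, '*****' if k.lower() in ('password', 'auth_token') else v)
                   for k, v in pairs]
        return urllib.urlencode(cleaned)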
375
384
376 @reraise_safe_exceptions
385 @reraise_safe_exceptions
377 def diff(
386 def diff(
378 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
387 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
379 context):
388 context):
380 repo = self._factory.repo(wire)
389 repo = self._factory.repo(wire)
381
390
382 if file_filter:
391 if file_filter:
383 filter = match(file_filter[0], '', [file_filter[1]])
392 filter = match(file_filter[0], '', [file_filter[1]])
384 else:
393 else:
385 filter = file_filter
394 filter = file_filter
386 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
395 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
387
396
388 try:
397 try:
389 return "".join(patch.diff(
398 return "".join(patch.diff(
390 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
399 repo, node1=rev1, node2=rev2, match=filter, opts=opts))
391 except RepoLookupError:
400 except RepoLookupError:
392 raise exceptions.LookupException()
401 raise exceptions.LookupException()
393
402
394 @reraise_safe_exceptions
403 @reraise_safe_exceptions
395 def file_history(self, wire, revision, path, limit):
404 def file_history(self, wire, revision, path, limit):
396 repo = self._factory.repo(wire)
405 repo = self._factory.repo(wire)
397
406
398 ctx = repo[revision]
407 ctx = repo[revision]
399 fctx = ctx.filectx(path)
408 fctx = ctx.filectx(path)
400
409
401 def history_iter():
410 def history_iter():
402 limit_rev = fctx.rev()
411 limit_rev = fctx.rev()
403 for obj in reversed(list(fctx.filelog())):
412 for obj in reversed(list(fctx.filelog())):
404 obj = fctx.filectx(obj)
413 obj = fctx.filectx(obj)
405 if limit_rev >= obj.rev():
414 if limit_rev >= obj.rev():
406 yield obj
415 yield obj
407
416
408 history = []
417 history = []
409 for cnt, obj in enumerate(history_iter()):
418 for cnt, obj in enumerate(history_iter()):
410 if limit and cnt >= limit:
419 if limit and cnt >= limit:
411 break
420 break
412 history.append(hex(obj.node()))
421 history.append(hex(obj.node()))
413
422
414 return [x for x in history]
423 return [x for x in history]
415
424
416 @reraise_safe_exceptions
425 @reraise_safe_exceptions
417 def file_history_untill(self, wire, revision, path, limit):
426 def file_history_untill(self, wire, revision, path, limit):
418 repo = self._factory.repo(wire)
427 repo = self._factory.repo(wire)
419 ctx = repo[revision]
428 ctx = repo[revision]
420 fctx = ctx.filectx(path)
429 fctx = ctx.filectx(path)
421
430
422 file_log = list(fctx.filelog())
431 file_log = list(fctx.filelog())
423 if limit:
432 if limit:
424 # Limit to the last n items
433 # Limit to the last n items
425 file_log = file_log[-limit:]
434 file_log = file_log[-limit:]
426
435
427 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
436 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
428
437
429 @reraise_safe_exceptions
438 @reraise_safe_exceptions
430 def fctx_annotate(self, wire, revision, path):
439 def fctx_annotate(self, wire, revision, path):
431 repo = self._factory.repo(wire)
440 repo = self._factory.repo(wire)
432 ctx = repo[revision]
441 ctx = repo[revision]
433 fctx = ctx.filectx(path)
442 fctx = ctx.filectx(path)
434
443
435 result = []
444 result = []
436 for i, annotate_data in enumerate(fctx.annotate()):
445 for i, annotate_data in enumerate(fctx.annotate()):
437 ln_no = i + 1
446 ln_no = i + 1
438 sha = hex(annotate_data[0].node())
447 sha = hex(annotate_data[0].node())
439 result.append((ln_no, sha, annotate_data[1]))
448 result.append((ln_no, sha, annotate_data[1]))
440 return result
449 return result
441
450
442 @reraise_safe_exceptions
451 @reraise_safe_exceptions
443 def fctx_data(self, wire, revision, path):
452 def fctx_data(self, wire, revision, path):
444 repo = self._factory.repo(wire)
453 repo = self._factory.repo(wire)
445 ctx = repo[revision]
454 ctx = repo[revision]
446 fctx = ctx.filectx(path)
455 fctx = ctx.filectx(path)
447 return fctx.data()
456 return fctx.data()
448
457
449 @reraise_safe_exceptions
458 @reraise_safe_exceptions
450 def fctx_flags(self, wire, revision, path):
459 def fctx_flags(self, wire, revision, path):
451 repo = self._factory.repo(wire)
460 repo = self._factory.repo(wire)
452 ctx = repo[revision]
461 ctx = repo[revision]
453 fctx = ctx.filectx(path)
462 fctx = ctx.filectx(path)
454 return fctx.flags()
463 return fctx.flags()
455
464
456 @reraise_safe_exceptions
465 @reraise_safe_exceptions
457 def fctx_size(self, wire, revision, path):
466 def fctx_size(self, wire, revision, path):
458 repo = self._factory.repo(wire)
467 repo = self._factory.repo(wire)
459 ctx = repo[revision]
468 ctx = repo[revision]
460 fctx = ctx.filectx(path)
469 fctx = ctx.filectx(path)
461 return fctx.size()
470 return fctx.size()
462
471
463 @reraise_safe_exceptions
472 @reraise_safe_exceptions
464 def get_all_commit_ids(self, wire, name):
473 def get_all_commit_ids(self, wire, name):
465 repo = self._factory.repo(wire)
474 repo = self._factory.repo(wire)
466 revs = repo.filtered(name).changelog.index
475 revs = repo.filtered(name).changelog.index
467 return map(lambda x: hex(x[7]), revs)[:-1]
476 return map(lambda x: hex(x[7]), revs)[:-1]
468
477
469 @reraise_safe_exceptions
478 @reraise_safe_exceptions
470 def get_config_value(self, wire, section, name, untrusted=False):
479 def get_config_value(self, wire, section, name, untrusted=False):
471 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
472 return repo.ui.config(section, name, untrusted=untrusted)
481 return repo.ui.config(section, name, untrusted=untrusted)
473
482
474 @reraise_safe_exceptions
483 @reraise_safe_exceptions
475 def get_config_bool(self, wire, section, name, untrusted=False):
484 def get_config_bool(self, wire, section, name, untrusted=False):
476 repo = self._factory.repo(wire)
485 repo = self._factory.repo(wire)
477 return repo.ui.configbool(section, name, untrusted=untrusted)
486 return repo.ui.configbool(section, name, untrusted=untrusted)
478
487
479 @reraise_safe_exceptions
488 @reraise_safe_exceptions
480 def get_config_list(self, wire, section, name, untrusted=False):
489 def get_config_list(self, wire, section, name, untrusted=False):
481 repo = self._factory.repo(wire)
490 repo = self._factory.repo(wire)
482 return repo.ui.configlist(section, name, untrusted=untrusted)
491 return repo.ui.configlist(section, name, untrusted=untrusted)
483
492
484 @reraise_safe_exceptions
493 @reraise_safe_exceptions
485 def is_large_file(self, wire, path):
494 def is_large_file(self, wire, path):
486 return largefiles.lfutil.isstandin(path)
495 return largefiles.lfutil.isstandin(path)
487
496
488 @reraise_safe_exceptions
497 @reraise_safe_exceptions
489 def in_store(self, wire, sha):
498 def in_store(self, wire, sha):
490 repo = self._factory.repo(wire)
499 repo = self._factory.repo(wire)
491 return largefiles.lfutil.instore(repo, sha)
500 return largefiles.lfutil.instore(repo, sha)
492
501
493 @reraise_safe_exceptions
502 @reraise_safe_exceptions
494 def in_user_cache(self, wire, sha):
503 def in_user_cache(self, wire, sha):
495 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
496 return largefiles.lfutil.inusercache(repo.ui, sha)
505 return largefiles.lfutil.inusercache(repo.ui, sha)
497
506
498 @reraise_safe_exceptions
507 @reraise_safe_exceptions
499 def store_path(self, wire, sha):
508 def store_path(self, wire, sha):
500 repo = self._factory.repo(wire)
509 repo = self._factory.repo(wire)
501 return largefiles.lfutil.storepath(repo, sha)
510 return largefiles.lfutil.storepath(repo, sha)
502
511
503 @reraise_safe_exceptions
512 @reraise_safe_exceptions
504 def link(self, wire, sha, path):
513 def link(self, wire, sha, path):
505 repo = self._factory.repo(wire)
514 repo = self._factory.repo(wire)
506 largefiles.lfutil.link(
515 largefiles.lfutil.link(
507 largefiles.lfutil.usercachepath(repo.ui, sha), path)
516 largefiles.lfutil.usercachepath(repo.ui, sha), path)
508
517
509 @reraise_safe_exceptions
518 @reraise_safe_exceptions
510 def localrepository(self, wire, create=False):
519 def localrepository(self, wire, create=False):
511 self._factory.repo(wire, create=create)
520 self._factory.repo(wire, create=create)
512
521
513 @reraise_safe_exceptions
522 @reraise_safe_exceptions
514 def lookup(self, wire, revision, both):
523 def lookup(self, wire, revision, both):
515 # TODO Paris: Ugly hack to "deserialize" long for msgpack
524 # TODO Paris: Ugly hack to "deserialize" long for msgpack
516 if isinstance(revision, float):
525 if isinstance(revision, float):
517 revision = long(revision)
526 revision = long(revision)
518 repo = self._factory.repo(wire)
527 repo = self._factory.repo(wire)
519 try:
528 try:
520 ctx = repo[revision]
529 ctx = repo[revision]
521 except RepoLookupError:
530 except RepoLookupError:
522 raise exceptions.LookupException(revision)
531 raise exceptions.LookupException(revision)
523 except LookupError as e:
532 except LookupError as e:
524 raise exceptions.LookupException(e.name)
533 raise exceptions.LookupException(e.name)
525
534
526 if not both:
535 if not both:
527 return ctx.hex()
536 return ctx.hex()
528
537
529 ctx = repo[ctx.hex()]
538 ctx = repo[ctx.hex()]
530 return ctx.hex(), ctx.rev()
539 return ctx.hex(), ctx.rev()
531
540
532 @reraise_safe_exceptions
541 @reraise_safe_exceptions
533 def pull(self, wire, url, commit_ids=None):
542 def pull(self, wire, url, commit_ids=None):
534 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
535 remote = peer(repo, {}, url)
544 remote = peer(repo, {}, url)
536 if commit_ids:
545 if commit_ids:
537 commit_ids = [bin(commit_id) for commit_id in commit_ids]
546 commit_ids = [bin(commit_id) for commit_id in commit_ids]
538
547
539 return exchange.pull(
548 return exchange.pull(
540 repo, remote, heads=commit_ids, force=None).cgresult
549 repo, remote, heads=commit_ids, force=None).cgresult
541
550
542 @reraise_safe_exceptions
551 @reraise_safe_exceptions
543 def revision(self, wire, rev):
552 def revision(self, wire, rev):
544 repo = self._factory.repo(wire)
553 repo = self._factory.repo(wire)
545 ctx = repo[rev]
554 ctx = repo[rev]
546 return ctx.rev()
555 return ctx.rev()
547
556
548 @reraise_safe_exceptions
557 @reraise_safe_exceptions
549 def rev_range(self, wire, filter):
558 def rev_range(self, wire, filter):
550 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
551 revisions = [rev for rev in revrange(repo, filter)]
560 revisions = [rev for rev in revrange(repo, filter)]
552 return revisions
561 return revisions
553
562
554 @reraise_safe_exceptions
563 @reraise_safe_exceptions
555 def rev_range_hash(self, wire, node):
564 def rev_range_hash(self, wire, node):
556 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
557
566
558 def get_revs(repo, rev_opt):
567 def get_revs(repo, rev_opt):
559 if rev_opt:
568 if rev_opt:
560 revs = revrange(repo, rev_opt)
569 revs = revrange(repo, rev_opt)
561 if len(revs) == 0:
570 if len(revs) == 0:
562 return (nullrev, nullrev)
571 return (nullrev, nullrev)
563 return max(revs), min(revs)
572 return max(revs), min(revs)
564 else:
573 else:
565 return len(repo) - 1, 0
574 return len(repo) - 1, 0
566
575
567 stop, start = get_revs(repo, [node + ':'])
576 stop, start = get_revs(repo, [node + ':'])
568 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
577 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
569 return revs
578 return revs
570
579
571 @reraise_safe_exceptions
580 @reraise_safe_exceptions
572 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
581 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
573 other_path = kwargs.pop('other_path', None)
582 other_path = kwargs.pop('other_path', None)
574
583
575 # case when we want to compare two independent repositories
584 # case when we want to compare two independent repositories
576 if other_path and other_path != wire["path"]:
585 if other_path and other_path != wire["path"]:
577 baseui = self._factory._create_config(wire["config"])
586 baseui = self._factory._create_config(wire["config"])
578 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
587 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
579 else:
588 else:
580 repo = self._factory.repo(wire)
589 repo = self._factory.repo(wire)
581 return list(repo.revs(rev_spec, *args))
590 return list(repo.revs(rev_spec, *args))
582
591
583 @reraise_safe_exceptions
592 @reraise_safe_exceptions
584 def strip(self, wire, revision, update, backup):
593 def strip(self, wire, revision, update, backup):
585 repo = self._factory.repo(wire)
594 repo = self._factory.repo(wire)
586 ctx = repo[revision]
595 ctx = repo[revision]
587 hgext_strip(
596 hgext_strip(
588 repo.baseui, repo, ctx.node(), update=update, backup=backup)
597 repo.baseui, repo, ctx.node(), update=update, backup=backup)
589
598
590 @reraise_safe_exceptions
599 @reraise_safe_exceptions
591 def tag(self, wire, name, revision, message, local, user,
600 def tag(self, wire, name, revision, message, local, user,
592 tag_time, tag_timezone):
601 tag_time, tag_timezone):
593 repo = self._factory.repo(wire)
602 repo = self._factory.repo(wire)
594 ctx = repo[revision]
603 ctx = repo[revision]
595 node = ctx.node()
604 node = ctx.node()
596
605
597 date = (tag_time, tag_timezone)
606 date = (tag_time, tag_timezone)
598 try:
607 try:
599 repo.tag(name, node, message, local, user, date)
608 repo.tag(name, node, message, local, user, date)
600 except Abort:
609 except Abort:
601 log.exception("Tag operation aborted")
610 log.exception("Tag operation aborted")
602 raise exceptions.AbortException()
611 raise exceptions.AbortException()
603
612
604 @reraise_safe_exceptions
613 @reraise_safe_exceptions
605 def tags(self, wire):
614 def tags(self, wire):
606 repo = self._factory.repo(wire)
615 repo = self._factory.repo(wire)
607 return repo.tags()
616 return repo.tags()
608
617
609 @reraise_safe_exceptions
618 @reraise_safe_exceptions
610 def update(self, wire, node=None, clean=False):
619 def update(self, wire, node=None, clean=False):
611 repo = self._factory.repo(wire)
620 repo = self._factory.repo(wire)
612 baseui = self._factory._create_config(wire['config'])
621 baseui = self._factory._create_config(wire['config'])
613 commands.update(baseui, repo, node=node, clean=clean)
622 commands.update(baseui, repo, node=node, clean=clean)
614
623
615 @reraise_safe_exceptions
624 @reraise_safe_exceptions
616 def identify(self, wire):
625 def identify(self, wire):
617 repo = self._factory.repo(wire)
626 repo = self._factory.repo(wire)
618 baseui = self._factory._create_config(wire['config'])
627 baseui = self._factory._create_config(wire['config'])
619 output = io.BytesIO()
628 output = io.BytesIO()
620 baseui.write = output.write
629 baseui.write = output.write
621 # This is required to get a full node id
630 # This is required to get a full node id
622 baseui.debugflag = True
631 baseui.debugflag = True
623 commands.identify(baseui, repo, id=True)
632 commands.identify(baseui, repo, id=True)
624
633
625 return output.getvalue()
634 return output.getvalue()
626
635
627 @reraise_safe_exceptions
636 @reraise_safe_exceptions
628 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
637 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
629 hooks=True):
638 hooks=True):
630 repo = self._factory.repo(wire)
639 repo = self._factory.repo(wire)
631 baseui = self._factory._create_config(wire['config'], hooks=hooks)
640 baseui = self._factory._create_config(wire['config'], hooks=hooks)
632
641
633 # Mercurial internally has a lot of logic that checks ONLY if
642 # Mercurial internally has a lot of logic that checks ONLY if
634 # an option is defined, so we only pass options that are actually set
643 # an option is defined, so we only pass options that are actually set
635 opts = {}
644 opts = {}
636 if bookmark:
645 if bookmark:
637 opts['bookmark'] = bookmark
646 opts['bookmark'] = bookmark
638 if branch:
647 if branch:
639 opts['branch'] = branch
648 opts['branch'] = branch
640 if revision:
649 if revision:
641 opts['rev'] = revision
650 opts['rev'] = revision
642
651
643 commands.pull(baseui, repo, source, **opts)
652 commands.pull(baseui, repo, source, **opts)
644
653
645 @reraise_safe_exceptions
654 @reraise_safe_exceptions
646 def heads(self, wire, branch=None):
655 def heads(self, wire, branch=None):
647 repo = self._factory.repo(wire)
656 repo = self._factory.repo(wire)
648 baseui = self._factory._create_config(wire['config'])
657 baseui = self._factory._create_config(wire['config'])
649 output = io.BytesIO()
658 output = io.BytesIO()
650
659
651 def write(data, **unused_kwargs):
660 def write(data, **unused_kwargs):
652 output.write(data)
661 output.write(data)
653
662
654 baseui.write = write
663 baseui.write = write
655 if branch:
664 if branch:
656 args = [branch]
665 args = [branch]
657 else:
666 else:
658 args = []
667 args = []
659 commands.heads(baseui, repo, template='{node} ', *args)
668 commands.heads(baseui, repo, template='{node} ', *args)
660
669
661 return output.getvalue()
670 return output.getvalue()
662
671
663 @reraise_safe_exceptions
672 @reraise_safe_exceptions
664 def ancestor(self, wire, revision1, revision2):
673 def ancestor(self, wire, revision1, revision2):
665 repo = self._factory.repo(wire)
674 repo = self._factory.repo(wire)
666 baseui = self._factory._create_config(wire['config'])
675 baseui = self._factory._create_config(wire['config'])
667 output = io.BytesIO()
676 output = io.BytesIO()
668 baseui.write = output.write
677 baseui.write = output.write
669 commands.debugancestor(baseui, repo, revision1, revision2)
678 commands.debugancestor(baseui, repo, revision1, revision2)
670
679
671 return output.getvalue()
680 return output.getvalue()
672
681
673 @reraise_safe_exceptions
682 @reraise_safe_exceptions
674 def push(self, wire, revisions, dest_path, hooks=True,
683 def push(self, wire, revisions, dest_path, hooks=True,
675 push_branches=False):
684 push_branches=False):
676 repo = self._factory.repo(wire)
685 repo = self._factory.repo(wire)
677 baseui = self._factory._create_config(wire['config'], hooks=hooks)
686 baseui = self._factory._create_config(wire['config'], hooks=hooks)
678 commands.push(baseui, repo, dest=dest_path, rev=revisions,
687 commands.push(baseui, repo, dest=dest_path, rev=revisions,
679 new_branch=push_branches)
688 new_branch=push_branches)
680
689
681 @reraise_safe_exceptions
690 @reraise_safe_exceptions
682 def merge(self, wire, revision):
691 def merge(self, wire, revision):
683 repo = self._factory.repo(wire)
692 repo = self._factory.repo(wire)
684 baseui = self._factory._create_config(wire['config'])
693 baseui = self._factory._create_config(wire['config'])
685 repo.ui.setconfig('ui', 'merge', 'internal:dump')
694 repo.ui.setconfig('ui', 'merge', 'internal:dump')
695
696 # If subrepositories are used, mercurial prompts the user on merge
697 # conflicts or differing subrepository sources. By setting the
698 # interactive flag to `False`, mercurial doesn't prompt the user but
699 # instead uses a default value.
700 repo.ui.setconfig('ui', 'interactive', False)
701
686 commands.merge(baseui, repo, rev=revision)
702 commands.merge(baseui, repo, rev=revision)
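A hedged illustration of calling the merge method above; the `remote` object, the wire dict, and the revision are placeholders.

    wire = {'path': '/srv/repos/example-hg', 'config': []}   # hypothetical
    remote.merge(wire, revision='default')
    # conflicts are written out via the 'internal:dump' merge tool, and with
    # ui.interactive set to False subrepo prompts fall back to their defaults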
687
703
688 @reraise_safe_exceptions
704 @reraise_safe_exceptions
689 def commit(self, wire, message, username):
705 def commit(self, wire, message, username):
690 repo = self._factory.repo(wire)
706 repo = self._factory.repo(wire)
691 baseui = self._factory._create_config(wire['config'])
707 baseui = self._factory._create_config(wire['config'])
692 repo.ui.setconfig('ui', 'username', username)
708 repo.ui.setconfig('ui', 'username', username)
693 commands.commit(baseui, repo, message=message)
709 commands.commit(baseui, repo, message=message)
694
710
695 @reraise_safe_exceptions
711 @reraise_safe_exceptions
696 def rebase(self, wire, source=None, dest=None, abort=False):
712 def rebase(self, wire, source=None, dest=None, abort=False):
697 repo = self._factory.repo(wire)
713 repo = self._factory.repo(wire)
698 baseui = self._factory._create_config(wire['config'])
714 baseui = self._factory._create_config(wire['config'])
699 repo.ui.setconfig('ui', 'merge', 'internal:dump')
715 repo.ui.setconfig('ui', 'merge', 'internal:dump')
700 rebase.rebase(
716 rebase.rebase(
701 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
717 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
702
718
703 @reraise_safe_exceptions
719 @reraise_safe_exceptions
704 def bookmark(self, wire, bookmark, revision=None):
720 def bookmark(self, wire, bookmark, revision=None):
705 repo = self._factory.repo(wire)
721 repo = self._factory.repo(wire)
706 baseui = self._factory._create_config(wire['config'])
722 baseui = self._factory._create_config(wire['config'])
707 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
723 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,61 +1,62 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 import mercurial.demandimport
23 import mercurial.demandimport
24 # patch demandimport, due to bug in mercurial when it always triggers
24 # patch demandimport, due to bug in mercurial when it always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38
39
39 from mercurial.commands import clone, nullid, pull
40 from mercurial.commands import clone, nullid, pull
40 from mercurial.context import memctx, memfilectx
41 from mercurial.context import memctx, memfilectx
41 from mercurial.error import (
42 from mercurial.error import (
42 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
43 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
43 RequirementError)
44 RequirementError)
44 from mercurial.hgweb import hgweb_mod
45 from mercurial.hgweb import hgweb_mod
45 from mercurial.localrepo import localrepository
46 from mercurial.localrepo import localrepository
46 from mercurial.match import match
47 from mercurial.match import match
47 from mercurial.mdiff import diffopts
48 from mercurial.mdiff import diffopts
48 from mercurial.node import bin, hex
49 from mercurial.node import bin, hex
49 from mercurial.encoding import tolocal
50 from mercurial.encoding import tolocal
50 from mercurial.discovery import findcommonoutgoing
51 from mercurial.discovery import findcommonoutgoing
51 from mercurial.hg import peer
52 from mercurial.hg import peer
52 from mercurial.httppeer import httppeer
53 from mercurial.httppeer import httppeer
53 from mercurial.util import url as hg_url
54 from mercurial.util import url as hg_url
54 from mercurial.scmutil import revrange
55 from mercurial.scmutil import revrange
55 from mercurial.node import nullrev
56 from mercurial.node import nullrev
56 from mercurial import exchange
57 from mercurial import exchange
57 from hgext import largefiles
58 from hgext import largefiles
58
59
59 # these auth handlers are patched for a python 2.6.5 bug causing
60 # these auth handlers are patched for a python 2.6.5 bug causing
60 # infinite looping when given invalid resources
61 # infinite looping when given invalid resources
61 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
62 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
@@ -1,60 +1,134 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Adjustments to Mercurial
19 Adjustments to Mercurial
20
20
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
21 Intentionally kept separate from `hgcompat` and `hg`, so that these patches can
22 be applied without having to import the whole Mercurial machinery.
22 be applied without having to import the whole Mercurial machinery.
23
23
24 Imports are function local, so that just importing this module does not cause
24 Imports are function local, so that just importing this module does not cause
25 side-effects other than these functions being defined.
25 side-effects other than these functions being defined.
26 """
26 """
27
27
28 import logging
28 import logging
29
29
30
30
31 def patch_largefiles_capabilities():
31 def patch_largefiles_capabilities():
32 """
32 """
33 Patches the capabilities function in the largefiles extension.
33 Patches the capabilities function in the largefiles extension.
34 """
34 """
35 from vcsserver import hgcompat
35 from vcsserver import hgcompat
36 lfproto = hgcompat.largefiles.proto
36 lfproto = hgcompat.largefiles.proto
37 wrapper = _dynamic_capabilities_wrapper(
37 wrapper = _dynamic_capabilities_wrapper(
38 lfproto, hgcompat.extensions.extensions)
38 lfproto, hgcompat.extensions.extensions)
39 lfproto.capabilities = wrapper
39 lfproto.capabilities = wrapper
40
40
41
41
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
42 def _dynamic_capabilities_wrapper(lfproto, extensions):
43
43
44 wrapped_capabilities = lfproto.capabilities
44 wrapped_capabilities = lfproto.capabilities
45 logger = logging.getLogger('vcsserver.hg')
45 logger = logging.getLogger('vcsserver.hg')
46
46
47 def _dynamic_capabilities(repo, proto):
47 def _dynamic_capabilities(repo, proto):
48 """
48 """
49 Adds dynamic behavior, so that the capability is only added if the
49 Adds dynamic behavior, so that the capability is only added if the
50 extension is enabled in the current ui object.
50 extension is enabled in the current ui object.
51 """
51 """
52 if 'largefiles' in dict(extensions(repo.ui)):
52 if 'largefiles' in dict(extensions(repo.ui)):
53 logger.debug('Extension largefiles enabled')
53 logger.debug('Extension largefiles enabled')
54 calc_capabilities = wrapped_capabilities
54 calc_capabilities = wrapped_capabilities
55 else:
55 else:
56 logger.debug('Extension largefiles disabled')
56 logger.debug('Extension largefiles disabled')
57 calc_capabilities = lfproto.capabilitiesorig
57 calc_capabilities = lfproto.capabilitiesorig
58 return calc_capabilities(repo, proto)
58 return calc_capabilities(repo, proto)
59
59
60 return _dynamic_capabilities
60 return _dynamic_capabilities
61
62
63 def patch_subrepo_type_mapping():
64 from collections import defaultdict
65 from hgcompat import subrepo
66 from exceptions import SubrepoMergeException
67
68 class NoOpSubrepo(subrepo.abstractsubrepo):
69
70 def __init__(self, ctx, path, *args, **kwargs):
71 """Initialize abstractsubrepo part
72
73 ``ctx`` is the context referring to this subrepository in the
74 parent repository.
75
76 ``path`` is the path to this subrepository as seen from
77 the innermost repository.
78 """
79 self.ui = ctx.repo().ui
80 self._ctx = ctx
81 self._path = path
82
83 def storeclean(self, path):
84 """
85 returns true if the repository has not changed since it was last
86 cloned from or pushed to a given repository.
87 """
88 return True
89
90 def dirty(self, ignoreupdate=False):
91 """returns true if the dirstate of the subrepo is dirty or does not
92 match current stored state. If ignoreupdate is true, only check
93 whether the subrepo has uncommitted changes in its dirstate.
94 """
95 return False
96
97 def basestate(self):
98 """current working directory base state, disregarding .hgsubstate
99 state and working directory modifications"""
100 substate = subrepo.state(self._ctx, self.ui)
101 file_system_path, rev, repotype = substate.get(self._path)
102 return rev
103
104 def remove(self):
105 """remove the subrepo
106
107 (should verify the dirstate is not dirty first)
108 """
109 pass
110
111 def get(self, state, overwrite=False):
112 """run whatever commands are needed to put the subrepo into
113 this state
114 """
115 pass
116
117 def merge(self, state):
118 """merge currently-saved state with the new state."""
119 raise SubrepoMergeException()
120
121 def push(self, opts):
122 """perform whatever action is analogous to 'hg push'
123
124 This may be a no-op on some systems.
125 """
126 pass
127
128 # Patch subrepo type mapping to always return our NoOpSubrepo class
129 # whenever a subrepo class is looked up.
130 subrepo.types = {
131 'hg': NoOpSubrepo,
132 'git': NoOpSubrepo,
133 'svn': NoOpSubrepo
134 }
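The ``subrepo.types`` override above is a plain dictionary monkey-patch: every backend key is remapped to a do-nothing class. A self-contained sketch of the same pattern follows, using made-up class names so it runs without Mercurial installed; it illustrates the technique, not the vcsserver code itself.

# Toy illustration of the registry monkey-patch used by
# patch_subrepo_type_mapping; RealHandler/NoOpHandler are hypothetical names.

class RealHandler(object):
    def merge(self, state):
        return 'merged %s' % state

class NoOpHandler(object):
    def merge(self, state):
        # mirrors NoOpSubrepo.merge, which raises instead of merging
        raise RuntimeError('subrepo merge is not supported')

# registry before patching: every backend uses the real implementation
types = {'hg': RealHandler, 'git': RealHandler, 'svn': RealHandler}

def patch_type_mapping():
    # replace every entry wholesale, exactly like the patch above
    for key in types:
        types[key] = NoOpHandler

patch_type_mapping()
assert all(cls is NoOpHandler for cls in types.values())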
@@ -1,337 +1,376 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import base64
18 import base64
19 import locale
19 import locale
20 import logging
20 import logging
21 import uuid
21 import uuid
22 import wsgiref.util
22 import wsgiref.util
23 from itertools import chain
23 from itertools import chain
24
24
25 import msgpack
25 import msgpack
26 from beaker.cache import CacheManager
26 from beaker.cache import CacheManager
27 from beaker.util import parse_cache_config_options
27 from beaker.util import parse_cache_config_options
28 from pyramid.config import Configurator
28 from pyramid.config import Configurator
29 from pyramid.wsgi import wsgiapp
29 from pyramid.wsgi import wsgiapp
30
30
31 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
31 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
32 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
32 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
33 from vcsserver.echo_stub.echo_app import EchoApp
33 from vcsserver.echo_stub.echo_app import EchoApp
34 from vcsserver.exceptions import HTTPRepoLocked
34 from vcsserver.server import VcsServer
35 from vcsserver.server import VcsServer
35
36
36 try:
37 try:
37 from vcsserver.git import GitFactory, GitRemote
38 from vcsserver.git import GitFactory, GitRemote
38 except ImportError:
39 except ImportError:
39 GitFactory = None
40 GitFactory = None
40 GitRemote = None
41 GitRemote = None
41 try:
42 try:
42 from vcsserver.hg import MercurialFactory, HgRemote
43 from vcsserver.hg import MercurialFactory, HgRemote
43 except ImportError:
44 except ImportError:
44 MercurialFactory = None
45 MercurialFactory = None
45 HgRemote = None
46 HgRemote = None
46 try:
47 try:
47 from vcsserver.svn import SubversionFactory, SvnRemote
48 from vcsserver.svn import SubversionFactory, SvnRemote
48 except ImportError:
49 except ImportError:
49 SubversionFactory = None
50 SubversionFactory = None
50 SvnRemote = None
51 SvnRemote = None
51
52
52 log = logging.getLogger(__name__)
53 log = logging.getLogger(__name__)
53
54
54
55
55 class VCS(object):
56 class VCS(object):
56 def __init__(self, locale=None, cache_config=None):
57 def __init__(self, locale=None, cache_config=None):
57 self.locale = locale
58 self.locale = locale
58 self.cache_config = cache_config
59 self.cache_config = cache_config
59 self._configure_locale()
60 self._configure_locale()
60 self._initialize_cache()
61 self._initialize_cache()
61
62
62 if GitFactory and GitRemote:
63 if GitFactory and GitRemote:
63 git_repo_cache = self.cache.get_cache_region(
64 git_repo_cache = self.cache.get_cache_region(
64 'git', region='repo_object')
65 'git', region='repo_object')
65 git_factory = GitFactory(git_repo_cache)
66 git_factory = GitFactory(git_repo_cache)
66 self._git_remote = GitRemote(git_factory)
67 self._git_remote = GitRemote(git_factory)
67 else:
68 else:
68 log.info("Git client import failed")
69 log.info("Git client import failed")
69
70
70 if MercurialFactory and HgRemote:
71 if MercurialFactory and HgRemote:
71 hg_repo_cache = self.cache.get_cache_region(
72 hg_repo_cache = self.cache.get_cache_region(
72 'hg', region='repo_object')
73 'hg', region='repo_object')
73 hg_factory = MercurialFactory(hg_repo_cache)
74 hg_factory = MercurialFactory(hg_repo_cache)
74 self._hg_remote = HgRemote(hg_factory)
75 self._hg_remote = HgRemote(hg_factory)
75 else:
76 else:
76 log.info("Mercurial client import failed")
77 log.info("Mercurial client import failed")
77
78
78 if SubversionFactory and SvnRemote:
79 if SubversionFactory and SvnRemote:
79 svn_repo_cache = self.cache.get_cache_region(
80 svn_repo_cache = self.cache.get_cache_region(
80 'svn', region='repo_object')
81 'svn', region='repo_object')
81 svn_factory = SubversionFactory(svn_repo_cache)
82 svn_factory = SubversionFactory(svn_repo_cache)
82 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
83 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
83 else:
84 else:
84 log.info("Subversion client import failed")
85 log.info("Subversion client import failed")
85
86
86 self._vcsserver = VcsServer()
87 self._vcsserver = VcsServer()
87
88
88 def _initialize_cache(self):
89 def _initialize_cache(self):
89 cache_config = parse_cache_config_options(self.cache_config)
90 cache_config = parse_cache_config_options(self.cache_config)
90 log.info('Initializing beaker cache: %s' % cache_config)
91 log.info('Initializing beaker cache: %s' % cache_config)
91 self.cache = CacheManager(**cache_config)
92 self.cache = CacheManager(**cache_config)
92
93
93 def _configure_locale(self):
94 def _configure_locale(self):
94 if self.locale:
95 if self.locale:
95 log.info('Setting locale `LC_ALL` to %s' % self.locale)
96 log.info('Setting locale `LC_ALL` to %s' % self.locale)
96 else:
97 else:
97 log.info(
98 log.info(
98 'Configuring locale subsystem based on environment variables')
99 'Configuring locale subsystem based on environment variables')
99 try:
100 try:
100 # If self.locale is the empty string, then the locale
101 # If self.locale is the empty string, then the locale
101 # module will use the environment variables. See the
102 # module will use the environment variables. See the
102 # documentation of the package `locale`.
103 # documentation of the package `locale`.
103 locale.setlocale(locale.LC_ALL, self.locale)
104 locale.setlocale(locale.LC_ALL, self.locale)
104
105
105 language_code, encoding = locale.getlocale()
106 language_code, encoding = locale.getlocale()
106 log.info(
107 log.info(
107 'Locale set to language code "%s" with encoding "%s".',
108 'Locale set to language code "%s" with encoding "%s".',
108 language_code, encoding)
109 language_code, encoding)
109 except locale.Error:
110 except locale.Error:
110 log.exception(
111 log.exception(
111 'Cannot set locale, not configuring the locale system')
112 'Cannot set locale, not configuring the locale system')
112
113
113
114
114 class WsgiProxy(object):
115 class WsgiProxy(object):
115 def __init__(self, wsgi):
116 def __init__(self, wsgi):
116 self.wsgi = wsgi
117 self.wsgi = wsgi
117
118
118 def __call__(self, environ, start_response):
119 def __call__(self, environ, start_response):
119 input_data = environ['wsgi.input'].read()
120 input_data = environ['wsgi.input'].read()
120 input_data = msgpack.unpackb(input_data)
121 input_data = msgpack.unpackb(input_data)
121
122
122 error = None
123 error = None
123 try:
124 try:
124 data, status, headers = self.wsgi.handle(
125 data, status, headers = self.wsgi.handle(
125 input_data['environment'], input_data['input_data'],
126 input_data['environment'], input_data['input_data'],
126 *input_data['args'], **input_data['kwargs'])
127 *input_data['args'], **input_data['kwargs'])
127 except Exception as e:
128 except Exception as e:
128 data, status, headers = [], None, None
129 data, status, headers = [], None, None
129 error = {
130 error = {
130 'message': str(e),
131 'message': str(e),
131 '_vcs_kind': getattr(e, '_vcs_kind', None)
132 '_vcs_kind': getattr(e, '_vcs_kind', None)
132 }
133 }
133
134
134 start_response(200, {})
135 start_response(200, {})
135 return self._iterator(error, status, headers, data)
136 return self._iterator(error, status, headers, data)
136
137
137 def _iterator(self, error, status, headers, data):
138 def _iterator(self, error, status, headers, data):
138 initial_data = [
139 initial_data = [
139 error,
140 error,
140 status,
141 status,
141 headers,
142 headers,
142 ]
143 ]
143
144
144 for d in chain(initial_data, data):
145 for d in chain(initial_data, data):
145 yield msgpack.packb(d)
146 yield msgpack.packb(d)
146
147
147
148
148 class HTTPApplication(object):
149 class HTTPApplication(object):
149 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
150 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
150
151
151 remote_wsgi = remote_wsgi
152 remote_wsgi = remote_wsgi
152 _use_echo_app = False
153 _use_echo_app = False
153
154
154 def __init__(self, settings=None):
155 def __init__(self, settings=None):
155 self.config = Configurator(settings=settings)
156 self.config = Configurator(settings=settings)
156 locale = settings.get('', 'en_US.UTF-8')
157 locale = settings.get('', 'en_US.UTF-8')
157 vcs = VCS(locale=locale, cache_config=settings)
158 vcs = VCS(locale=locale, cache_config=settings)
158 self._remotes = {
159 self._remotes = {
159 'hg': vcs._hg_remote,
160 'hg': vcs._hg_remote,
160 'git': vcs._git_remote,
161 'git': vcs._git_remote,
161 'svn': vcs._svn_remote,
162 'svn': vcs._svn_remote,
162 'server': vcs._vcsserver,
163 'server': vcs._vcsserver,
163 }
164 }
164 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
165 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
165 self._use_echo_app = True
166 self._use_echo_app = True
166 log.warning("Using EchoApp for VCS operations.")
167 log.warning("Using EchoApp for VCS operations.")
167 self.remote_wsgi = remote_wsgi_stub
168 self.remote_wsgi = remote_wsgi_stub
168 self._configure_settings(settings)
169 self._configure_settings(settings)
169 self._configure()
170 self._configure()
170
171
171 def _configure_settings(self, app_settings):
172 def _configure_settings(self, app_settings):
172 """
173 """
173 Configure the settings module.
174 Configure the settings module.
174 """
175 """
175 git_path = app_settings.get('git_path', None)
176 git_path = app_settings.get('git_path', None)
176 if git_path:
177 if git_path:
177 settings.GIT_EXECUTABLE = git_path
178 settings.GIT_EXECUTABLE = git_path
178
179
179 def _configure(self):
180 def _configure(self):
180 self.config.add_renderer(
181 self.config.add_renderer(
181 name='msgpack',
182 name='msgpack',
182 factory=self._msgpack_renderer_factory)
183 factory=self._msgpack_renderer_factory)
183
184
185 self.config.add_route('service', '/_service')
184 self.config.add_route('status', '/status')
186 self.config.add_route('status', '/status')
185 self.config.add_route('hg_proxy', '/proxy/hg')
187 self.config.add_route('hg_proxy', '/proxy/hg')
186 self.config.add_route('git_proxy', '/proxy/git')
188 self.config.add_route('git_proxy', '/proxy/git')
187 self.config.add_route('vcs', '/{backend}')
189 self.config.add_route('vcs', '/{backend}')
188 self.config.add_route('stream_git', '/stream/git/*repo_name')
190 self.config.add_route('stream_git', '/stream/git/*repo_name')
189 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
191 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
190
192
191 self.config.add_view(
193 self.config.add_view(
192 self.status_view, route_name='status', renderer='json')
194 self.status_view, route_name='status', renderer='json')
195 self.config.add_view(
196 self.service_view, route_name='service', renderer='msgpack')
197
193 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
198 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
194 self.config.add_view(self.git_proxy(), route_name='git_proxy')
199 self.config.add_view(self.git_proxy(), route_name='git_proxy')
195 self.config.add_view(
200 self.config.add_view(
196 self.vcs_view, route_name='vcs', renderer='msgpack')
201 self.vcs_view, route_name='vcs', renderer='msgpack')
197
202
198 self.config.add_view(self.hg_stream(), route_name='stream_hg')
203 self.config.add_view(self.hg_stream(), route_name='stream_hg')
199 self.config.add_view(self.git_stream(), route_name='stream_git')
204 self.config.add_view(self.git_stream(), route_name='stream_git')
205 self.config.add_view(
206 self.handle_vcs_exception, context=Exception,
207 custom_predicates=[self.is_vcs_exception])
200
208
201 def wsgi_app(self):
209 def wsgi_app(self):
202 return self.config.make_wsgi_app()
210 return self.config.make_wsgi_app()
203
211
204 def vcs_view(self, request):
212 def vcs_view(self, request):
205 remote = self._remotes[request.matchdict['backend']]
213 remote = self._remotes[request.matchdict['backend']]
206 payload = msgpack.unpackb(request.body, use_list=True)
214 payload = msgpack.unpackb(request.body, use_list=True)
207 method = payload.get('method')
215 method = payload.get('method')
208 params = payload.get('params')
216 params = payload.get('params')
209 wire = params.get('wire')
217 wire = params.get('wire')
210 args = params.get('args')
218 args = params.get('args')
211 kwargs = params.get('kwargs')
219 kwargs = params.get('kwargs')
212 if wire:
220 if wire:
213 try:
221 try:
214 wire['context'] = uuid.UUID(wire['context'])
222 wire['context'] = uuid.UUID(wire['context'])
215 except KeyError:
223 except KeyError:
216 pass
224 pass
217 args.insert(0, wire)
225 args.insert(0, wire)
218
226
219 try:
227 try:
220 resp = getattr(remote, method)(*args, **kwargs)
228 resp = getattr(remote, method)(*args, **kwargs)
221 except Exception as e:
229 except Exception as e:
222 type_ = e.__class__.__name__
230 type_ = e.__class__.__name__
223 if type_ not in self.ALLOWED_EXCEPTIONS:
231 if type_ not in self.ALLOWED_EXCEPTIONS:
224 type_ = None
232 type_ = None
225
233
226 resp = {
234 resp = {
227 'id': payload.get('id'),
235 'id': payload.get('id'),
228 'error': {
236 'error': {
229 'message': e.message,
237 'message': e.message,
230 'type': type_
238 'type': type_
231 }
239 }
232 }
240 }
233 try:
241 try:
234 resp['error']['_vcs_kind'] = e._vcs_kind
242 resp['error']['_vcs_kind'] = e._vcs_kind
235 except AttributeError:
243 except AttributeError:
236 pass
244 pass
237 else:
245 else:
238 resp = {
246 resp = {
239 'id': payload.get('id'),
247 'id': payload.get('id'),
240 'result': resp
248 'result': resp
241 }
249 }
242
250
243 return resp
251 return resp
244
252
245 def status_view(self, request):
253 def status_view(self, request):
246 return {'status': 'OK'}
254 return {'status': 'OK'}
247
255
256 def service_view(self, request):
257 import vcsserver
258 payload = msgpack.unpackb(request.body, use_list=True)
259 resp = {
260 'id': payload.get('id'),
261 'result': dict(
262 version=vcsserver.__version__,
263 config={},
264 payload=payload,
265 )
266 }
267 return resp
268
248 def _msgpack_renderer_factory(self, info):
269 def _msgpack_renderer_factory(self, info):
249 def _render(value, system):
270 def _render(value, system):
250 value = msgpack.packb(value)
271 value = msgpack.packb(value)
251 request = system.get('request')
272 request = system.get('request')
252 if request is not None:
273 if request is not None:
253 response = request.response
274 response = request.response
254 ct = response.content_type
275 ct = response.content_type
255 if ct == response.default_content_type:
276 if ct == response.default_content_type:
256 response.content_type = 'application/x-msgpack'
277 response.content_type = 'application/x-msgpack'
257 return value
278 return value
258 return _render
279 return _render
259
280
260 def hg_proxy(self):
281 def hg_proxy(self):
261 @wsgiapp
282 @wsgiapp
262 def _hg_proxy(environ, start_response):
283 def _hg_proxy(environ, start_response):
263 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
284 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
264 return app(environ, start_response)
285 return app(environ, start_response)
265 return _hg_proxy
286 return _hg_proxy
266
287
267 def git_proxy(self):
288 def git_proxy(self):
268 @wsgiapp
289 @wsgiapp
269 def _git_proxy(environ, start_response):
290 def _git_proxy(environ, start_response):
270 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
291 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
271 return app(environ, start_response)
292 return app(environ, start_response)
272 return _git_proxy
293 return _git_proxy
273
294
274 def hg_stream(self):
295 def hg_stream(self):
275 if self._use_echo_app:
296 if self._use_echo_app:
276 @wsgiapp
297 @wsgiapp
277 def _hg_stream(environ, start_response):
298 def _hg_stream(environ, start_response):
278 app = EchoApp('fake_path', 'fake_name', None)
299 app = EchoApp('fake_path', 'fake_name', None)
279 return app(environ, start_response)
300 return app(environ, start_response)
280 return _hg_stream
301 return _hg_stream
281 else:
302 else:
282 @wsgiapp
303 @wsgiapp
283 def _hg_stream(environ, start_response):
304 def _hg_stream(environ, start_response):
284 repo_path = environ['HTTP_X_RC_REPO_PATH']
305 repo_path = environ['HTTP_X_RC_REPO_PATH']
285 repo_name = environ['HTTP_X_RC_REPO_NAME']
306 repo_name = environ['HTTP_X_RC_REPO_NAME']
286 packed_config = base64.b64decode(
307 packed_config = base64.b64decode(
287 environ['HTTP_X_RC_REPO_CONFIG'])
308 environ['HTTP_X_RC_REPO_CONFIG'])
288 config = msgpack.unpackb(packed_config)
309 config = msgpack.unpackb(packed_config)
289 app = scm_app.create_hg_wsgi_app(
310 app = scm_app.create_hg_wsgi_app(
290 repo_path, repo_name, config)
311 repo_path, repo_name, config)
291
312
292 # Consistent path information for hgweb
313 # Consistent path information for hgweb
293 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
314 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
294 environ['REPO_NAME'] = repo_name
315 environ['REPO_NAME'] = repo_name
295 return app(environ, ResponseFilter(start_response))
316 return app(environ, ResponseFilter(start_response))
296 return _hg_stream
317 return _hg_stream
297
318
298 def git_stream(self):
319 def git_stream(self):
299 if self._use_echo_app:
320 if self._use_echo_app:
300 @wsgiapp
321 @wsgiapp
301 def _git_stream(environ, start_response):
322 def _git_stream(environ, start_response):
302 app = EchoApp('fake_path', 'fake_name', None)
323 app = EchoApp('fake_path', 'fake_name', None)
303 return app(environ, start_response)
324 return app(environ, start_response)
304 return _git_stream
325 return _git_stream
305 else:
326 else:
306 @wsgiapp
327 @wsgiapp
307 def _git_stream(environ, start_response):
328 def _git_stream(environ, start_response):
308 repo_path = environ['HTTP_X_RC_REPO_PATH']
329 repo_path = environ['HTTP_X_RC_REPO_PATH']
309 repo_name = environ['HTTP_X_RC_REPO_NAME']
330 repo_name = environ['HTTP_X_RC_REPO_NAME']
310 packed_config = base64.b64decode(
331 packed_config = base64.b64decode(
311 environ['HTTP_X_RC_REPO_CONFIG'])
332 environ['HTTP_X_RC_REPO_CONFIG'])
312 config = msgpack.unpackb(packed_config)
333 config = msgpack.unpackb(packed_config)
313
334
314 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
335 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
315 app = scm_app.create_git_wsgi_app(
336 app = scm_app.create_git_wsgi_app(
316 repo_path, repo_name, config)
337 repo_path, repo_name, config)
317 return app(environ, start_response)
338 return app(environ, start_response)
318 return _git_stream
339 return _git_stream
319
340
341 def is_vcs_exception(self, context, request):
342 """
343 View predicate that returns true if the context object is a VCS
344 exception.
345 """
346 return hasattr(context, '_vcs_kind')
347
348 def handle_vcs_exception(self, exception, request):
349 if exception._vcs_kind == 'repo_locked':
350 # Get custom repo-locked status code if present.
351 status_code = request.headers.get('X-RC-Locked-Status-Code')
352 return HTTPRepoLocked(
353 title=exception.message, status_code=status_code)
354
355 # Re-raise exception if we can not handle it.
356 raise exception
357
320
358
321 class ResponseFilter(object):
359 class ResponseFilter(object):
322
360
323 def __init__(self, start_response):
361 def __init__(self, start_response):
324 self._start_response = start_response
362 self._start_response = start_response
325
363
326 def __call__(self, status, response_headers, exc_info=None):
364 def __call__(self, status, response_headers, exc_info=None):
327 headers = tuple(
365 headers = tuple(
328 (h, v) for h, v in response_headers
366 (h, v) for h, v in response_headers
329 if not wsgiref.util.is_hop_by_hop(h))
367 if not wsgiref.util.is_hop_by_hop(h))
330 return self._start_response(status, headers, exc_info)
368 return self._start_response(status, headers, exc_info)
331
369
332
370
333 def main(global_config, **settings):
371 def main(global_config, **settings):
334 if MercurialFactory:
372 if MercurialFactory:
335 hgpatches.patch_largefiles_capabilities()
373 hgpatches.patch_largefiles_capabilities()
374 hgpatches.patch_subrepo_type_mapping()
336 app = HTTPApplication(settings=settings)
375 app = HTTPApplication(settings=settings)
337 return app.wsgi_app()
376 return app.wsgi_app()
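The new ``/_service`` route registered above answers msgpack-encoded requests via ``service_view``. A hedged client-side sketch follows; the host, port and the extra payload field are assumptions for illustration, only the path and the msgpack framing come from the code above.

# Illustrative client for the /_service endpoint; base_url is an assumption.
import msgpack
import urllib2  # the surrounding code targets Python 2

def ping_service(base_url='http://localhost:9900'):
    payload = msgpack.packb({'id': 'ping-1'})
    request = urllib2.Request(
        base_url + '/_service', data=payload,
        headers={'Content-Type': 'application/x-msgpack'})
    response = urllib2.urlopen(request)
    # service_view echoes the payload back and reports the server version
    return msgpack.unpackb(response.read())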
@@ -1,507 +1,508 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import atexit
18 import atexit
19 import locale
19 import locale
20 import logging
20 import logging
21 import optparse
21 import optparse
22 import os
22 import os
23 import textwrap
23 import textwrap
24 import threading
24 import threading
25 import sys
25 import sys
26
26
27 import configobj
27 import configobj
28 import Pyro4
28 import Pyro4
29 from beaker.cache import CacheManager
29 from beaker.cache import CacheManager
30 from beaker.util import parse_cache_config_options
30 from beaker.util import parse_cache_config_options
31
31
32 try:
32 try:
33 from vcsserver.git import GitFactory, GitRemote
33 from vcsserver.git import GitFactory, GitRemote
34 except ImportError:
34 except ImportError:
35 GitFactory = None
35 GitFactory = None
36 GitRemote = None
36 GitRemote = None
37 try:
37 try:
38 from vcsserver.hg import MercurialFactory, HgRemote
38 from vcsserver.hg import MercurialFactory, HgRemote
39 except ImportError:
39 except ImportError:
40 MercurialFactory = None
40 MercurialFactory = None
41 HgRemote = None
41 HgRemote = None
42 try:
42 try:
43 from vcsserver.svn import SubversionFactory, SvnRemote
43 from vcsserver.svn import SubversionFactory, SvnRemote
44 except ImportError:
44 except ImportError:
45 SubversionFactory = None
45 SubversionFactory = None
46 SvnRemote = None
46 SvnRemote = None
47
47
48 from server import VcsServer
48 from server import VcsServer
49 from vcsserver import hgpatches, remote_wsgi, settings
49 from vcsserver import hgpatches, remote_wsgi, settings
50 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
50 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
51
51
52 log = logging.getLogger(__name__)
52 log = logging.getLogger(__name__)
53
53
54 HERE = os.path.dirname(os.path.abspath(__file__))
54 HERE = os.path.dirname(os.path.abspath(__file__))
55 SERVER_RUNNING_FILE = None
55 SERVER_RUNNING_FILE = None
56
56
57
57
58 # HOOKS - inspired by gunicorn #
58 # HOOKS - inspired by gunicorn #
59
59
60 def when_ready(server):
60 def when_ready(server):
61 """
61 """
62 Called just after the server is started.
62 Called just after the server is started.
63 """
63 """
64
64
65 def _remove_server_running_file():
65 def _remove_server_running_file():
66 if os.path.isfile(SERVER_RUNNING_FILE):
66 if os.path.isfile(SERVER_RUNNING_FILE):
67 os.remove(SERVER_RUNNING_FILE)
67 os.remove(SERVER_RUNNING_FILE)
68
68
69 # write the PID into the running file, if a location was configured
69 # write the PID into the running file, if a location was configured
70 if SERVER_RUNNING_FILE:
70 if SERVER_RUNNING_FILE:
71 with open(SERVER_RUNNING_FILE, 'wb') as f:
71 with open(SERVER_RUNNING_FILE, 'wb') as f:
72 f.write(str(os.getpid()))
72 f.write(str(os.getpid()))
73 # register cleanup of that file when server exits
73 # register cleanup of that file when server exits
74 atexit.register(_remove_server_running_file)
74 atexit.register(_remove_server_running_file)
75
75
76
76
77 class LazyWriter(object):
77 class LazyWriter(object):
78 """
78 """
79 File-like object that opens a file lazily when it is first written
79 File-like object that opens a file lazily when it is first written
80 to.
80 to.
81 """
81 """
82
82
83 def __init__(self, filename, mode='w'):
83 def __init__(self, filename, mode='w'):
84 self.filename = filename
84 self.filename = filename
85 self.fileobj = None
85 self.fileobj = None
86 self.lock = threading.Lock()
86 self.lock = threading.Lock()
87 self.mode = mode
87 self.mode = mode
88
88
89 def open(self):
89 def open(self):
90 if self.fileobj is None:
90 if self.fileobj is None:
91 with self.lock:
91 with self.lock:
92 self.fileobj = open(self.filename, self.mode)
92 self.fileobj = open(self.filename, self.mode)
93 return self.fileobj
93 return self.fileobj
94
94
95 def close(self):
95 def close(self):
96 fileobj = self.fileobj
96 fileobj = self.fileobj
97 if fileobj is not None:
97 if fileobj is not None:
98 fileobj.close()
98 fileobj.close()
99
99
100 def __del__(self):
100 def __del__(self):
101 self.close()
101 self.close()
102
102
103 def write(self, text):
103 def write(self, text):
104 fileobj = self.open()
104 fileobj = self.open()
105 fileobj.write(text)
105 fileobj.write(text)
106 fileobj.flush()
106 fileobj.flush()
107
107
108 def writelines(self, text):
108 def writelines(self, text):
109 fileobj = self.open()
109 fileobj = self.open()
110 fileobj.writelines(text)
110 fileobj.writelines(text)
111 fileobj.flush()
111 fileobj.flush()
112
112
113 def flush(self):
113 def flush(self):
114 self.open().flush()
114 self.open().flush()
115
115
116
116
117 class Application(object):
117 class Application(object):
118 """
118 """
119 Represents the vcs server application.
119 Represents the vcs server application.
120
120
121 This object is responsible for initializing the application and all needed
121 This object is responsible for initializing the application and all needed
122 libraries. After that it hooks together the different objects and provides
122 libraries. After that it hooks together the different objects and provides
123 them with a way to access things like configuration.
123 them with a way to access things like configuration.
124 """
124 """
125
125
126 def __init__(
126 def __init__(
127 self, host, port=None, locale='', threadpool_size=None,
127 self, host, port=None, locale='', threadpool_size=None,
128 timeout=None, cache_config=None, remote_wsgi_=None):
128 timeout=None, cache_config=None, remote_wsgi_=None):
129
129
130 self.host = host
130 self.host = host
131 self.port = int(port) or settings.PYRO_PORT
131 self.port = int(port) or settings.PYRO_PORT
132 self.threadpool_size = (
132 self.threadpool_size = (
133 int(threadpool_size) if threadpool_size else None)
133 int(threadpool_size) if threadpool_size else None)
134 self.locale = locale
134 self.locale = locale
135 self.timeout = timeout
135 self.timeout = timeout
136 self.cache_config = cache_config
136 self.cache_config = cache_config
137 self.remote_wsgi = remote_wsgi_ or remote_wsgi
137 self.remote_wsgi = remote_wsgi_ or remote_wsgi
138
138
139 def init(self):
139 def init(self):
140 """
140 """
141 Configure and hook together all relevant objects.
141 Configure and hook together all relevant objects.
142 """
142 """
143 self._configure_locale()
143 self._configure_locale()
144 self._configure_pyro()
144 self._configure_pyro()
145 self._initialize_cache()
145 self._initialize_cache()
146 self._create_daemon_and_remote_objects(host=self.host, port=self.port)
146 self._create_daemon_and_remote_objects(host=self.host, port=self.port)
147
147
148 def run(self):
148 def run(self):
149 """
149 """
150 Start the main loop of the application.
150 Start the main loop of the application.
151 """
151 """
152
152
153 if hasattr(os, 'getpid'):
153 if hasattr(os, 'getpid'):
154 log.info('Starting %s in PID %i.', __name__, os.getpid())
154 log.info('Starting %s in PID %i.', __name__, os.getpid())
155 else:
155 else:
156 log.info('Starting %s.', __name__)
156 log.info('Starting %s.', __name__)
157 if SERVER_RUNNING_FILE:
157 if SERVER_RUNNING_FILE:
158 log.info('PID file written as %s', SERVER_RUNNING_FILE)
158 log.info('PID file written as %s', SERVER_RUNNING_FILE)
159 else:
159 else:
160 log.info('No PID file written by default.')
160 log.info('No PID file written by default.')
161 when_ready(self)
161 when_ready(self)
162 try:
162 try:
163 self._pyrodaemon.requestLoop(
163 self._pyrodaemon.requestLoop(
164 loopCondition=lambda: not self._vcsserver._shutdown)
164 loopCondition=lambda: not self._vcsserver._shutdown)
165 finally:
165 finally:
166 self._pyrodaemon.shutdown()
166 self._pyrodaemon.shutdown()
167
167
168 def _configure_locale(self):
168 def _configure_locale(self):
169 if self.locale:
169 if self.locale:
170 log.info('Setting locale `LC_ALL` to %s' % self.locale)
170 log.info('Setting locale `LC_ALL` to %s' % self.locale)
171 else:
171 else:
172 log.info(
172 log.info(
173 'Configuring locale subsystem based on environment variables')
173 'Configuring locale subsystem based on environment variables')
174
174
175 try:
175 try:
176 # If self.locale is the empty string, then the locale
176 # If self.locale is the empty string, then the locale
177 # module will use the environment variables. See the
177 # module will use the environment variables. See the
178 # documentation of the package `locale`.
178 # documentation of the package `locale`.
179 locale.setlocale(locale.LC_ALL, self.locale)
179 locale.setlocale(locale.LC_ALL, self.locale)
180
180
181 language_code, encoding = locale.getlocale()
181 language_code, encoding = locale.getlocale()
182 log.info(
182 log.info(
183 'Locale set to language code "%s" with encoding "%s".',
183 'Locale set to language code "%s" with encoding "%s".',
184 language_code, encoding)
184 language_code, encoding)
185 except locale.Error:
185 except locale.Error:
186 log.exception(
186 log.exception(
187 'Cannot set locale, not configuring the locale system')
187 'Cannot set locale, not configuring the locale system')
188
188
189 def _configure_pyro(self):
189 def _configure_pyro(self):
190 if self.threadpool_size is not None:
190 if self.threadpool_size is not None:
191 log.info("Threadpool size set to %s", self.threadpool_size)
191 log.info("Threadpool size set to %s", self.threadpool_size)
192 Pyro4.config.THREADPOOL_SIZE = self.threadpool_size
192 Pyro4.config.THREADPOOL_SIZE = self.threadpool_size
193 if self.timeout not in (None, 0, 0.0, '0'):
193 if self.timeout not in (None, 0, 0.0, '0'):
194 log.info("Timeout for RPC calls set to %s seconds", self.timeout)
194 log.info("Timeout for RPC calls set to %s seconds", self.timeout)
195 Pyro4.config.COMMTIMEOUT = float(self.timeout)
195 Pyro4.config.COMMTIMEOUT = float(self.timeout)
196 Pyro4.config.SERIALIZER = 'pickle'
196 Pyro4.config.SERIALIZER = 'pickle'
197 Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
197 Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
198 Pyro4.config.SOCK_REUSE = True
198 Pyro4.config.SOCK_REUSE = True
199 # Uncomment the next line when you need to debug remote errors
199 # Uncomment the next line when you need to debug remote errors
200 # Pyro4.config.DETAILED_TRACEBACK = True
200 # Pyro4.config.DETAILED_TRACEBACK = True
201
201
202 def _initialize_cache(self):
202 def _initialize_cache(self):
203 cache_config = parse_cache_config_options(self.cache_config)
203 cache_config = parse_cache_config_options(self.cache_config)
204 log.info('Initializing beaker cache: %s' % cache_config)
204 log.info('Initializing beaker cache: %s' % cache_config)
205 self.cache = CacheManager(**cache_config)
205 self.cache = CacheManager(**cache_config)
206
206
207 def _create_daemon_and_remote_objects(self, host='localhost',
207 def _create_daemon_and_remote_objects(self, host='localhost',
208 port=settings.PYRO_PORT):
208 port=settings.PYRO_PORT):
209 daemon = Pyro4.Daemon(host=host, port=port)
209 daemon = Pyro4.Daemon(host=host, port=port)
210
210
211 self._vcsserver = VcsServer()
211 self._vcsserver = VcsServer()
212 uri = daemon.register(
212 uri = daemon.register(
213 self._vcsserver, objectId=settings.PYRO_VCSSERVER)
213 self._vcsserver, objectId=settings.PYRO_VCSSERVER)
214 log.info("Object registered = %s", uri)
214 log.info("Object registered = %s", uri)
215
215
216 if GitFactory and GitRemote:
216 if GitFactory and GitRemote:
217 git_repo_cache = self.cache.get_cache_region('git', region='repo_object')
217 git_repo_cache = self.cache.get_cache_region('git', region='repo_object')
218 git_factory = GitFactory(git_repo_cache)
218 git_factory = GitFactory(git_repo_cache)
219 self._git_remote = GitRemote(git_factory)
219 self._git_remote = GitRemote(git_factory)
220 uri = daemon.register(self._git_remote, objectId=settings.PYRO_GIT)
220 uri = daemon.register(self._git_remote, objectId=settings.PYRO_GIT)
221 log.info("Object registered = %s", uri)
221 log.info("Object registered = %s", uri)
222 else:
222 else:
223 log.info("Git client import failed")
223 log.info("Git client import failed")
224
224
225 if MercurialFactory and HgRemote:
225 if MercurialFactory and HgRemote:
226 hg_repo_cache = self.cache.get_cache_region('hg', region='repo_object')
226 hg_repo_cache = self.cache.get_cache_region('hg', region='repo_object')
227 hg_factory = MercurialFactory(hg_repo_cache)
227 hg_factory = MercurialFactory(hg_repo_cache)
228 self._hg_remote = HgRemote(hg_factory)
228 self._hg_remote = HgRemote(hg_factory)
229 uri = daemon.register(self._hg_remote, objectId=settings.PYRO_HG)
229 uri = daemon.register(self._hg_remote, objectId=settings.PYRO_HG)
230 log.info("Object registered = %s", uri)
230 log.info("Object registered = %s", uri)
231 else:
231 else:
232 log.info("Mercurial client import failed")
232 log.info("Mercurial client import failed")
233
233
234 if SubversionFactory and SvnRemote:
234 if SubversionFactory and SvnRemote:
235 svn_repo_cache = self.cache.get_cache_region('svn', region='repo_object')
235 svn_repo_cache = self.cache.get_cache_region('svn', region='repo_object')
236 svn_factory = SubversionFactory(svn_repo_cache)
236 svn_factory = SubversionFactory(svn_repo_cache)
237 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
237 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
238 uri = daemon.register(self._svn_remote, objectId=settings.PYRO_SVN)
238 uri = daemon.register(self._svn_remote, objectId=settings.PYRO_SVN)
239 log.info("Object registered = %s", uri)
239 log.info("Object registered = %s", uri)
240 else:
240 else:
241 log.info("Subversion client import failed")
241 log.info("Subversion client import failed")
242
242
243 self._git_remote_wsgi = self.remote_wsgi.GitRemoteWsgi()
243 self._git_remote_wsgi = self.remote_wsgi.GitRemoteWsgi()
244 uri = daemon.register(self._git_remote_wsgi,
244 uri = daemon.register(self._git_remote_wsgi,
245 objectId=settings.PYRO_GIT_REMOTE_WSGI)
245 objectId=settings.PYRO_GIT_REMOTE_WSGI)
246 log.info("Object registered = %s", uri)
246 log.info("Object registered = %s", uri)
247
247
248 self._hg_remote_wsgi = self.remote_wsgi.HgRemoteWsgi()
248 self._hg_remote_wsgi = self.remote_wsgi.HgRemoteWsgi()
249 uri = daemon.register(self._hg_remote_wsgi,
249 uri = daemon.register(self._hg_remote_wsgi,
250 objectId=settings.PYRO_HG_REMOTE_WSGI)
250 objectId=settings.PYRO_HG_REMOTE_WSGI)
251 log.info("Object registered = %s", uri)
251 log.info("Object registered = %s", uri)
252
252
253 self._pyrodaemon = daemon
253 self._pyrodaemon = daemon
254
254
255
255
256 class VcsServerCommand(object):
256 class VcsServerCommand(object):
257
257
258 usage = '%prog'
258 usage = '%prog'
259 description = """
259 description = """
260 Runs the VCS server
260 Runs the VCS server
261 """
261 """
262 default_verbosity = 1
262 default_verbosity = 1
263
263
264 parser = optparse.OptionParser(
264 parser = optparse.OptionParser(
265 usage,
265 usage,
266 description=textwrap.dedent(description)
266 description=textwrap.dedent(description)
267 )
267 )
268 parser.add_option(
268 parser.add_option(
269 '--host',
269 '--host',
270 type="str",
270 type="str",
271 dest="host",
271 dest="host",
272 )
272 )
273 parser.add_option(
273 parser.add_option(
274 '--port',
274 '--port',
275 type="int",
275 type="int",
276 dest="port"
276 dest="port"
277 )
277 )
278 parser.add_option(
278 parser.add_option(
279 '--running-file',
279 '--running-file',
280 dest='running_file',
280 dest='running_file',
281 metavar='RUNNING_FILE',
281 metavar='RUNNING_FILE',
282 help="Create a running file after the server is initalized with "
282 help="Create a running file after the server is initalized with "
283 "stored PID of process"
283 "stored PID of process"
284 )
284 )
285 parser.add_option(
285 parser.add_option(
286 '--locale',
286 '--locale',
287 dest='locale',
287 dest='locale',
288 help="Allows to set the locale, e.g. en_US.UTF-8",
288 help="Allows to set the locale, e.g. en_US.UTF-8",
289 default=""
289 default=""
290 )
290 )
291 parser.add_option(
291 parser.add_option(
292 '--log-file',
292 '--log-file',
293 dest='log_file',
293 dest='log_file',
294 metavar='LOG_FILE',
294 metavar='LOG_FILE',
295 help="Save output to the given log file (redirects stdout)"
295 help="Save output to the given log file (redirects stdout)"
296 )
296 )
297 parser.add_option(
297 parser.add_option(
298 '--log-level',
298 '--log-level',
299 dest="log_level",
299 dest="log_level",
300 metavar="LOG_LEVEL",
300 metavar="LOG_LEVEL",
301 help="use LOG_LEVEL to set log level "
301 help="use LOG_LEVEL to set log level "
302 "(debug,info,warning,error,critical)"
302 "(debug,info,warning,error,critical)"
303 )
303 )
304 parser.add_option(
304 parser.add_option(
305 '--threadpool',
305 '--threadpool',
306 dest='threadpool_size',
306 dest='threadpool_size',
307 type='int',
307 type='int',
308 help="Set the size of the threadpool used to communicate with the "
308 help="Set the size of the threadpool used to communicate with the "
309 "WSGI workers. This should be at least 6 times the number of "
309 "WSGI workers. This should be at least 6 times the number of "
310 "WSGI worker processes."
310 "WSGI worker processes."
311 )
311 )
312 parser.add_option(
312 parser.add_option(
313 '--timeout',
313 '--timeout',
314 dest='timeout',
314 dest='timeout',
315 type='float',
315 type='float',
316 help="Set the timeout for RPC communication in seconds."
316 help="Set the timeout for RPC communication in seconds."
317 )
317 )
318 parser.add_option(
318 parser.add_option(
319 '--config',
319 '--config',
320 dest='config_file',
320 dest='config_file',
321 type='string',
321 type='string',
322 help="Configuration file for vcsserver."
322 help="Configuration file for vcsserver."
323 )
323 )
324
324
325 def __init__(self, argv, quiet=False):
325 def __init__(self, argv, quiet=False):
326 self.options, self.args = self.parser.parse_args(argv[1:])
326 self.options, self.args = self.parser.parse_args(argv[1:])
327 if quiet:
327 if quiet:
328 self.options.verbose = 0
328 self.options.verbose = 0
329
329
330 def _get_file_config(self):
330 def _get_file_config(self):
331 ini_conf = {}
331 ini_conf = {}
332 conf = configobj.ConfigObj(self.options.config_file)
332 conf = configobj.ConfigObj(self.options.config_file)
333 if 'DEFAULT' in conf:
333 if 'DEFAULT' in conf:
334 ini_conf = conf['DEFAULT']
334 ini_conf = conf['DEFAULT']
335
335
336 return ini_conf
336 return ini_conf
337
337
338 def _show_config(self, vcsserver_config):
338 def _show_config(self, vcsserver_config):
339 order = [
339 order = [
340 'config_file',
340 'config_file',
341 'host',
341 'host',
342 'port',
342 'port',
343 'log_file',
343 'log_file',
344 'log_level',
344 'log_level',
345 'locale',
345 'locale',
346 'threadpool_size',
346 'threadpool_size',
347 'timeout',
347 'timeout',
348 'cache_config',
348 'cache_config',
349 ]
349 ]
350
350
351 def sorter(k):
351 def sorter(k):
352 return dict([(y, x) for x, y in enumerate(order)]).get(k)
352 return dict([(y, x) for x, y in enumerate(order)]).get(k)
353
353
354 _config = []
354 _config = []
355 for k in sorted(vcsserver_config.keys(), key=sorter):
355 for k in sorted(vcsserver_config.keys(), key=sorter):
356 v = vcsserver_config[k]
356 v = vcsserver_config[k]
357 # construct padded key for display eg %-20s % = key: val
357 # construct padded key for display eg %-20s % = key: val
358 k_formatted = ('%-'+str(len(max(order, key=len))+1)+'s') % (k+':')
358 k_formatted = ('%-'+str(len(max(order, key=len))+1)+'s') % (k+':')
359 _config.append(' * %s %s' % (k_formatted, v))
359 _config.append(' * %s %s' % (k_formatted, v))
360 log.info('\n[vcsserver configuration]:\n'+'\n'.join(_config))
360 log.info('\n[vcsserver configuration]:\n'+'\n'.join(_config))
361
361
362 def _get_vcsserver_configuration(self):
362 def _get_vcsserver_configuration(self):
363 _defaults = {
363 _defaults = {
364 'config_file': None,
364 'config_file': None,
365 'git_path': 'git',
365 'git_path': 'git',
366 'host': 'localhost',
366 'host': 'localhost',
367 'port': settings.PYRO_PORT,
367 'port': settings.PYRO_PORT,
368 'log_file': None,
368 'log_file': None,
369 'log_level': 'debug',
369 'log_level': 'debug',
370 'locale': None,
370 'locale': None,
371 'threadpool_size': 16,
371 'threadpool_size': 16,
372 'timeout': None,
372 'timeout': None,
373
373
374 # Development support
374 # Development support
375 'dev.use_echo_app': False,
375 'dev.use_echo_app': False,
376
376
377 # caches, beaker-style config
377 # caches, beaker-style config
378 'beaker.cache.regions': 'repo_object',
378 'beaker.cache.regions': 'repo_object',
379 'beaker.cache.repo_object.expire': '10',
379 'beaker.cache.repo_object.expire': '10',
380 'beaker.cache.repo_object.type': 'memory',
380 'beaker.cache.repo_object.type': 'memory',
381 }
381 }
382 config = {}
382 config = {}
383 config.update(_defaults)
383 config.update(_defaults)
384 # overwrite defaults with one loaded from file
384 # overwrite defaults with one loaded from file
385 config.update(self._get_file_config())
385 config.update(self._get_file_config())
386
386
387 # overwrite with self.option which has the top priority
387 # overwrite with self.option which has the top priority
388 for k, v in self.options.__dict__.items():
388 for k, v in self.options.__dict__.items():
389 if v or v == 0:
389 if v or v == 0:
390 config[k] = v
390 config[k] = v
391
391
392 # clear all "extra" keys if they are somehow passed,
392 # clear all "extra" keys if they are somehow passed,
393 # we only want defaults, so any extra stuff from self.options is cleared
393 # we only want defaults, so any extra stuff from self.options is cleared
394 # except beaker stuff which needs to be dynamic
394 # except beaker stuff which needs to be dynamic
395 for k in [k for k in config.copy().keys() if not k.startswith('beaker.cache.')]:
395 for k in [k for k in config.copy().keys() if not k.startswith('beaker.cache.')]:
396 if k not in _defaults:
396 if k not in _defaults:
397 del config[k]
397 del config[k]
398
398
399 # group together the cache into one key.
399 # group together the cache into one key.
400 # Needed further for beaker lib configuration
400 # Needed further for beaker lib configuration
401 _k = {}
401 _k = {}
402 for k in [k for k in config.copy() if k.startswith('beaker.cache.')]:
402 for k in [k for k in config.copy() if k.startswith('beaker.cache.')]:
403 _k[k] = config.pop(k)
403 _k[k] = config.pop(k)
404 config['cache_config'] = _k
404 config['cache_config'] = _k
405
405
406 return config
406 return config
407
407
408 def out(self, msg): # pragma: no cover
408 def out(self, msg): # pragma: no cover
409 if self.options.verbose > 0:
409 if self.options.verbose > 0:
410 print(msg)
410 print(msg)
411
411
412 def run(self): # pragma: no cover
412 def run(self): # pragma: no cover
413 vcsserver_config = self._get_vcsserver_configuration()
413 vcsserver_config = self._get_vcsserver_configuration()
414
414
415 # Ensure the log file is writeable
415 # Ensure the log file is writeable
416 if vcsserver_config['log_file']:
416 if vcsserver_config['log_file']:
417 stdout_log = self._configure_logfile()
417 stdout_log = self._configure_logfile()
418 else:
418 else:
419 stdout_log = None
419 stdout_log = None
420
420
421 # set PID file with running lock
421 # set PID file with running lock
422 if self.options.running_file:
422 if self.options.running_file:
423 global SERVER_RUNNING_FILE
423 global SERVER_RUNNING_FILE
424 SERVER_RUNNING_FILE = self.options.running_file
424 SERVER_RUNNING_FILE = self.options.running_file
425
425
426 # configure logging, and logging based on configuration file
426 # configure logging, and logging based on configuration file
427 self._configure_logging(level=vcsserver_config['log_level'],
427 self._configure_logging(level=vcsserver_config['log_level'],
428 stream=stdout_log)
428 stream=stdout_log)
429 if self.options.config_file:
429 if self.options.config_file:
430 if not os.path.isfile(self.options.config_file):
430 if not os.path.isfile(self.options.config_file):
431 raise OSError('File %s does not exist' %
431 raise OSError('File %s does not exist' %
432 self.options.config_file)
432 self.options.config_file)
433
433
434 self._configure_file_logging(self.options.config_file)
434 self._configure_file_logging(self.options.config_file)
435
435
436 self._configure_settings(vcsserver_config)
436 self._configure_settings(vcsserver_config)
437
437
438 # display current configuration of vcsserver
438 # display current configuration of vcsserver
439 self._show_config(vcsserver_config)
439 self._show_config(vcsserver_config)
440
440
441 if not vcsserver_config['dev.use_echo_app']:
441 if not vcsserver_config['dev.use_echo_app']:
442 remote_wsgi_mod = remote_wsgi
442 remote_wsgi_mod = remote_wsgi
443 else:
443 else:
444 log.warning("Using EchoApp for VCS endpoints.")
444 log.warning("Using EchoApp for VCS endpoints.")
445 remote_wsgi_mod = remote_wsgi_stub
445 remote_wsgi_mod = remote_wsgi_stub
446
446
447 app = Application(
447 app = Application(
448 host=vcsserver_config['host'],
448 host=vcsserver_config['host'],
449 port=vcsserver_config['port'],
449 port=vcsserver_config['port'],
450 locale=vcsserver_config['locale'],
450 locale=vcsserver_config['locale'],
451 threadpool_size=vcsserver_config['threadpool_size'],
451 threadpool_size=vcsserver_config['threadpool_size'],
452 timeout=vcsserver_config['timeout'],
452 timeout=vcsserver_config['timeout'],
453 cache_config=vcsserver_config['cache_config'],
453 cache_config=vcsserver_config['cache_config'],
454 remote_wsgi_=remote_wsgi_mod)
454 remote_wsgi_=remote_wsgi_mod)
455 app.init()
455 app.init()
456 app.run()
456 app.run()
457
457
458 def _configure_logging(self, level, stream=None):
458 def _configure_logging(self, level, stream=None):
459 _format = (
459 _format = (
460 '%(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s')
460 '%(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s')
461 levels = {
461 levels = {
462 'debug': logging.DEBUG,
462 'debug': logging.DEBUG,
463 'info': logging.INFO,
463 'info': logging.INFO,
464 'warning': logging.WARNING,
464 'warning': logging.WARNING,
465 'error': logging.ERROR,
465 'error': logging.ERROR,
466 'critical': logging.CRITICAL,
466 'critical': logging.CRITICAL,
467 }
467 }
468 try:
468 try:
469 level = levels[level]
469 level = levels[level]
470 except KeyError:
470 except KeyError:
471 raise AttributeError(
471 raise AttributeError(
471 'Invalid log level, please use one of %s' % (levels.keys(),))
472 'Invalid log level, please use one of %s' % (levels.keys(),))
473 logging.basicConfig(format=_format, stream=stream, level=level)
473 logging.basicConfig(format=_format, stream=stream, level=level)
474 logging.getLogger('Pyro4').setLevel(level)
474 logging.getLogger('Pyro4').setLevel(level)
475
475
476 def _configure_file_logging(self, config):
476 def _configure_file_logging(self, config):
477 import logging.config
477 import logging.config
478 try:
478 try:
479 logging.config.fileConfig(config)
479 logging.config.fileConfig(config)
480 except Exception as e:
480 except Exception as e:
481 log.warning('Failed to configure logging based on given '
481 log.warning('Failed to configure logging based on given '
482 'config file. Error: %s' % e)
482 'config file. Error: %s' % e)
483
483
484 def _configure_logfile(self):
484 def _configure_logfile(self):
485 try:
485 try:
486 writeable_log_file = open(self.options.log_file, 'a')
486 writeable_log_file = open(self.options.log_file, 'a')
487 except IOError as ioe:
487 except IOError as ioe:
488 msg = 'Error: Unable to write to log file: %s' % ioe
488 msg = 'Error: Unable to write to log file: %s' % ioe
489 raise ValueError(msg)
489 raise ValueError(msg)
490 writeable_log_file.close()
490 writeable_log_file.close()
491 stdout_log = LazyWriter(self.options.log_file, 'a')
491 stdout_log = LazyWriter(self.options.log_file, 'a')
492 sys.stdout = stdout_log
492 sys.stdout = stdout_log
493 sys.stderr = stdout_log
493 sys.stderr = stdout_log
494 return stdout_log
494 return stdout_log
495
495
496 def _configure_settings(self, config):
496 def _configure_settings(self, config):
497 """
497 """
498 Configure the settings module based on the given `config`.
498 Configure the settings module based on the given `config`.
499 """
499 """
500 settings.GIT_EXECUTABLE = config['git_path']
500 settings.GIT_EXECUTABLE = config['git_path']
501
501
502
502
503 def main(argv=sys.argv, quiet=False):
503 def main(argv=sys.argv, quiet=False):
504 if MercurialFactory:
504 if MercurialFactory:
505 hgpatches.patch_largefiles_capabilities()
505 hgpatches.patch_largefiles_capabilities()
506 hgpatches.patch_subrepo_type_mapping()
506 command = VcsServerCommand(argv, quiet=quiet)
507 command = VcsServerCommand(argv, quiet=quiet)
507 return command.run()
508 return command.run()
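
For context on the server entry point above: _configure_logging maps a lowercase level name onto the corresponding logging constant and passes it to logging.basicConfig. The following minimal, self-contained sketch shows only that lookup; the helper name configure_logging and the example call are illustrative and not part of the changeset.

    import logging

    # Level-name lookup as performed by _configure_logging in the diff above.
    LEVELS = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL,
    }

    def configure_logging(level_name, stream=None):
        # Unknown names fail fast, mirroring the AttributeError raised above.
        try:
            level = LEVELS[level_name]
        except KeyError:
            raise AttributeError(
                'Invalid log level, please use one of %s' % (sorted(LEVELS),))
        fmt = '%(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s'
        logging.basicConfig(format=fmt, stream=stream, level=level)

    configure_logging('info')
    logging.getLogger(__name__).info('console logging configured')
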
@@ -1,591 +1,627 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2016 RhodeCode GmbH
2 # Copyright (C) 2014-2016 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from urllib2 import URLError
20 from urllib2 import URLError
21 import logging
21 import logging
22 import posixpath as vcspath
22 import posixpath as vcspath
23 import StringIO
23 import StringIO
24 import subprocess
24 import subprocess
25 import urllib
25 import urllib
26
26
27 import svn.client
27 import svn.client
28 import svn.core
28 import svn.core
29 import svn.delta
29 import svn.delta
30 import svn.diff
30 import svn.diff
31 import svn.fs
31 import svn.fs
32 import svn.repos
32 import svn.repos
33
33
34 from vcsserver import svn_diff
34 from vcsserver import svn_diff
35 from vcsserver import exceptions
35 from vcsserver.base import RepoFactory
36 from vcsserver.base import RepoFactory
36
37
37
38
38 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
39
40
40
41
41 # Set of svn compatible version flags.
42 # Set of svn compatible version flags.
42 # Compare with subversion/svnadmin/svnadmin.c
43 # Compare with subversion/svnadmin/svnadmin.c
43 svn_compatible_versions = set([
44 svn_compatible_versions = set([
44 'pre-1.4-compatible',
45 'pre-1.4-compatible',
45 'pre-1.5-compatible',
46 'pre-1.5-compatible',
46 'pre-1.6-compatible',
47 'pre-1.6-compatible',
47 'pre-1.8-compatible',
48 'pre-1.8-compatible',
48 ])
49 ])
49
50
50
51
52 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
55 try:
56 return func(*args, **kwargs)
57 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
59 log.exception("Unhandled exception in svn remote call")
60 raise_from_original(exceptions.UnhandledException)
61 raise
62 return wrapper
63
64
65 def raise_from_original(new_type):
66 """
67 Raise a new exception type with original args and traceback.
68 """
69 _, original, traceback = sys.exc_info()
70 try:
71 raise new_type(*original.args), None, traceback
72 finally:
73 del traceback
74
75
51 class SubversionFactory(RepoFactory):
76 class SubversionFactory(RepoFactory):
52
77
53 def _create_repo(self, wire, create, compatible_version):
78 def _create_repo(self, wire, create, compatible_version):
54 path = svn.core.svn_path_canonicalize(wire['path'])
79 path = svn.core.svn_path_canonicalize(wire['path'])
55 if create:
80 if create:
56 fs_config = {}
81 fs_config = {}
57 if compatible_version:
82 if compatible_version:
58 if compatible_version not in svn_compatible_versions:
83 if compatible_version not in svn_compatible_versions:
59 raise Exception('Unknown SVN compatible version "{}"'
84 raise Exception('Unknown SVN compatible version "{}"'
60 .format(compatible_version))
85 .format(compatible_version))
61 log.debug('Create SVN repo with compatible version "%s"',
86 log.debug('Create SVN repo with compatible version "%s"',
62 compatible_version)
87 compatible_version)
63 fs_config[compatible_version] = '1'
88 fs_config[compatible_version] = '1'
64 repo = svn.repos.create(path, "", "", None, fs_config)
89 repo = svn.repos.create(path, "", "", None, fs_config)
65 else:
90 else:
66 repo = svn.repos.open(path)
91 repo = svn.repos.open(path)
67 return repo
92 return repo
68
93
69 def repo(self, wire, create=False, compatible_version=None):
94 def repo(self, wire, create=False, compatible_version=None):
70 def create_new_repo():
95 def create_new_repo():
71 return self._create_repo(wire, create, compatible_version)
96 return self._create_repo(wire, create, compatible_version)
72
97
73 return self._repo(wire, create_new_repo)
98 return self._repo(wire, create_new_repo)
74
99
75
100
76
101
77 NODE_TYPE_MAPPING = {
102 NODE_TYPE_MAPPING = {
78 svn.core.svn_node_file: 'file',
103 svn.core.svn_node_file: 'file',
79 svn.core.svn_node_dir: 'dir',
104 svn.core.svn_node_dir: 'dir',
80 }
105 }
81
106
82
107
83 class SvnRemote(object):
108 class SvnRemote(object):
84
109
85 def __init__(self, factory, hg_factory=None):
110 def __init__(self, factory, hg_factory=None):
86 self._factory = factory
111 self._factory = factory
87 # TODO: Remove once we do not use internal Mercurial objects anymore
112 # TODO: Remove once we do not use internal Mercurial objects anymore
88 # for subversion
113 # for subversion
89 self._hg_factory = hg_factory
114 self._hg_factory = hg_factory
90
115
116 @reraise_safe_exceptions
117 def discover_svn_version(self):
118 try:
119 import svn.core
120 svn_ver = svn.core.SVN_VERSION
121 except ImportError:
122 svn_ver = None
123 return svn_ver
124
91 def check_url(self, url, config_items):
125 def check_url(self, url, config_items):
92 # this can throw exception if not installed, but we detect this
126 # this can throw exception if not installed, but we detect this
93 from hgsubversion import svnrepo
127 from hgsubversion import svnrepo
94
128
95 baseui = self._hg_factory._create_config(config_items)
129 baseui = self._hg_factory._create_config(config_items)
96 # uuid function gets only a valid UUID from a proper repo, else
130 # uuid function gets only a valid UUID from a proper repo, else
97 # throws exception
131 # throws exception
98 try:
132 try:
99 svnrepo.svnremoterepo(baseui, url).svn.uuid
133 svnrepo.svnremoterepo(baseui, url).svn.uuid
100 except:
134 except:
101 log.debug("Invalid svn url: %s", url)
135 log.debug("Invalid svn url: %s", url)
102 raise URLError(
136 raise URLError(
103 '"%s" is not a valid Subversion source url.' % (url, ))
137 '"%s" is not a valid Subversion source url.' % (url, ))
104 return True
138 return True
105
139
106 def is_path_valid_repository(self, wire, path):
140 def is_path_valid_repository(self, wire, path):
107 try:
141 try:
108 svn.repos.open(path)
142 svn.repos.open(path)
109 except svn.core.SubversionException:
143 except svn.core.SubversionException:
110 log.debug("Invalid Subversion path %s", path)
144 log.debug("Invalid Subversion path %s", path)
111 return False
145 return False
112 return True
146 return True
113
147
114 def lookup(self, wire, revision):
148 def lookup(self, wire, revision):
115 if revision not in [-1, None, 'HEAD']:
149 if revision not in [-1, None, 'HEAD']:
116 raise NotImplementedError
150 raise NotImplementedError
117 repo = self._factory.repo(wire)
151 repo = self._factory.repo(wire)
118 fs_ptr = svn.repos.fs(repo)
152 fs_ptr = svn.repos.fs(repo)
119 head = svn.fs.youngest_rev(fs_ptr)
153 head = svn.fs.youngest_rev(fs_ptr)
120 return head
154 return head
121
155
122 def lookup_interval(self, wire, start_ts, end_ts):
156 def lookup_interval(self, wire, start_ts, end_ts):
123 repo = self._factory.repo(wire)
157 repo = self._factory.repo(wire)
124 fsobj = svn.repos.fs(repo)
158 fsobj = svn.repos.fs(repo)
125 start_rev = None
159 start_rev = None
126 end_rev = None
160 end_rev = None
127 if start_ts:
161 if start_ts:
128 start_ts_svn = apr_time_t(start_ts)
162 start_ts_svn = apr_time_t(start_ts)
129 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
163 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
130 else:
164 else:
131 start_rev = 1
165 start_rev = 1
132 if end_ts:
166 if end_ts:
133 end_ts_svn = apr_time_t(end_ts)
167 end_ts_svn = apr_time_t(end_ts)
134 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
168 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
135 else:
169 else:
136 end_rev = svn.fs.youngest_rev(fsobj)
170 end_rev = svn.fs.youngest_rev(fsobj)
137 return start_rev, end_rev
171 return start_rev, end_rev
138
172
139 def revision_properties(self, wire, revision):
173 def revision_properties(self, wire, revision):
140 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
141 fs_ptr = svn.repos.fs(repo)
175 fs_ptr = svn.repos.fs(repo)
142 return svn.fs.revision_proplist(fs_ptr, revision)
176 return svn.fs.revision_proplist(fs_ptr, revision)
143
177
144 def revision_changes(self, wire, revision):
178 def revision_changes(self, wire, revision):
145
179
146 repo = self._factory.repo(wire)
180 repo = self._factory.repo(wire)
147 fsobj = svn.repos.fs(repo)
181 fsobj = svn.repos.fs(repo)
148 rev_root = svn.fs.revision_root(fsobj, revision)
182 rev_root = svn.fs.revision_root(fsobj, revision)
149
183
150 editor = svn.repos.ChangeCollector(fsobj, rev_root)
184 editor = svn.repos.ChangeCollector(fsobj, rev_root)
151 editor_ptr, editor_baton = svn.delta.make_editor(editor)
185 editor_ptr, editor_baton = svn.delta.make_editor(editor)
152 base_dir = ""
186 base_dir = ""
153 send_deltas = False
187 send_deltas = False
154 svn.repos.replay2(
188 svn.repos.replay2(
155 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
189 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
156 editor_ptr, editor_baton, None)
190 editor_ptr, editor_baton, None)
157
191
158 added = []
192 added = []
159 changed = []
193 changed = []
160 removed = []
194 removed = []
161
195
162 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
196 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
163 for path, change in editor.changes.iteritems():
197 for path, change in editor.changes.iteritems():
164 # TODO: Decide what to do with directory nodes. Subversion can add
198 # TODO: Decide what to do with directory nodes. Subversion can add
165 # empty directories.
199 # empty directories.
200
166 if change.item_kind == svn.core.svn_node_dir:
201 if change.item_kind == svn.core.svn_node_dir:
167 continue
202 continue
168 if change.action == svn.repos.CHANGE_ACTION_ADD:
203 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
169 added.append(path)
204 added.append(path)
170 elif change.action == svn.repos.CHANGE_ACTION_MODIFY:
205 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
206 svn.repos.CHANGE_ACTION_REPLACE]:
171 changed.append(path)
207 changed.append(path)
172 elif change.action == svn.repos.CHANGE_ACTION_DELETE:
208 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
173 removed.append(path)
209 removed.append(path)
174 else:
210 else:
175 raise NotImplementedError(
211 raise NotImplementedError(
176 "Action %s not supported on path %s" % (
212 "Action %s not supported on path %s" % (
177 change.action, path))
213 change.action, path))
178
214
179 changes = {
215 changes = {
180 'added': added,
216 'added': added,
181 'changed': changed,
217 'changed': changed,
182 'removed': removed,
218 'removed': removed,
183 }
219 }
184 return changes
220 return changes
185
221
186 def node_history(self, wire, path, revision, limit):
222 def node_history(self, wire, path, revision, limit):
187 cross_copies = False
223 cross_copies = False
188 repo = self._factory.repo(wire)
224 repo = self._factory.repo(wire)
189 fsobj = svn.repos.fs(repo)
225 fsobj = svn.repos.fs(repo)
190 rev_root = svn.fs.revision_root(fsobj, revision)
226 rev_root = svn.fs.revision_root(fsobj, revision)
191
227
192 history_revisions = []
228 history_revisions = []
193 history = svn.fs.node_history(rev_root, path)
229 history = svn.fs.node_history(rev_root, path)
194 history = svn.fs.history_prev(history, cross_copies)
230 history = svn.fs.history_prev(history, cross_copies)
195 while history:
231 while history:
196 __, node_revision = svn.fs.history_location(history)
232 __, node_revision = svn.fs.history_location(history)
197 history_revisions.append(node_revision)
233 history_revisions.append(node_revision)
198 if limit and len(history_revisions) >= limit:
234 if limit and len(history_revisions) >= limit:
199 break
235 break
200 history = svn.fs.history_prev(history, cross_copies)
236 history = svn.fs.history_prev(history, cross_copies)
201 return history_revisions
237 return history_revisions
202
238
203 def node_properties(self, wire, path, revision):
239 def node_properties(self, wire, path, revision):
204 repo = self._factory.repo(wire)
240 repo = self._factory.repo(wire)
205 fsobj = svn.repos.fs(repo)
241 fsobj = svn.repos.fs(repo)
206 rev_root = svn.fs.revision_root(fsobj, revision)
242 rev_root = svn.fs.revision_root(fsobj, revision)
207 return svn.fs.node_proplist(rev_root, path)
243 return svn.fs.node_proplist(rev_root, path)
208
244
209 def file_annotate(self, wire, path, revision):
245 def file_annotate(self, wire, path, revision):
210 abs_path = 'file://' + urllib.pathname2url(
246 abs_path = 'file://' + urllib.pathname2url(
211 vcspath.join(wire['path'], path))
247 vcspath.join(wire['path'], path))
212 file_uri = svn.core.svn_path_canonicalize(abs_path)
248 file_uri = svn.core.svn_path_canonicalize(abs_path)
213
249
214 start_rev = svn_opt_revision_value_t(0)
250 start_rev = svn_opt_revision_value_t(0)
215 peg_rev = svn_opt_revision_value_t(revision)
251 peg_rev = svn_opt_revision_value_t(revision)
216 end_rev = peg_rev
252 end_rev = peg_rev
217
253
218 annotations = []
254 annotations = []
219
255
220 def receiver(line_no, revision, author, date, line, pool):
256 def receiver(line_no, revision, author, date, line, pool):
221 annotations.append((line_no, revision, line))
257 annotations.append((line_no, revision, line))
222
258
223 # TODO: Cannot use blame5, missing typemap function in the swig code
259 # TODO: Cannot use blame5, missing typemap function in the swig code
224 try:
260 try:
225 svn.client.blame2(
261 svn.client.blame2(
226 file_uri, peg_rev, start_rev, end_rev,
262 file_uri, peg_rev, start_rev, end_rev,
227 receiver, svn.client.create_context())
263 receiver, svn.client.create_context())
228 except svn.core.SubversionException as exc:
264 except svn.core.SubversionException as exc:
229 log.exception("Error during blame operation.")
265 log.exception("Error during blame operation.")
230 raise Exception(
266 raise Exception(
231 "Blame not supported or file does not exist at path %s. "
267 "Blame not supported or file does not exist at path %s. "
232 "Error %s." % (path, exc))
268 "Error %s." % (path, exc))
233
269
234 return annotations
270 return annotations
235
271
236 def get_node_type(self, wire, path, rev=None):
272 def get_node_type(self, wire, path, rev=None):
237 repo = self._factory.repo(wire)
273 repo = self._factory.repo(wire)
238 fs_ptr = svn.repos.fs(repo)
274 fs_ptr = svn.repos.fs(repo)
239 if rev is None:
275 if rev is None:
240 rev = svn.fs.youngest_rev(fs_ptr)
276 rev = svn.fs.youngest_rev(fs_ptr)
241 root = svn.fs.revision_root(fs_ptr, rev)
277 root = svn.fs.revision_root(fs_ptr, rev)
242 node = svn.fs.check_path(root, path)
278 node = svn.fs.check_path(root, path)
243 return NODE_TYPE_MAPPING.get(node, None)
279 return NODE_TYPE_MAPPING.get(node, None)
244
280
245 def get_nodes(self, wire, path, revision=None):
281 def get_nodes(self, wire, path, revision=None):
246 repo = self._factory.repo(wire)
282 repo = self._factory.repo(wire)
247 fsobj = svn.repos.fs(repo)
283 fsobj = svn.repos.fs(repo)
248 if revision is None:
284 if revision is None:
249 revision = svn.fs.youngest_rev(fsobj)
285 revision = svn.fs.youngest_rev(fsobj)
250 root = svn.fs.revision_root(fsobj, revision)
286 root = svn.fs.revision_root(fsobj, revision)
251 entries = svn.fs.dir_entries(root, path)
287 entries = svn.fs.dir_entries(root, path)
252 result = []
288 result = []
253 for entry_path, entry_info in entries.iteritems():
289 for entry_path, entry_info in entries.iteritems():
254 result.append(
290 result.append(
255 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
291 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
256 return result
292 return result
257
293
258 def get_file_content(self, wire, path, rev=None):
294 def get_file_content(self, wire, path, rev=None):
259 repo = self._factory.repo(wire)
295 repo = self._factory.repo(wire)
260 fsobj = svn.repos.fs(repo)
296 fsobj = svn.repos.fs(repo)
261 if rev is None:
297 if rev is None:
262 rev = svn.fs.youngest_revision(fsobj)
298 rev = svn.fs.youngest_revision(fsobj)
263 root = svn.fs.revision_root(fsobj, rev)
299 root = svn.fs.revision_root(fsobj, rev)
264 content = svn.core.Stream(svn.fs.file_contents(root, path))
300 content = svn.core.Stream(svn.fs.file_contents(root, path))
265 return content.read()
301 return content.read()
266
302
267 def get_file_size(self, wire, path, revision=None):
303 def get_file_size(self, wire, path, revision=None):
268 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
269 fsobj = svn.repos.fs(repo)
305 fsobj = svn.repos.fs(repo)
270 if revision is None:
306 if revision is None:
271 revision = svn.fs.youngest_revision(fsobj)
307 revision = svn.fs.youngest_revision(fsobj)
272 root = svn.fs.revision_root(fsobj, revision)
308 root = svn.fs.revision_root(fsobj, revision)
273 size = svn.fs.file_length(root, path)
309 size = svn.fs.file_length(root, path)
274 return size
310 return size
275
311
276 def create_repository(self, wire, compatible_version=None):
312 def create_repository(self, wire, compatible_version=None):
277 log.info('Creating Subversion repository in path "%s"', wire['path'])
313 log.info('Creating Subversion repository in path "%s"', wire['path'])
278 self._factory.repo(wire, create=True,
314 self._factory.repo(wire, create=True,
279 compatible_version=compatible_version)
315 compatible_version=compatible_version)
280
316
281 def import_remote_repository(self, wire, src_url):
317 def import_remote_repository(self, wire, src_url):
282 repo_path = wire['path']
318 repo_path = wire['path']
283 if not self.is_path_valid_repository(wire, repo_path):
319 if not self.is_path_valid_repository(wire, repo_path):
284 raise Exception(
320 raise Exception(
285 "Path %s is not a valid Subversion repository." % repo_path)
321 "Path %s is not a valid Subversion repository." % repo_path)
286 # TODO: johbo: URL checks ?
322 # TODO: johbo: URL checks ?
287 rdump = subprocess.Popen(
323 rdump = subprocess.Popen(
288 ['svnrdump', 'dump', '--non-interactive', src_url],
324 ['svnrdump', 'dump', '--non-interactive', src_url],
289 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
325 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
290 load = subprocess.Popen(
326 load = subprocess.Popen(
291 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
327 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
292
328
293 # TODO: johbo: This can be a very long operation, might be better
329 # TODO: johbo: This can be a very long operation, might be better
294 # to track some kind of status and provide an api to check if the
330 # to track some kind of status and provide an api to check if the
295 # import is done.
331 # import is done.
296 rdump.wait()
332 rdump.wait()
297 load.wait()
333 load.wait()
298
334
299 if rdump.returncode != 0:
335 if rdump.returncode != 0:
300 errors = rdump.stderr.read()
336 errors = rdump.stderr.read()
301 log.error('svnrdump dump failed: statuscode %s: message: %s',
337 log.error('svnrdump dump failed: statuscode %s: message: %s',
302 rdump.returncode, errors)
338 rdump.returncode, errors)
303 reason = 'UNKNOWN'
339 reason = 'UNKNOWN'
304 if 'svnrdump: E230001:' in errors:
340 if 'svnrdump: E230001:' in errors:
305 reason = 'INVALID_CERTIFICATE'
341 reason = 'INVALID_CERTIFICATE'
306 raise Exception(
342 raise Exception(
307 'Failed to dump the remote repository from %s.' % src_url,
343 'Failed to dump the remote repository from %s.' % src_url,
308 reason)
344 reason)
309 if load.returncode != 0:
345 if load.returncode != 0:
310 raise Exception(
346 raise Exception(
311 'Failed to load the dump of remote repository from %s.' %
347 'Failed to load the dump of remote repository from %s.' %
312 (src_url, ))
348 (src_url, ))
313
349
314 def commit(self, wire, message, author, timestamp, updated, removed):
350 def commit(self, wire, message, author, timestamp, updated, removed):
315 assert isinstance(message, str)
351 assert isinstance(message, str)
316 assert isinstance(author, str)
352 assert isinstance(author, str)
317
353
318 repo = self._factory.repo(wire)
354 repo = self._factory.repo(wire)
319 fsobj = svn.repos.fs(repo)
355 fsobj = svn.repos.fs(repo)
320
356
321 rev = svn.fs.youngest_rev(fsobj)
357 rev = svn.fs.youngest_rev(fsobj)
322 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
358 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
323 txn_root = svn.fs.txn_root(txn)
359 txn_root = svn.fs.txn_root(txn)
324
360
325 for node in updated:
361 for node in updated:
326 TxnNodeProcessor(node, txn_root).update()
362 TxnNodeProcessor(node, txn_root).update()
327 for node in removed:
363 for node in removed:
328 TxnNodeProcessor(node, txn_root).remove()
364 TxnNodeProcessor(node, txn_root).remove()
329
365
330 commit_id = svn.repos.fs_commit_txn(repo, txn)
366 commit_id = svn.repos.fs_commit_txn(repo, txn)
331
367
332 if timestamp:
368 if timestamp:
333 apr_time = apr_time_t(timestamp)
369 apr_time = apr_time_t(timestamp)
334 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
370 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
335 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
371 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
336
372
337 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
373 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
338 return commit_id
374 return commit_id
339
375
340 def diff(self, wire, rev1, rev2, path1=None, path2=None,
376 def diff(self, wire, rev1, rev2, path1=None, path2=None,
341 ignore_whitespace=False, context=3):
377 ignore_whitespace=False, context=3):
342 wire.update(cache=False)
378 wire.update(cache=False)
343 repo = self._factory.repo(wire)
379 repo = self._factory.repo(wire)
344 diff_creator = SvnDiffer(
380 diff_creator = SvnDiffer(
345 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
381 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
346 return diff_creator.generate_diff()
382 return diff_creator.generate_diff()
347
383
348
384
349 class SvnDiffer(object):
385 class SvnDiffer(object):
350 """
386 """
351 Utility to create diffs based on difflib and the Subversion api
387 Utility to create diffs based on difflib and the Subversion api
352 """
388 """
353
389
354 binary_content = False
390 binary_content = False
355
391
356 def __init__(
392 def __init__(
357 self, repo, src_rev, src_path, tgt_rev, tgt_path,
393 self, repo, src_rev, src_path, tgt_rev, tgt_path,
358 ignore_whitespace, context):
394 ignore_whitespace, context):
359 self.repo = repo
395 self.repo = repo
360 self.ignore_whitespace = ignore_whitespace
396 self.ignore_whitespace = ignore_whitespace
361 self.context = context
397 self.context = context
362
398
363 fsobj = svn.repos.fs(repo)
399 fsobj = svn.repos.fs(repo)
364
400
365 self.tgt_rev = tgt_rev
401 self.tgt_rev = tgt_rev
366 self.tgt_path = tgt_path or ''
402 self.tgt_path = tgt_path or ''
367 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
403 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
368 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
404 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
369
405
370 self.src_rev = src_rev
406 self.src_rev = src_rev
371 self.src_path = src_path or self.tgt_path
407 self.src_path = src_path or self.tgt_path
372 self.src_root = svn.fs.revision_root(fsobj, src_rev)
408 self.src_root = svn.fs.revision_root(fsobj, src_rev)
373 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
409 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
374
410
375 self._validate()
411 self._validate()
376
412
377 def _validate(self):
413 def _validate(self):
378 if (self.tgt_kind != svn.core.svn_node_none and
414 if (self.tgt_kind != svn.core.svn_node_none and
379 self.src_kind != svn.core.svn_node_none and
415 self.src_kind != svn.core.svn_node_none and
380 self.src_kind != self.tgt_kind):
416 self.src_kind != self.tgt_kind):
381 # TODO: johbo: proper error handling
417 # TODO: johbo: proper error handling
382 raise Exception(
418 raise Exception(
383 "Source and target are not compatible for diff generation. "
419 "Source and target are not compatible for diff generation. "
384 "Source type: %s, target type: %s" %
420 "Source type: %s, target type: %s" %
385 (self.src_kind, self.tgt_kind))
421 (self.src_kind, self.tgt_kind))
386
422
387 def generate_diff(self):
423 def generate_diff(self):
388 buf = StringIO.StringIO()
424 buf = StringIO.StringIO()
389 if self.tgt_kind == svn.core.svn_node_dir:
425 if self.tgt_kind == svn.core.svn_node_dir:
390 self._generate_dir_diff(buf)
426 self._generate_dir_diff(buf)
391 else:
427 else:
392 self._generate_file_diff(buf)
428 self._generate_file_diff(buf)
393 return buf.getvalue()
429 return buf.getvalue()
394
430
395 def _generate_dir_diff(self, buf):
431 def _generate_dir_diff(self, buf):
396 editor = DiffChangeEditor()
432 editor = DiffChangeEditor()
397 editor_ptr, editor_baton = svn.delta.make_editor(editor)
433 editor_ptr, editor_baton = svn.delta.make_editor(editor)
398 svn.repos.dir_delta2(
434 svn.repos.dir_delta2(
399 self.src_root,
435 self.src_root,
400 self.src_path,
436 self.src_path,
401 '', # src_entry
437 '', # src_entry
402 self.tgt_root,
438 self.tgt_root,
403 self.tgt_path,
439 self.tgt_path,
404 editor_ptr, editor_baton,
440 editor_ptr, editor_baton,
405 authorization_callback_allow_all,
441 authorization_callback_allow_all,
406 False, # text_deltas
442 False, # text_deltas
407 svn.core.svn_depth_infinity, # depth
443 svn.core.svn_depth_infinity, # depth
408 False, # entry_props
444 False, # entry_props
409 False, # ignore_ancestry
445 False, # ignore_ancestry
410 )
446 )
411
447
412 for path, __, change in sorted(editor.changes):
448 for path, __, change in sorted(editor.changes):
413 self._generate_node_diff(
449 self._generate_node_diff(
414 buf, change, path, self.tgt_path, path, self.src_path)
450 buf, change, path, self.tgt_path, path, self.src_path)
415
451
416 def _generate_file_diff(self, buf):
452 def _generate_file_diff(self, buf):
417 change = None
453 change = None
418 if self.src_kind == svn.core.svn_node_none:
454 if self.src_kind == svn.core.svn_node_none:
419 change = "add"
455 change = "add"
420 elif self.tgt_kind == svn.core.svn_node_none:
456 elif self.tgt_kind == svn.core.svn_node_none:
421 change = "delete"
457 change = "delete"
422 tgt_base, tgt_path = vcspath.split(self.tgt_path)
458 tgt_base, tgt_path = vcspath.split(self.tgt_path)
423 src_base, src_path = vcspath.split(self.src_path)
459 src_base, src_path = vcspath.split(self.src_path)
424 self._generate_node_diff(
460 self._generate_node_diff(
425 buf, change, tgt_path, tgt_base, src_path, src_base)
461 buf, change, tgt_path, tgt_base, src_path, src_base)
426
462
427 def _generate_node_diff(
463 def _generate_node_diff(
428 self, buf, change, tgt_path, tgt_base, src_path, src_base):
464 self, buf, change, tgt_path, tgt_base, src_path, src_base):
429 tgt_full_path = vcspath.join(tgt_base, tgt_path)
465 tgt_full_path = vcspath.join(tgt_base, tgt_path)
430 src_full_path = vcspath.join(src_base, src_path)
466 src_full_path = vcspath.join(src_base, src_path)
431
467
432 self.binary_content = False
468 self.binary_content = False
433 mime_type = self._get_mime_type(tgt_full_path)
469 mime_type = self._get_mime_type(tgt_full_path)
434 if mime_type and not mime_type.startswith('text'):
470 if mime_type and not mime_type.startswith('text'):
435 self.binary_content = True
471 self.binary_content = True
436 buf.write("=" * 67 + '\n')
472 buf.write("=" * 67 + '\n')
437 buf.write("Cannot display: file marked as a binary type.\n")
473 buf.write("Cannot display: file marked as a binary type.\n")
438 buf.write("svn:mime-type = %s\n" % mime_type)
474 buf.write("svn:mime-type = %s\n" % mime_type)
439 buf.write("Index: %s\n" % (tgt_path, ))
475 buf.write("Index: %s\n" % (tgt_path, ))
440 buf.write("=" * 67 + '\n')
476 buf.write("=" * 67 + '\n')
441 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
477 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
442 'tgt_path': tgt_path})
478 'tgt_path': tgt_path})
443
479
444 if change == 'add':
480 if change == 'add':
445 # TODO: johbo: SVN is missing a zero here compared to git
481 # TODO: johbo: SVN is missing a zero here compared to git
446 buf.write("new file mode 10644\n")
482 buf.write("new file mode 10644\n")
447 buf.write("--- /dev/null\t(revision 0)\n")
483 buf.write("--- /dev/null\t(revision 0)\n")
448 src_lines = []
484 src_lines = []
449 else:
485 else:
450 if change == 'delete':
486 if change == 'delete':
451 buf.write("deleted file mode 10644\n")
487 buf.write("deleted file mode 10644\n")
452 buf.write("--- a/%s\t(revision %s)\n" % (
488 buf.write("--- a/%s\t(revision %s)\n" % (
453 src_path, self.src_rev))
489 src_path, self.src_rev))
454 src_lines = self._svn_readlines(self.src_root, src_full_path)
490 src_lines = self._svn_readlines(self.src_root, src_full_path)
455
491
456 if change == 'delete':
492 if change == 'delete':
457 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
493 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
458 tgt_lines = []
494 tgt_lines = []
459 else:
495 else:
460 buf.write("+++ b/%s\t(revision %s)\n" % (
496 buf.write("+++ b/%s\t(revision %s)\n" % (
461 tgt_path, self.tgt_rev))
497 tgt_path, self.tgt_rev))
462 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
498 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
463
499
464 if not self.binary_content:
500 if not self.binary_content:
465 udiff = svn_diff.unified_diff(
501 udiff = svn_diff.unified_diff(
466 src_lines, tgt_lines, context=self.context,
502 src_lines, tgt_lines, context=self.context,
467 ignore_blank_lines=self.ignore_whitespace,
503 ignore_blank_lines=self.ignore_whitespace,
468 ignore_case=False,
504 ignore_case=False,
469 ignore_space_changes=self.ignore_whitespace)
505 ignore_space_changes=self.ignore_whitespace)
470 buf.writelines(udiff)
506 buf.writelines(udiff)
471
507
472 def _get_mime_type(self, path):
508 def _get_mime_type(self, path):
473 try:
509 try:
474 mime_type = svn.fs.node_prop(
510 mime_type = svn.fs.node_prop(
475 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
511 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
476 except svn.core.SubversionException:
512 except svn.core.SubversionException:
477 mime_type = svn.fs.node_prop(
513 mime_type = svn.fs.node_prop(
478 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
514 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
479 return mime_type
515 return mime_type
480
516
481 def _svn_readlines(self, fs_root, node_path):
517 def _svn_readlines(self, fs_root, node_path):
482 if self.binary_content:
518 if self.binary_content:
483 return []
519 return []
484 node_kind = svn.fs.check_path(fs_root, node_path)
520 node_kind = svn.fs.check_path(fs_root, node_path)
485 if node_kind not in (
521 if node_kind not in (
486 svn.core.svn_node_file, svn.core.svn_node_symlink):
522 svn.core.svn_node_file, svn.core.svn_node_symlink):
487 return []
523 return []
488 content = svn.core.Stream(
524 content = svn.core.Stream(
489 svn.fs.file_contents(fs_root, node_path)).read()
525 svn.fs.file_contents(fs_root, node_path)).read()
490 return content.splitlines(True)
526 return content.splitlines(True)
491
527
492
528
493 class DiffChangeEditor(svn.delta.Editor):
529 class DiffChangeEditor(svn.delta.Editor):
494 """
530 """
495 Records changes between two given revisions
531 Records changes between two given revisions
496 """
532 """
497
533
498 def __init__(self):
534 def __init__(self):
499 self.changes = []
535 self.changes = []
500
536
501 def delete_entry(self, path, revision, parent_baton, pool=None):
537 def delete_entry(self, path, revision, parent_baton, pool=None):
502 self.changes.append((path, None, 'delete'))
538 self.changes.append((path, None, 'delete'))
503
539
504 def add_file(
540 def add_file(
505 self, path, parent_baton, copyfrom_path, copyfrom_revision,
541 self, path, parent_baton, copyfrom_path, copyfrom_revision,
506 file_pool=None):
542 file_pool=None):
507 self.changes.append((path, 'file', 'add'))
543 self.changes.append((path, 'file', 'add'))
508
544
509 def open_file(self, path, parent_baton, base_revision, file_pool=None):
545 def open_file(self, path, parent_baton, base_revision, file_pool=None):
510 self.changes.append((path, 'file', 'change'))
546 self.changes.append((path, 'file', 'change'))
511
547
512
548
513 def authorization_callback_allow_all(root, path, pool):
549 def authorization_callback_allow_all(root, path, pool):
514 return True
550 return True
515
551
516
552
517 class TxnNodeProcessor(object):
553 class TxnNodeProcessor(object):
518 """
554 """
519 Utility to process the change of one node within a transaction root.
555 Utility to process the change of one node within a transaction root.
520
556
521 It encapsulates the knowledge of how to add, update or remove
557 It encapsulates the knowledge of how to add, update or remove
522 a node for a given transaction root. The purpose is to support the method
558 a node for a given transaction root. The purpose is to support the method
523 `SvnRemote.commit`.
559 `SvnRemote.commit`.
524 """
560 """
525
561
526 def __init__(self, node, txn_root):
562 def __init__(self, node, txn_root):
527 assert isinstance(node['path'], str)
563 assert isinstance(node['path'], str)
528
564
529 self.node = node
565 self.node = node
530 self.txn_root = txn_root
566 self.txn_root = txn_root
531
567
532 def update(self):
568 def update(self):
533 self._ensure_parent_dirs()
569 self._ensure_parent_dirs()
534 self._add_file_if_node_does_not_exist()
570 self._add_file_if_node_does_not_exist()
535 self._update_file_content()
571 self._update_file_content()
536 self._update_file_properties()
572 self._update_file_properties()
537
573
538 def remove(self):
574 def remove(self):
539 svn.fs.delete(self.txn_root, self.node['path'])
575 svn.fs.delete(self.txn_root, self.node['path'])
540 # TODO: Clean up directory if empty
576 # TODO: Clean up directory if empty
541
577
542 def _ensure_parent_dirs(self):
578 def _ensure_parent_dirs(self):
543 curdir = vcspath.dirname(self.node['path'])
579 curdir = vcspath.dirname(self.node['path'])
544 dirs_to_create = []
580 dirs_to_create = []
545 while not self._svn_path_exists(curdir):
581 while not self._svn_path_exists(curdir):
546 dirs_to_create.append(curdir)
582 dirs_to_create.append(curdir)
547 curdir = vcspath.dirname(curdir)
583 curdir = vcspath.dirname(curdir)
548
584
549 for curdir in reversed(dirs_to_create):
585 for curdir in reversed(dirs_to_create):
550 log.debug('Creating missing directory "%s"', curdir)
586 log.debug('Creating missing directory "%s"', curdir)
551 svn.fs.make_dir(self.txn_root, curdir)
587 svn.fs.make_dir(self.txn_root, curdir)
552
588
553 def _svn_path_exists(self, path):
589 def _svn_path_exists(self, path):
554 path_status = svn.fs.check_path(self.txn_root, path)
590 path_status = svn.fs.check_path(self.txn_root, path)
555 return path_status != svn.core.svn_node_none
591 return path_status != svn.core.svn_node_none
556
592
557 def _add_file_if_node_does_not_exist(self):
593 def _add_file_if_node_does_not_exist(self):
558 kind = svn.fs.check_path(self.txn_root, self.node['path'])
594 kind = svn.fs.check_path(self.txn_root, self.node['path'])
559 if kind == svn.core.svn_node_none:
595 if kind == svn.core.svn_node_none:
560 svn.fs.make_file(self.txn_root, self.node['path'])
596 svn.fs.make_file(self.txn_root, self.node['path'])
561
597
562 def _update_file_content(self):
598 def _update_file_content(self):
563 assert isinstance(self.node['content'], str)
599 assert isinstance(self.node['content'], str)
564 handler, baton = svn.fs.apply_textdelta(
600 handler, baton = svn.fs.apply_textdelta(
565 self.txn_root, self.node['path'], None, None)
601 self.txn_root, self.node['path'], None, None)
566 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
602 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
567
603
568 def _update_file_properties(self):
604 def _update_file_properties(self):
569 properties = self.node.get('properties', {})
605 properties = self.node.get('properties', {})
570 for key, value in properties.iteritems():
606 for key, value in properties.iteritems():
571 svn.fs.change_node_prop(
607 svn.fs.change_node_prop(
572 self.txn_root, self.node['path'], key, value)
608 self.txn_root, self.node['path'], key, value)
573
609
574
610
575 def apr_time_t(timestamp):
611 def apr_time_t(timestamp):
576 """
612 """
577 Convert a Python timestamp into APR timestamp type apr_time_t
613 Convert a Python timestamp into APR timestamp type apr_time_t
578 """
614 """
579 return timestamp * 1E6
615 return timestamp * 1E6
580
616
581
617
582 def svn_opt_revision_value_t(num):
618 def svn_opt_revision_value_t(num):
583 """
619 """
584 Put `num` into a `svn_opt_revision_value_t` structure.
620 Put `num` into a `svn_opt_revision_value_t` structure.
585 """
621 """
586 value = svn.core.svn_opt_revision_value_t()
622 value = svn.core.svn_opt_revision_value_t()
587 value.number = num
623 value.number = num
588 revision = svn.core.svn_opt_revision_t()
624 revision = svn.core.svn_opt_revision_t()
589 revision.kind = svn.core.svn_opt_revision_number
625 revision.kind = svn.core.svn_opt_revision_number
590 revision.value = value
626 revision.value = value
591 return revision
627 return revision
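
The svn.py diff above introduces reraise_safe_exceptions, a decorator that converts backend exceptions lacking a _vcs_kind marker into a neutral exception type before they leave the remote call. A simplified, self-contained sketch of that pattern follows; UnhandledException here stands in for vcsserver.exceptions.UnhandledException, and the traceback-preserving re-raise done by raise_from_original is omitted for brevity.

    import functools
    import logging

    log = logging.getLogger(__name__)

    class UnhandledException(Exception):
        """Neutral stand-in for vcsserver.exceptions.UnhandledException."""

    def reraise_safe_exceptions(func):
        # Exceptions without a '_vcs_kind' marker are logged and re-raised as
        # the neutral type; already-tagged exceptions pass through unchanged.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                if not hasattr(e, '_vcs_kind'):
                    log.exception("Unhandled exception in svn remote call")
                    raise UnhandledException(*e.args)
                raise
        return wrapper

    @reraise_safe_exceptions
    def failing_call():
        raise RuntimeError('backend failure')

    # Calling failing_call() now surfaces as UnhandledException('backend failure').
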