release: Merge default into stable for release preparation
marcink
r572:fd48aa4e merge stable
@@ -1,6 +1,6 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.13.3
2 current_version = 4.14.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
6
6
@@ -1,16 +1,14 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.13.3
13 version = 4.14.0
16
14
@@ -1,87 +1,87 b''
1 ################################################################################
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
3 ################################################################################
5
4
6
5
7 [server:main]
6 [server:main]
8 ## COMMON ##
7 ## COMMON ##
9 host = 0.0.0.0
8 host = 0.0.0.0
10 port = 9900
9 port = 9900
11
10
12 use = egg:waitress#main
11 use = egg:waitress#main
13
12
14
13
15 [app:main]
14 [app:main]
16 use = egg:rhodecode-vcsserver
15 use = egg:rhodecode-vcsserver
17
16
18 pyramid.default_locale_name = en
17 pyramid.default_locale_name = en
19 pyramid.includes =
18 pyramid.includes =
20
19
21 ## default locale used by VCS systems
20 ## default locale used by VCS systems
22 locale = en_US.UTF-8
21 locale = en_US.UTF-8
23
22
24
23
25 ## path to binaries for vcsserver, it should be set by the installer
24 ## path to binaries for vcsserver, it should be set by the installer
26 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
25 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
27 core.binary_dir = ""
26 core.binary_dir = ""
28
27
29 ## custom exception store path, defaults to TMPDIR
28 ## Custom exception store path, defaults to TMPDIR
30 exception_tracker.store_path =
29 ## This is used to store exceptions from RhodeCode in a shared directory
30 #exception_tracker.store_path =
31
31
32 ## Default cache dir for caches. Putting this into a ramdisk
32 ## Default cache dir for caches. Putting this into a ramdisk
33 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
33 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
34 ## large ammount of space
34 ## large amount of space
35 cache_dir = %(here)s/rcdev/data
35 cache_dir = %(here)s/rcdev/data
36
36
37 ## cache region for storing repo_objects cache
37 ## cache region for storing repo_objects cache
38 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
38 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
39 ## cache auto-expires after N seconds
39 ## cache auto-expires after N seconds
40 rc_cache.repo_object.expiration_time = 300
40 rc_cache.repo_object.expiration_time = 300
41 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
41 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
42 rc_cache.repo_object.max_size = 100
42 rc_cache.repo_object.max_size = 100
43
43
44
44
45 ################################
45 ################################
46 ### LOGGING CONFIGURATION ####
46 ### LOGGING CONFIGURATION ####
47 ################################
47 ################################
48 [loggers]
48 [loggers]
49 keys = root, vcsserver
49 keys = root, vcsserver
50
50
51 [handlers]
51 [handlers]
52 keys = console
52 keys = console
53
53
54 [formatters]
54 [formatters]
55 keys = generic
55 keys = generic
56
56
57 #############
57 #############
58 ## LOGGERS ##
58 ## LOGGERS ##
59 #############
59 #############
60 [logger_root]
60 [logger_root]
61 level = NOTSET
61 level = NOTSET
62 handlers = console
62 handlers = console
63
63
64 [logger_vcsserver]
64 [logger_vcsserver]
65 level = DEBUG
65 level = DEBUG
66 handlers =
66 handlers =
67 qualname = vcsserver
67 qualname = vcsserver
68 propagate = 1
68 propagate = 1
69
69
70
70
71 ##############
71 ##############
72 ## HANDLERS ##
72 ## HANDLERS ##
73 ##############
73 ##############
74
74
75 [handler_console]
75 [handler_console]
76 class = StreamHandler
76 class = StreamHandler
77 args = (sys.stderr,)
77 args = (sys.stderr,)
78 level = DEBUG
78 level = DEBUG
79 formatter = generic
79 formatter = generic
80
80
81 ################
81 ################
82 ## FORMATTERS ##
82 ## FORMATTERS ##
83 ################
83 ################
84
84
85 [formatter_generic]
85 [formatter_generic]
86 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
86 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
87 datefmt = %Y-%m-%d %H:%M:%S
87 datefmt = %Y-%m-%d %H:%M:%S
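
Note on the rc_cache.repo_object.* keys above: they configure a dogpile.cache region inside the VCSServer. The memory_lru backend named in the ini is RhodeCode's own; as a rough illustration of the same idea using the stock in-memory backend that ships with dogpile.cache (the function name load_repo_object is hypothetical), the equivalent Python wiring looks roughly like this:

    from dogpile.cache import make_region

    # Region mirroring the ini keys above: cached values expire after 300 s.
    # The stock "dogpile.cache.memory" backend stands in for RhodeCode's
    # custom dogpile.cache.rc.memory_lru backend, which additionally caps
    # the cache at max_size entries.
    repo_object_region = make_region().configure(
        "dogpile.cache.memory",
        expiration_time=300,
    )

    @repo_object_region.cache_on_arguments()
    def load_repo_object(repo_id):
        # Hypothetical expensive lookup; the result is cached per repo_id
        # until expiration_time elapses.
        return {"repo_id": repo_id}
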
@@ -1,108 +1,108 b''
1 ################################################################################
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 ################################################################################
3 ################################################################################
5
4
6
5
7 [server:main]
6 [server:main]
8 ## COMMON ##
7 ## COMMON ##
9 host = 127.0.0.1
8 host = 127.0.0.1
10 port = 9900
9 port = 9900
11
10
12
11
13 ##########################
12 ##########################
14 ## GUNICORN WSGI SERVER ##
13 ## GUNICORN WSGI SERVER ##
15 ##########################
14 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
15 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
16 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
17 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
18 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 workers = 2
19 workers = 2
21 ## process name
20 ## process name
22 proc_name = rhodecode_vcsserver
21 proc_name = rhodecode_vcsserver
23 ## type of worker class, currently `sync` is the only option allowed.
22 ## type of worker class, currently `sync` is the only option allowed.
24 worker_class = sync
23 worker_class = sync
25 ## The maximum number of simultaneous clients. Valid only for Gevent
24 ## The maximum number of simultaneous clients. Valid only for Gevent
26 #worker_connections = 10
25 #worker_connections = 10
27 ## max number of requests that worker will handle before being gracefully
26 ## max number of requests that worker will handle before being gracefully
28 ## restarted, could prevent memory leaks
27 ## restarted, could prevent memory leaks
29 max_requests = 1000
28 max_requests = 1000
30 max_requests_jitter = 30
29 max_requests_jitter = 30
31 ## amount of time a worker can spend with handling a request before it
30 ## amount of time a worker can spend with handling a request before it
32 ## gets killed and restarted. Set to 6hrs
31 ## gets killed and restarted. Set to 6hrs
33 timeout = 21600
32 timeout = 21600
34
33
35
34
36 [app:main]
35 [app:main]
37 use = egg:rhodecode-vcsserver
36 use = egg:rhodecode-vcsserver
38
37
39 pyramid.default_locale_name = en
38 pyramid.default_locale_name = en
40 pyramid.includes =
39 pyramid.includes =
41
40
42 ## default locale used by VCS systems
41 ## default locale used by VCS systems
43 locale = en_US.UTF-8
42 locale = en_US.UTF-8
44
43
45
44
46 ## path to binaries for vcsserver, it should be set by the installer
45 ## path to binaries for vcsserver, it should be set by the installer
47 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
46 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
48 core.binary_dir = ""
47 core.binary_dir = ""
49
48
50 ## custom exception store path, defaults to TMPDIR
49 ## Custom exception store path, defaults to TMPDIR
51 exception_tracker.store_path =
50 ## This is used to store exceptions from RhodeCode in a shared directory
51 #exception_tracker.store_path =
52
52
53 ## Default cache dir for caches. Putting this into a ramdisk
53 ## Default cache dir for caches. Putting this into a ramdisk
54 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
54 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
55 ## large ammount of space
55 ## large amount of space
56 cache_dir = %(here)s/rcdev/data
56 cache_dir = %(here)s/rcdev/data
57
57
58 ## cache region for storing repo_objects cache
58 ## cache region for storing repo_objects cache
59 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
59 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
60 ## cache auto-expires after N seconds
60 ## cache auto-expires after N seconds
61 rc_cache.repo_object.expiration_time = 300
61 rc_cache.repo_object.expiration_time = 300
62 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
62 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
63 rc_cache.repo_object.max_size = 100
63 rc_cache.repo_object.max_size = 100
64
64
65
65
66 ################################
66 ################################
67 ### LOGGING CONFIGURATION ####
67 ### LOGGING CONFIGURATION ####
68 ################################
68 ################################
69 [loggers]
69 [loggers]
70 keys = root, vcsserver
70 keys = root, vcsserver
71
71
72 [handlers]
72 [handlers]
73 keys = console
73 keys = console
74
74
75 [formatters]
75 [formatters]
76 keys = generic
76 keys = generic
77
77
78 #############
78 #############
79 ## LOGGERS ##
79 ## LOGGERS ##
80 #############
80 #############
81 [logger_root]
81 [logger_root]
82 level = NOTSET
82 level = NOTSET
83 handlers = console
83 handlers = console
84
84
85 [logger_vcsserver]
85 [logger_vcsserver]
86 level = DEBUG
86 level = DEBUG
87 handlers =
87 handlers =
88 qualname = vcsserver
88 qualname = vcsserver
89 propagate = 1
89 propagate = 1
90
90
91
91
92 ##############
92 ##############
93 ## HANDLERS ##
93 ## HANDLERS ##
94 ##############
94 ##############
95
95
96 [handler_console]
96 [handler_console]
97 class = StreamHandler
97 class = StreamHandler
98 args = (sys.stderr,)
98 args = (sys.stderr,)
99 level = DEBUG
99 level = DEBUG
100 formatter = generic
100 formatter = generic
101
101
102 ################
102 ################
103 ## FORMATTERS ##
103 ## FORMATTERS ##
104 ################
104 ################
105
105
106 [formatter_generic]
106 [formatter_generic]
107 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
107 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
108 datefmt = %Y-%m-%d %H:%M:%S
108 datefmt = %Y-%m-%d %H:%M:%S
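
The worker sizing comment above (2 * NUMBER_OF_CPUS + 1) is the usual gunicorn rule of thumb. If you would rather derive the number at start-up than hard-code workers = 2, gunicorn can also read a Python config file via -c; a minimal sketch, with the file name gunicorn_conf.py chosen purely for illustration:

    # gunicorn_conf.py -- illustrative only; the ini above hard-codes workers = 2
    import multiprocessing

    # (2 * NUMBER_OF_CPUS + 1), e.g. a 2-CPU host gets 5 workers,
    # matching the recommendation in the ini comment above.
    workers = multiprocessing.cpu_count() * 2 + 1

    # Keep the remaining settings aligned with the ini section above.
    worker_class = "sync"
    max_requests = 1000
    max_requests_jitter = 30
    timeout = 21600  # 6 hours

If you mix a -c config file with the paste ini shown here, check how your gunicorn version resolves precedence between the two before relying on it.
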
@@ -1,178 +1,196 b''
1 # Nix environment for the community edition
1 # Nix environment for the community edition
2 #
2 #
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 # derivation. For advanced tweaks to pimp up the development environment we use
4 # derivation. For advanced tweaks to pimp up the development environment we use
5 # "shell.nix" so that it does not have to clutter this file.
5 # "shell.nix" so that it does not have to clutter this file.
6
6
7 args@
7 args@
8 { pythonPackages ? "python27Packages"
8 { pythonPackages ? "python27Packages"
9 , pythonExternalOverrides ? self: super: {}
9 , pythonExternalOverrides ? self: super: {}
10 , doCheck ? false
10 , doCheck ? false
11 , ...
11 , ...
12 }:
12 }:
13
13
14 let pkgs_ = (import <nixpkgs> {}); in
14 let
15 pkgs_ = (import <nixpkgs> {});
16 in
15
17
16 let
18 let
17
18 # TODO: Currently we ignore the passed in pkgs, instead we should use it
19 # somehow as a base and apply overlays to it.
20 pkgs = import <nixpkgs> {
19 pkgs = import <nixpkgs> {
21 overlays = [
20 overlays = [
22 (import ./pkgs/overlays.nix)
21 (import ./pkgs/overlays.nix)
23 ];
22 ];
24 inherit (pkgs_)
23 inherit
24 (pkgs_)
25 system;
25 system;
26 };
26 };
27
27
28 # Works with the new python-packages, still can fallback to the old
28 # Works with the new python-packages, still can fallback to the old
29 # variant.
29 # variant.
30 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
30 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
31 self: basePythonPackages.override (a: { inherit self; }));
31 self: basePythonPackages.override (a: { inherit self; }));
32
32
33 # Evaluates to the last segment of a file system path.
33 # Evaluates to the last segment of a file system path.
34 basename = path: with pkgs.lib; last (splitString "/" path);
34 basename = path: with pkgs.lib; last (splitString "/" path);
35
35
36 # source code filter used as argument to builtins.filterSource.
36 # source code filter used as argument to builtins.filterSource.
37 src-filter = path: type: with pkgs.lib;
37 src-filter = path: type: with pkgs.lib;
38 let
38 let
39 ext = last (splitString "." path);
39 ext = last (splitString "." path);
40 in
40 in
41 !builtins.elem (basename path) [
41 !builtins.elem (basename path) [
42 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
42 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
43 "bower_components" "node_modules"
43 "node_modules" "node_binaries"
44 "build" "data" "result" "tmp"] &&
44 "build" "data" "result" "tmp"] &&
45 !builtins.elem ext ["egg-info" "pyc"] &&
45 !builtins.elem ext ["egg-info" "pyc"] &&
46 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
46 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
47 # it would still be good to restore it since we want to ignore "result-*".
47 # it would still be good to restore it since we want to ignore "result-*".
48 !hasPrefix "result" path;
48 !hasPrefix "result" path;
49
49
50 sources =
50 sources =
51 let
51 let
52 inherit (pkgs.lib) all isString attrValues;
52 inherit
53 (pkgs.lib)
54 all
55 isString
56 attrValues;
53 sourcesConfig = pkgs.config.rc.sources or {};
57 sourcesConfig = pkgs.config.rc.sources or {};
54 in
58 in
55 # Ensure that sources are configured as strings. Using a path
59 # Ensure that sources are configured as strings. Using a path
56 # would result in a copy into the nix store.
60 # would result in a copy into the nix store.
57 assert all isString (attrValues sourcesConfig);
61 assert all isString (attrValues sourcesConfig);
58 sourcesConfig;
62 sourcesConfig;
59
63
60 version = builtins.readFile "${rhodecode-vcsserver-src}/vcsserver/VERSION";
64 version = builtins.readFile "${rhodecode-vcsserver-src}/vcsserver/VERSION";
61 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
65 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
62
66
63 pythonLocalOverrides = self: super: {
67 pythonLocalOverrides = self: super: {
64 rhodecode-vcsserver =
68 rhodecode-vcsserver =
65 let
69 let
66 releaseName = "RhodeCodeVCSServer-${version}";
70 releaseName = "RhodeCodeVCSServer-${version}";
67 in super.rhodecode-vcsserver.override (attrs: {
71 in super.rhodecode-vcsserver.override (attrs: {
68 inherit
72 inherit
69 doCheck
73 doCheck
70 version;
74 version;
71
75
72 name = "rhodecode-vcsserver-${version}";
76 name = "rhodecode-vcsserver-${version}";
73 releaseName = releaseName;
77 releaseName = releaseName;
74 src = rhodecode-vcsserver-src;
78 src = rhodecode-vcsserver-src;
75 dontStrip = true; # prevent strip, we don't need it.
79 dontStrip = true; # prevent strip, we don't need it.
76
80
77 # expose following attributes outside
81 # expose following attributes outside
78 passthru = {
82 passthru = {
79 pythonPackages = self;
83 pythonPackages = self;
80 };
84 };
81
85
82 propagatedBuildInputs =
86 propagatedBuildInputs =
83 attrs.propagatedBuildInputs or [] ++ [
87 attrs.propagatedBuildInputs or [] ++ [
84 pkgs.git
88 pkgs.git
85 pkgs.subversion
89 pkgs.subversion
86 ];
90 ];
87
91
88 # set some default locale env variables
92 # set some default locale env variables
89 LC_ALL = "en_US.UTF-8";
93 LC_ALL = "en_US.UTF-8";
90 LOCALE_ARCHIVE =
94 LOCALE_ARCHIVE =
91 if pkgs.stdenv.isLinux
95 if pkgs.stdenv.isLinux
92 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
96 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
93 else "";
97 else "";
94
98
95 # Add bin directory to path so that tests can find 'vcsserver'.
99 # Add bin directory to path so that tests can find 'vcsserver'.
96 preCheck = ''
100 preCheck = ''
97 export PATH="$out/bin:$PATH"
101 export PATH="$out/bin:$PATH"
98 '';
102 '';
99
103
100 # custom check phase for testing
104 # custom check phase for testing
101 checkPhase = ''
105 checkPhase = ''
102 runHook preCheck
106 runHook preCheck
103 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
107 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
104 runHook postCheck
108 runHook postCheck
105 '';
109 '';
106
110
107 postCheck = ''
111 postCheck = ''
108 echo "Cleanup of vcsserver/tests"
112 echo "Cleanup of vcsserver/tests"
109 rm -rf $out/lib/${self.python.libPrefix}/site-packages/vcsserver/tests
113 rm -rf $out/lib/${self.python.libPrefix}/site-packages/vcsserver/tests
110 '';
114 '';
111
115
112 postInstall = ''
116 postInstall = ''
113 echo "Writing vcsserver meta information for rccontrol to nix-support/rccontrol"
117 echo "Writing vcsserver meta information for rccontrol to nix-support/rccontrol"
114 mkdir -p $out/nix-support/rccontrol
118 mkdir -p $out/nix-support/rccontrol
115 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
119 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
116 echo "DONE: vcsserver meta information for rccontrol written"
120 echo "DONE: vcsserver meta information for rccontrol written"
117
121
118 mkdir -p $out/etc
122 mkdir -p $out/etc
119 cp configs/production.ini $out/etc
123 cp configs/production.ini $out/etc
120 echo "DONE: saved vcsserver production.ini into $out/etc"
124 echo "DONE: saved vcsserver production.ini into $out/etc"
121
125
122 # python based programs need to be wrapped
126 # python based programs need to be wrapped
123 mkdir -p $out/bin
127 mkdir -p $out/bin
124 ln -s ${self.python}/bin/python $out/bin
128 ln -s ${self.python}/bin/python $out/bin/
125 ln -s ${self.pyramid}/bin/* $out/bin/
126 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
129 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
130 ln -s ${self.pyramid}/bin/prequest $out/bin/
131 ln -s ${self.pyramid}/bin/pserve $out/bin/
127
132
128 # Symlink version control utilities
133 # Symlink version control utilities
129 # We ensure that always the correct version is available as a symlink.
134 # We ensure that always the correct version is available as a symlink.
130 # So that users calling them via the profile path will always use the
135 # So that users calling them via the profile path will always use the
131 # correct version.
136 # correct version. Wrapping is required so those can "import"
137 # vcsserver python hooks.
132
138
133 ln -s ${pkgs.git}/bin/git $out/bin
139 ln -s ${pkgs.git}/bin/git $out/bin
134 ln -s ${self.mercurial}/bin/hg $out/bin
140 ln -s ${self.mercurial}/bin/hg $out/bin
135 ln -s ${pkgs.subversion}/bin/svn* $out/bin
141 ln -s ${pkgs.subversion}/bin/svn* $out/bin
142
136 echo "DONE: created symlinks into $out/bin"
143 echo "DONE: created symlinks into $out/bin"
144 DEPS="$out/bin/*"
137
145
138 for file in $out/bin/*;
146 # wrap only dependency scripts, they require to have full PYTHONPATH set
147 # to be able to import all packages
148 for file in $DEPS;
139 do
149 do
140 wrapProgram $file \
150 wrapProgram $file \
141 --prefix PATH : $PATH \
151 --prefix PATH : $PATH \
142 --prefix PYTHONPATH : $PYTHONPATH \
152 --prefix PYTHONPATH : $PYTHONPATH \
143 --set PYTHONHASHSEED random
153 --set PYTHONHASHSEED random
144 done
154 done
155
145 echo "DONE: vcsserver binary wrapping"
156 echo "DONE: vcsserver binary wrapping"
146
157
147 '';
158 '';
148
159
149 });
160 });
150 };
161 };
151
162
152 basePythonPackages = with builtins;
163 basePythonPackages = with builtins;
153 if isAttrs pythonPackages then
164 if isAttrs pythonPackages then
154 pythonPackages
165 pythonPackages
155 else
166 else
156 getAttr pythonPackages pkgs;
167 getAttr pythonPackages pkgs;
157
168
158 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
169 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
159 inherit pkgs;
170 inherit
160 inherit (pkgs) fetchurl fetchgit fetchhg;
171 pkgs;
172 inherit
173 (pkgs)
174 fetchurl
175 fetchgit
176 fetchhg;
161 };
177 };
162
178
163 pythonVCSServerOverrides = import ./pkgs/python-packages-overrides.nix {
179 pythonVCSServerOverrides = import ./pkgs/python-packages-overrides.nix {
164 inherit pkgs basePythonPackages;
180 inherit
181 pkgs
182 basePythonPackages;
165 };
183 };
166
184
167
185
168 # Apply all overrides and fix the final package set
186 # Apply all overrides and fix the final package set
169 myPythonPackagesUnfix = with pkgs.lib;
187 myPythonPackagesUnfix = with pkgs.lib;
170 (extends pythonExternalOverrides
188 (extends pythonExternalOverrides
171 (extends pythonLocalOverrides
189 (extends pythonLocalOverrides
172 (extends pythonVCSServerOverrides
190 (extends pythonVCSServerOverrides
173 (extends pythonGeneratedPackages
191 (extends pythonGeneratedPackages
174 basePythonPackagesUnfix))));
192 basePythonPackagesUnfix))));
175
193
176 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
194 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
177
195
178 in myPythonPackages.rhodecode-vcsserver
196 in myPythonPackages.rhodecode-vcsserver
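
The checkPhase above drives the test suite through py.test with coverage over the vcsserver package. A rough local equivalent (a sketch only, assuming a development checkout with pytest and pytest-cov available; PYTHONHASHSEED has to be exported in the shell before Python starts, since hash randomisation cannot be changed from inside a running interpreter):

    # run_tests.py -- illustrative stand-in for the Nix checkPhase above
    import sys

    import pytest

    # Same flags as the checkPhase: verbose output, pytest-sugar disabled,
    # short summary for xfailed tests and warnings, coverage over vcsserver.
    sys.exit(pytest.main([
        "-vv", "-p", "no:sugar", "-r", "xw",
        "--cov-config=.coveragerc",
        "--cov=vcsserver",
        "--cov-report=term-missing",
        "vcsserver",
    ]))
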
@@ -1,45 +1,45 b''
1 self: super: {
1 self: super: {
2 # bump GIT version
2 # bump GIT version
3 git = super.lib.overrideDerivation super.git (oldAttrs: {
3 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 name = "git-2.17.2";
4 name = "git-2.17.2";
5 src = self.fetchurl {
5 src = self.fetchurl {
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.17.2.tar.xz";
6 url = "https://www.kernel.org/pub/software/scm/git/git-2.17.2.tar.xz";
7 sha256 = "1ghljlxmyqphx13qspy382cpl2pbkbwbhqm7w7z57r9mkhswx668";
7 sha256 = "1ghljlxmyqphx13qspy382cpl2pbkbwbhqm7w7z57r9mkhswx668";
8 };
8 };
9
9
10 patches = [
10 patches = [
11 ./git_patches/docbook2texi.patch
11 ./patches/git/docbook2texi.patch
12 ./git_patches/symlinks-in-bin.patch
12 ./patches/git/symlinks-in-bin.patch
13 ./git_patches/git-sh-i18n.patch
13 ./patches/git/git-sh-i18n.patch
14 ./git_patches/ssh-path.patch
14 ./patches/git/ssh-path.patch
15 ];
15 ];
16
16
17 });
17 });
18
18
19 # Override subversion derivation to
19 # Override subversion derivation to
20 # - activate python bindings
20 # - activate python bindings
21 subversion =
21 subversion =
22 let
22 let
23 subversionWithPython = super.subversion.override {
23 subversionWithPython = super.subversion.override {
24 httpSupport = true;
24 httpSupport = true;
25 pythonBindings = true;
25 pythonBindings = true;
26 python = self.python27Packages.python;
26 python = self.python27Packages.python;
27 };
27 };
28 in
28 in
29 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
29 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
30 name = "subversion-1.10.2";
30 name = "subversion-1.10.2";
31 src = self.fetchurl {
31 src = self.fetchurl {
32 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
32 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
33 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
33 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
34 };
34 };
35
35
36 ## use internal lz4/utf8proc because it is stable and shipped with SVN
36 ## use internal lz4/utf8proc because it is stable and shipped with SVN
37 configureFlags = oldAttrs.configureFlags ++ [
37 configureFlags = oldAttrs.configureFlags ++ [
38 " --with-lz4=internal"
38 " --with-lz4=internal"
39 " --with-utf8proc=internal"
39 " --with-utf8proc=internal"
40 ];
40 ];
41
41
42
42
43 });
43 });
44
44
45 }
45 }
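
The subversion override above exists mainly to switch on pythonBindings, which the VCSServer needs for its svn support. A quick, hedged sanity check that the bindings landed in the resulting environment (module name taken from the standard Subversion SWIG bindings; adjust if your build lays them out differently):

    # svn_bindings_check.py -- assumes the standard SWIG layout of the
    # Subversion Python bindings enabled by pythonBindings = true above.
    from svn import core

    print("Subversion Python bindings import OK:", core.__name__)
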
1 NO CONTENT: file renamed from pkgs/git_patches/docbook2texi.patch to pkgs/patches/git/docbook2texi.patch
NO CONTENT: file renamed from pkgs/git_patches/docbook2texi.patch to pkgs/patches/git/docbook2texi.patch
1 NO CONTENT: file renamed from pkgs/git_patches/git-sh-i18n.patch to pkgs/patches/git/git-sh-i18n.patch
NO CONTENT: file renamed from pkgs/git_patches/git-sh-i18n.patch to pkgs/patches/git/git-sh-i18n.patch
1 NO CONTENT: file renamed from pkgs/git_patches/ssh-path.patch to pkgs/patches/git/ssh-path.patch
NO CONTENT: file renamed from pkgs/git_patches/ssh-path.patch to pkgs/patches/git/ssh-path.patch
@@ -1,949 +1,950 b''
1 # Generated by pip2nix 0.8.0.dev1
1 # Generated by pip2nix 0.8.0.dev1
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 { pkgs, fetchurl, fetchgit, fetchhg }:
4 { pkgs, fetchurl, fetchgit, fetchhg }:
5
5
6 self: super: {
6 self: super: {
7 "atomicwrites" = super.buildPythonPackage {
7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.1.5";
8 name = "atomicwrites-1.2.1";
9 doCheck = false;
9 doCheck = false;
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
11 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
12 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.mit ];
15 license = [ pkgs.lib.licenses.mit ];
16 };
16 };
17 };
17 };
18 "attrs" = super.buildPythonPackage {
18 "attrs" = super.buildPythonPackage {
19 name = "attrs-18.1.0";
19 name = "attrs-18.2.0";
20 doCheck = false;
20 doCheck = false;
21 src = fetchurl {
21 src = fetchurl {
22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
22 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
23 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
24 };
24 };
25 meta = {
25 meta = {
26 license = [ pkgs.lib.licenses.mit ];
26 license = [ pkgs.lib.licenses.mit ];
27 };
27 };
28 };
28 };
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 name = "backports.shutil-get-terminal-size-1.0.0";
30 name = "backports.shutil-get-terminal-size-1.0.0";
31 doCheck = false;
31 doCheck = false;
32 src = fetchurl {
32 src = fetchurl {
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 };
35 };
36 meta = {
36 meta = {
37 license = [ pkgs.lib.licenses.mit ];
37 license = [ pkgs.lib.licenses.mit ];
38 };
38 };
39 };
39 };
40 "beautifulsoup4" = super.buildPythonPackage {
40 "beautifulsoup4" = super.buildPythonPackage {
41 name = "beautifulsoup4-4.6.3";
41 name = "beautifulsoup4-4.6.3";
42 doCheck = false;
42 doCheck = false;
43 src = fetchurl {
43 src = fetchurl {
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 };
46 };
47 meta = {
47 meta = {
48 license = [ pkgs.lib.licenses.mit ];
48 license = [ pkgs.lib.licenses.mit ];
49 };
49 };
50 };
50 };
51 "configobj" = super.buildPythonPackage {
51 "configobj" = super.buildPythonPackage {
52 name = "configobj-5.0.6";
52 name = "configobj-5.0.6";
53 doCheck = false;
53 doCheck = false;
54 propagatedBuildInputs = [
54 propagatedBuildInputs = [
55 self."six"
55 self."six"
56 ];
56 ];
57 src = fetchurl {
57 src = fetchurl {
58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
60 };
60 };
61 meta = {
61 meta = {
62 license = [ pkgs.lib.licenses.bsdOriginal ];
62 license = [ pkgs.lib.licenses.bsdOriginal ];
63 };
63 };
64 };
64 };
65 "cov-core" = super.buildPythonPackage {
65 "cov-core" = super.buildPythonPackage {
66 name = "cov-core-1.15.0";
66 name = "cov-core-1.15.0";
67 doCheck = false;
67 doCheck = false;
68 propagatedBuildInputs = [
68 propagatedBuildInputs = [
69 self."coverage"
69 self."coverage"
70 ];
70 ];
71 src = fetchurl {
71 src = fetchurl {
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
74 };
74 };
75 meta = {
75 meta = {
76 license = [ pkgs.lib.licenses.mit ];
76 license = [ pkgs.lib.licenses.mit ];
77 };
77 };
78 };
78 };
79 "coverage" = super.buildPythonPackage {
79 "coverage" = super.buildPythonPackage {
80 name = "coverage-3.7.1";
80 name = "coverage-4.5.1";
81 doCheck = false;
81 doCheck = false;
82 src = fetchurl {
82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
83 url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
84 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
84 sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
85 };
85 };
86 meta = {
86 meta = {
87 license = [ pkgs.lib.licenses.bsdOriginal ];
87 license = [ pkgs.lib.licenses.asl20 ];
88 };
88 };
89 };
89 };
90 "decorator" = super.buildPythonPackage {
90 "decorator" = super.buildPythonPackage {
91 name = "decorator-4.1.2";
91 name = "decorator-4.1.2";
92 doCheck = false;
92 doCheck = false;
93 src = fetchurl {
93 src = fetchurl {
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
96 };
96 };
97 meta = {
97 meta = {
98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
99 };
99 };
100 };
100 };
101 "dogpile.cache" = super.buildPythonPackage {
101 "dogpile.cache" = super.buildPythonPackage {
102 name = "dogpile.cache-0.6.6";
102 name = "dogpile.cache-0.6.7";
103 doCheck = false;
103 doCheck = false;
104 src = fetchurl {
104 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
105 url = "https://files.pythonhosted.org/packages/ee/bd/440da735a11c6087eed7cc8747fc4b995cbac2464168682f8ee1c8e43844/dogpile.cache-0.6.7.tar.gz";
106 sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
106 sha256 = "1aw8rx8vhb75y7zc6gi67g21sw057jdx7i8m3jq7kf3nqavxx9zw";
107 };
107 };
108 meta = {
108 meta = {
109 license = [ pkgs.lib.licenses.bsdOriginal ];
109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 };
110 };
111 };
111 };
112 "dogpile.core" = super.buildPythonPackage {
112 "dogpile.core" = super.buildPythonPackage {
113 name = "dogpile.core-0.4.1";
113 name = "dogpile.core-0.4.1";
114 doCheck = false;
114 doCheck = false;
115 src = fetchurl {
115 src = fetchurl {
116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 };
118 };
119 meta = {
119 meta = {
120 license = [ pkgs.lib.licenses.bsdOriginal ];
120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 };
121 };
122 };
122 };
123 "dulwich" = super.buildPythonPackage {
123 "dulwich" = super.buildPythonPackage {
124 name = "dulwich-0.13.0";
124 name = "dulwich-0.13.0";
125 doCheck = false;
125 doCheck = false;
126 src = fetchurl {
126 src = fetchurl {
127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
129 };
129 };
130 meta = {
130 meta = {
131 license = [ pkgs.lib.licenses.gpl2Plus ];
131 license = [ pkgs.lib.licenses.gpl2Plus ];
132 };
132 };
133 };
133 };
134 "enum34" = super.buildPythonPackage {
134 "enum34" = super.buildPythonPackage {
135 name = "enum34-1.1.6";
135 name = "enum34-1.1.6";
136 doCheck = false;
136 doCheck = false;
137 src = fetchurl {
137 src = fetchurl {
138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
140 };
140 };
141 meta = {
141 meta = {
142 license = [ pkgs.lib.licenses.bsdOriginal ];
142 license = [ pkgs.lib.licenses.bsdOriginal ];
143 };
143 };
144 };
144 };
145 "funcsigs" = super.buildPythonPackage {
145 "funcsigs" = super.buildPythonPackage {
146 name = "funcsigs-1.0.2";
146 name = "funcsigs-1.0.2";
147 doCheck = false;
147 doCheck = false;
148 src = fetchurl {
148 src = fetchurl {
149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
151 };
151 };
152 meta = {
152 meta = {
153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
154 };
154 };
155 };
155 };
156 "gevent" = super.buildPythonPackage {
156 "gevent" = super.buildPythonPackage {
157 name = "gevent-1.3.5";
157 name = "gevent-1.3.6";
158 doCheck = false;
158 doCheck = false;
159 propagatedBuildInputs = [
159 propagatedBuildInputs = [
160 self."greenlet"
160 self."greenlet"
161 ];
161 ];
162 src = fetchurl {
162 src = fetchurl {
163 url = "https://files.pythonhosted.org/packages/e6/0a/fc345c6e6161f84484870dbcaa58e427c10bd9bdcd08a69bed3d6b398bf1/gevent-1.3.5.tar.gz";
163 url = "https://files.pythonhosted.org/packages/49/13/aa4bb3640b5167fe58875d3d7e65390cdb14f9682a41a741a566bb560842/gevent-1.3.6.tar.gz";
164 sha256 = "1w3gydxirgd2f60c5yv579w4903ds9s4g3587ik4jby97hgqc5bz";
164 sha256 = "1ih4k73dqz2zb561hda99vbanja3m6cdch3mgxxn1mla3qwkqhbv";
165 };
165 };
166 meta = {
166 meta = {
167 license = [ pkgs.lib.licenses.mit ];
167 license = [ pkgs.lib.licenses.mit ];
168 };
168 };
169 };
169 };
170 "gprof2dot" = super.buildPythonPackage {
170 "gprof2dot" = super.buildPythonPackage {
171 name = "gprof2dot-2017.9.19";
171 name = "gprof2dot-2017.9.19";
172 doCheck = false;
172 doCheck = false;
173 src = fetchurl {
173 src = fetchurl {
174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
176 };
176 };
177 meta = {
177 meta = {
178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
179 };
179 };
180 };
180 };
181 "greenlet" = super.buildPythonPackage {
181 "greenlet" = super.buildPythonPackage {
182 name = "greenlet-0.4.13";
182 name = "greenlet-0.4.15";
183 doCheck = false;
183 doCheck = false;
184 src = fetchurl {
184 src = fetchurl {
185 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
185 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
186 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
186 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
187 };
187 };
188 meta = {
188 meta = {
189 license = [ pkgs.lib.licenses.mit ];
189 license = [ pkgs.lib.licenses.mit ];
190 };
190 };
191 };
191 };
192 "gunicorn" = super.buildPythonPackage {
192 "gunicorn" = super.buildPythonPackage {
193 name = "gunicorn-19.9.0";
193 name = "gunicorn-19.9.0";
194 doCheck = false;
194 doCheck = false;
195 src = fetchurl {
195 src = fetchurl {
196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
198 };
198 };
199 meta = {
199 meta = {
200 license = [ pkgs.lib.licenses.mit ];
200 license = [ pkgs.lib.licenses.mit ];
201 };
201 };
202 };
202 };
203 "hg-evolve" = super.buildPythonPackage {
203 "hg-evolve" = super.buildPythonPackage {
204 name = "hg-evolve-8.0.1";
204 name = "hg-evolve-8.0.1";
205 doCheck = false;
205 doCheck = false;
206 src = fetchurl {
206 src = fetchurl {
207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
209 };
209 };
210 meta = {
210 meta = {
211 license = [ { fullName = "GPLv2+"; } ];
211 license = [ { fullName = "GPLv2+"; } ];
212 };
212 };
213 };
213 };
214 "hgsubversion" = super.buildPythonPackage {
214 "hgsubversion" = super.buildPythonPackage {
215 name = "hgsubversion-1.9.2";
215 name = "hgsubversion-1.9.2";
216 doCheck = false;
216 doCheck = false;
217 propagatedBuildInputs = [
217 propagatedBuildInputs = [
218 self."mercurial"
218 self."mercurial"
219 self."subvertpy"
219 self."subvertpy"
220 ];
220 ];
221 src = fetchurl {
221 src = fetchurl {
222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
224 };
224 };
225 meta = {
225 meta = {
226 license = [ pkgs.lib.licenses.gpl1 ];
226 license = [ pkgs.lib.licenses.gpl1 ];
227 };
227 };
228 };
228 };
229 "hupper" = super.buildPythonPackage {
229 "hupper" = super.buildPythonPackage {
230 name = "hupper-1.3";
230 name = "hupper-1.3.1";
231 doCheck = false;
231 doCheck = false;
232 src = fetchurl {
232 src = fetchurl {
233 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
233 url = "https://files.pythonhosted.org/packages/cf/4b/467b826a84c8594b81f414b5ab6794e981951dac90ca40abaf9ea1cb36b0/hupper-1.3.1.tar.gz";
234 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
234 sha256 = "03mf13n6i4dd60wlb9m99ddl4m3lmly70cjp7f82vdkibfl1v6l9";
235 };
235 };
236 meta = {
236 meta = {
237 license = [ pkgs.lib.licenses.mit ];
237 license = [ pkgs.lib.licenses.mit ];
238 };
238 };
239 };
239 };
240 "ipdb" = super.buildPythonPackage {
240 "ipdb" = super.buildPythonPackage {
241 name = "ipdb-0.11";
241 name = "ipdb-0.11";
242 doCheck = false;
242 doCheck = false;
243 propagatedBuildInputs = [
243 propagatedBuildInputs = [
244 self."setuptools"
244 self."setuptools"
245 self."ipython"
245 self."ipython"
246 ];
246 ];
247 src = fetchurl {
247 src = fetchurl {
248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
250 };
250 };
251 meta = {
251 meta = {
252 license = [ pkgs.lib.licenses.bsdOriginal ];
252 license = [ pkgs.lib.licenses.bsdOriginal ];
253 };
253 };
254 };
254 };
255 "ipython" = super.buildPythonPackage {
255 "ipython" = super.buildPythonPackage {
256 name = "ipython-5.1.0";
256 name = "ipython-5.1.0";
257 doCheck = false;
257 doCheck = false;
258 propagatedBuildInputs = [
258 propagatedBuildInputs = [
259 self."setuptools"
259 self."setuptools"
260 self."decorator"
260 self."decorator"
261 self."pickleshare"
261 self."pickleshare"
262 self."simplegeneric"
262 self."simplegeneric"
263 self."traitlets"
263 self."traitlets"
264 self."prompt-toolkit"
264 self."prompt-toolkit"
265 self."pygments"
265 self."pygments"
266 self."pexpect"
266 self."pexpect"
267 self."backports.shutil-get-terminal-size"
267 self."backports.shutil-get-terminal-size"
268 self."pathlib2"
268 self."pathlib2"
269 self."pexpect"
269 self."pexpect"
270 ];
270 ];
271 src = fetchurl {
271 src = fetchurl {
272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
274 };
274 };
275 meta = {
275 meta = {
276 license = [ pkgs.lib.licenses.bsdOriginal ];
276 license = [ pkgs.lib.licenses.bsdOriginal ];
277 };
277 };
278 };
278 };
279 "ipython-genutils" = super.buildPythonPackage {
279 "ipython-genutils" = super.buildPythonPackage {
280 name = "ipython-genutils-0.2.0";
280 name = "ipython-genutils-0.2.0";
281 doCheck = false;
281 doCheck = false;
282 src = fetchurl {
282 src = fetchurl {
283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
285 };
285 };
286 meta = {
286 meta = {
287 license = [ pkgs.lib.licenses.bsdOriginal ];
287 license = [ pkgs.lib.licenses.bsdOriginal ];
288 };
288 };
289 };
289 };
290 "mako" = super.buildPythonPackage {
290 "mako" = super.buildPythonPackage {
291 name = "mako-1.0.7";
291 name = "mako-1.0.7";
292 doCheck = false;
292 doCheck = false;
293 propagatedBuildInputs = [
293 propagatedBuildInputs = [
294 self."markupsafe"
294 self."markupsafe"
295 ];
295 ];
296 src = fetchurl {
296 src = fetchurl {
297 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
297 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
298 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
298 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
299 };
299 };
300 meta = {
300 meta = {
301 license = [ pkgs.lib.licenses.mit ];
301 license = [ pkgs.lib.licenses.mit ];
302 };
302 };
303 };
303 };
304 "markupsafe" = super.buildPythonPackage {
304 "markupsafe" = super.buildPythonPackage {
305 name = "markupsafe-1.0";
305 name = "markupsafe-1.0";
306 doCheck = false;
306 doCheck = false;
307 src = fetchurl {
307 src = fetchurl {
308 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
308 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
309 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
309 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
310 };
310 };
311 meta = {
311 meta = {
312 license = [ pkgs.lib.licenses.bsdOriginal ];
312 license = [ pkgs.lib.licenses.bsdOriginal ];
313 };
313 };
314 };
314 };
315 "mercurial" = super.buildPythonPackage {
315 "mercurial" = super.buildPythonPackage {
316 name = "mercurial-4.6.2";
316 name = "mercurial-4.6.2";
317 doCheck = false;
317 doCheck = false;
318 src = fetchurl {
318 src = fetchurl {
319 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
319 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
320 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
320 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
321 };
321 };
322 meta = {
322 meta = {
323 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
323 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
324 };
324 };
325 };
325 };
326 "mock" = super.buildPythonPackage {
326 "mock" = super.buildPythonPackage {
327 name = "mock-1.0.1";
327 name = "mock-1.0.1";
328 doCheck = false;
328 doCheck = false;
329 src = fetchurl {
329 src = fetchurl {
330 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
330 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
331 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
331 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
332 };
332 };
333 meta = {
333 meta = {
334 license = [ pkgs.lib.licenses.bsdOriginal ];
334 license = [ pkgs.lib.licenses.bsdOriginal ];
335 };
335 };
336 };
336 };
337 "more-itertools" = super.buildPythonPackage {
337 "more-itertools" = super.buildPythonPackage {
338 name = "more-itertools-4.3.0";
338 name = "more-itertools-4.3.0";
339 doCheck = false;
339 doCheck = false;
340 propagatedBuildInputs = [
340 propagatedBuildInputs = [
341 self."six"
341 self."six"
342 ];
342 ];
343 src = fetchurl {
343 src = fetchurl {
344 url = "https://files.pythonhosted.org/packages/88/ff/6d485d7362f39880810278bdc906c13300db05485d9c65971dec1142da6a/more-itertools-4.3.0.tar.gz";
344 url = "https://files.pythonhosted.org/packages/88/ff/6d485d7362f39880810278bdc906c13300db05485d9c65971dec1142da6a/more-itertools-4.3.0.tar.gz";
345 sha256 = "17h3na0rdh8xq30w4b9pizgkdxmm51896bxw600x84jflg9vaxn4";
345 sha256 = "17h3na0rdh8xq30w4b9pizgkdxmm51896bxw600x84jflg9vaxn4";
346 };
346 };
347 meta = {
347 meta = {
348 license = [ pkgs.lib.licenses.mit ];
348 license = [ pkgs.lib.licenses.mit ];
349 };
349 };
350 };
350 };
351 "msgpack-python" = super.buildPythonPackage {
351 "msgpack-python" = super.buildPythonPackage {
352 name = "msgpack-python-0.5.6";
352 name = "msgpack-python-0.5.6";
353 doCheck = false;
353 doCheck = false;
354 src = fetchurl {
354 src = fetchurl {
355 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
355 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
356 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
356 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
357 };
357 };
358 meta = {
358 meta = {
359 license = [ pkgs.lib.licenses.asl20 ];
359 license = [ pkgs.lib.licenses.asl20 ];
360 };
360 };
361 };
361 };
362 "pastedeploy" = super.buildPythonPackage {
362 "pastedeploy" = super.buildPythonPackage {
363 name = "pastedeploy-1.5.2";
363 name = "pastedeploy-1.5.2";
364 doCheck = false;
364 doCheck = false;
365 src = fetchurl {
365 src = fetchurl {
366 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
366 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
367 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
367 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
368 };
368 };
369 meta = {
369 meta = {
370 license = [ pkgs.lib.licenses.mit ];
370 license = [ pkgs.lib.licenses.mit ];
371 };
371 };
372 };
372 };
373 "pathlib2" = super.buildPythonPackage {
373 "pathlib2" = super.buildPythonPackage {
374 name = "pathlib2-2.3.0";
374 name = "pathlib2-2.3.2";
375 doCheck = false;
375 doCheck = false;
376 propagatedBuildInputs = [
376 propagatedBuildInputs = [
377 self."six"
377 self."six"
378 self."scandir"
378 self."scandir"
379 ];
379 ];
380 src = fetchurl {
380 src = fetchurl {
381 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
381 url = "https://files.pythonhosted.org/packages/db/a8/7d6439c1aec525ed70810abee5b7d7f3aa35347f59bc28343e8f62019aa2/pathlib2-2.3.2.tar.gz";
382 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
382 sha256 = "10yb0iv5x2hs631rcppkhbddx799d3h8pcwmkbh2a66ns3w71ccf";
383 };
383 };
384 meta = {
384 meta = {
385 license = [ pkgs.lib.licenses.mit ];
385 license = [ pkgs.lib.licenses.mit ];
386 };
386 };
387 };
387 };
388 "pexpect" = super.buildPythonPackage {
388 "pexpect" = super.buildPythonPackage {
389 name = "pexpect-4.6.0";
389 name = "pexpect-4.6.0";
390 doCheck = false;
390 doCheck = false;
391 propagatedBuildInputs = [
391 propagatedBuildInputs = [
392 self."ptyprocess"
392 self."ptyprocess"
393 ];
393 ];
394 src = fetchurl {
394 src = fetchurl {
395 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
395 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
396 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
396 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
397 };
397 };
398 meta = {
398 meta = {
399 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
399 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
400 };
400 };
401 };
401 };
402 "pickleshare" = super.buildPythonPackage {
402 "pickleshare" = super.buildPythonPackage {
403 name = "pickleshare-0.7.4";
403 name = "pickleshare-0.7.5";
404 doCheck = false;
404 doCheck = false;
405 propagatedBuildInputs = [
405 propagatedBuildInputs = [
406 self."pathlib2"
406 self."pathlib2"
407 ];
407 ];
408 src = fetchurl {
408 src = fetchurl {
409 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
409 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
410 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
410 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
411 };
411 };
412 meta = {
412 meta = {
413 license = [ pkgs.lib.licenses.mit ];
413 license = [ pkgs.lib.licenses.mit ];
414 };
414 };
415 };
415 };
416 "plaster" = super.buildPythonPackage {
416 "plaster" = super.buildPythonPackage {
417 name = "plaster-1.0";
417 name = "plaster-1.0";
418 doCheck = false;
418 doCheck = false;
419 propagatedBuildInputs = [
419 propagatedBuildInputs = [
420 self."setuptools"
420 self."setuptools"
421 ];
421 ];
422 src = fetchurl {
422 src = fetchurl {
423 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
423 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
424 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
424 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
425 };
425 };
426 meta = {
426 meta = {
427 license = [ pkgs.lib.licenses.mit ];
427 license = [ pkgs.lib.licenses.mit ];
428 };
428 };
429 };
429 };
430 "plaster-pastedeploy" = super.buildPythonPackage {
430 "plaster-pastedeploy" = super.buildPythonPackage {
431 name = "plaster-pastedeploy-0.6";
431 name = "plaster-pastedeploy-0.6";
432 doCheck = false;
432 doCheck = false;
433 propagatedBuildInputs = [
433 propagatedBuildInputs = [
434 self."pastedeploy"
434 self."pastedeploy"
435 self."plaster"
435 self."plaster"
436 ];
436 ];
437 src = fetchurl {
437 src = fetchurl {
438 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
438 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
439 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
439 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
440 };
440 };
441 meta = {
441 meta = {
442 license = [ pkgs.lib.licenses.mit ];
442 license = [ pkgs.lib.licenses.mit ];
443 };
443 };
444 };
444 };
445 "pluggy" = super.buildPythonPackage {
445 "pluggy" = super.buildPythonPackage {
446 name = "pluggy-0.6.0";
446 name = "pluggy-0.8.0";
447 doCheck = false;
447 doCheck = false;
448 src = fetchurl {
448 src = fetchurl {
449 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
449 url = "https://files.pythonhosted.org/packages/65/25/81d0de17cd00f8ca994a4e74e3c4baf7cd25072c0b831dad5c7d9d6138f8/pluggy-0.8.0.tar.gz";
450 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
450 sha256 = "1580p47l2zqzsza8jcnw1h2wh3vvmygk6ly8bvi4w0g8j14sjys4";
451 };
451 };
452 meta = {
452 meta = {
453 license = [ pkgs.lib.licenses.mit ];
453 license = [ pkgs.lib.licenses.mit ];
454 };
454 };
455 };
455 };
456 "prompt-toolkit" = super.buildPythonPackage {
456 "prompt-toolkit" = super.buildPythonPackage {
457 name = "prompt-toolkit-1.0.15";
457 name = "prompt-toolkit-1.0.15";
458 doCheck = false;
458 doCheck = false;
459 propagatedBuildInputs = [
459 propagatedBuildInputs = [
460 self."six"
460 self."six"
461 self."wcwidth"
461 self."wcwidth"
462 ];
462 ];
463 src = fetchurl {
463 src = fetchurl {
464 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
464 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
465 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
465 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
466 };
466 };
467 meta = {
467 meta = {
468 license = [ pkgs.lib.licenses.bsdOriginal ];
468 license = [ pkgs.lib.licenses.bsdOriginal ];
469 };
469 };
470 };
470 };
471 "psutil" = super.buildPythonPackage {
471 "psutil" = super.buildPythonPackage {
472 name = "psutil-5.4.6";
472 name = "psutil-5.4.7";
473 doCheck = false;
473 doCheck = false;
474 src = fetchurl {
474 src = fetchurl {
475 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
475 url = "https://files.pythonhosted.org/packages/7d/9a/1e93d41708f8ed2b564395edfa3389f0fd6d567597401c2e5e2775118d8b/psutil-5.4.7.tar.gz";
476 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
476 sha256 = "0fsgmvzwbdbszkwfnqhib8jcxm4w6zyhvlxlcda0rfm5cyqj4qsv";
477 };
477 };
478 meta = {
478 meta = {
479 license = [ pkgs.lib.licenses.bsdOriginal ];
479 license = [ pkgs.lib.licenses.bsdOriginal ];
480 };
480 };
481 };
481 };
482 "ptyprocess" = super.buildPythonPackage {
482 "ptyprocess" = super.buildPythonPackage {
483 name = "ptyprocess-0.6.0";
483 name = "ptyprocess-0.6.0";
484 doCheck = false;
484 doCheck = false;
485 src = fetchurl {
485 src = fetchurl {
486 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
486 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
487 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
487 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
488 };
488 };
489 meta = {
489 meta = {
490 license = [ ];
490 license = [ ];
491 };
491 };
492 };
492 };
493 "py" = super.buildPythonPackage {
493 "py" = super.buildPythonPackage {
494 name = "py-1.5.3";
494 name = "py-1.6.0";
495 doCheck = false;
495 doCheck = false;
496 src = fetchurl {
496 src = fetchurl {
497 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
497 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
498 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
498 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
499 };
499 };
500 meta = {
500 meta = {
501 license = [ pkgs.lib.licenses.mit ];
501 license = [ pkgs.lib.licenses.mit ];
502 };
502 };
503 };
503 };
504 "pygments" = super.buildPythonPackage {
504 "pygments" = super.buildPythonPackage {
505 name = "pygments-2.2.0";
505 name = "pygments-2.2.0";
506 doCheck = false;
506 doCheck = false;
507 src = fetchurl {
507 src = fetchurl {
508 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
508 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
509 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
509 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
510 };
510 };
511 meta = {
511 meta = {
512 license = [ pkgs.lib.licenses.bsdOriginal ];
512 license = [ pkgs.lib.licenses.bsdOriginal ];
513 };
513 };
514 };
514 };
515 "pyramid" = super.buildPythonPackage {
515 "pyramid" = super.buildPythonPackage {
516 name = "pyramid-1.9.2";
516 name = "pyramid-1.9.2";
517 doCheck = false;
517 doCheck = false;
518 propagatedBuildInputs = [
518 propagatedBuildInputs = [
519 self."setuptools"
519 self."setuptools"
520 self."webob"
520 self."webob"
521 self."repoze.lru"
521 self."repoze.lru"
522 self."zope.interface"
522 self."zope.interface"
523 self."zope.deprecation"
523 self."zope.deprecation"
524 self."venusian"
524 self."venusian"
525 self."translationstring"
525 self."translationstring"
526 self."pastedeploy"
526 self."pastedeploy"
527 self."plaster"
527 self."plaster"
528 self."plaster-pastedeploy"
528 self."plaster-pastedeploy"
529 self."hupper"
529 self."hupper"
530 ];
530 ];
531 src = fetchurl {
531 src = fetchurl {
532 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
532 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
533 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
533 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
534 };
534 };
535 meta = {
535 meta = {
536 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
536 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
537 };
537 };
538 };
538 };
539 "pyramid-mako" = super.buildPythonPackage {
539 "pyramid-mako" = super.buildPythonPackage {
540 name = "pyramid-mako-1.0.2";
540 name = "pyramid-mako-1.0.2";
541 doCheck = false;
541 doCheck = false;
542 propagatedBuildInputs = [
542 propagatedBuildInputs = [
543 self."pyramid"
543 self."pyramid"
544 self."mako"
544 self."mako"
545 ];
545 ];
546 src = fetchurl {
546 src = fetchurl {
547 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
547 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
548 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
548 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
549 };
549 };
550 meta = {
550 meta = {
551 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
551 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
552 };
552 };
553 };
553 };
554 "pytest" = super.buildPythonPackage {
554 "pytest" = super.buildPythonPackage {
555 name = "pytest-3.6.0";
555 name = "pytest-3.8.2";
556 doCheck = false;
556 doCheck = false;
557 propagatedBuildInputs = [
557 propagatedBuildInputs = [
558 self."py"
558 self."py"
559 self."six"
559 self."six"
560 self."setuptools"
560 self."setuptools"
561 self."attrs"
561 self."attrs"
562 self."more-itertools"
562 self."more-itertools"
563 self."atomicwrites"
563 self."atomicwrites"
564 self."pluggy"
564 self."pluggy"
565 self."funcsigs"
565 self."funcsigs"
566 self."pathlib2"
566 ];
567 ];
567 src = fetchurl {
568 src = fetchurl {
568 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
569 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
569 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
570 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
570 };
571 };
571 meta = {
572 meta = {
572 license = [ pkgs.lib.licenses.mit ];
573 license = [ pkgs.lib.licenses.mit ];
573 };
574 };
574 };
575 };
575 "pytest-cov" = super.buildPythonPackage {
576 "pytest-cov" = super.buildPythonPackage {
576 name = "pytest-cov-2.5.1";
577 name = "pytest-cov-2.6.0";
577 doCheck = false;
578 doCheck = false;
578 propagatedBuildInputs = [
579 propagatedBuildInputs = [
579 self."pytest"
580 self."pytest"
580 self."coverage"
581 self."coverage"
581 ];
582 ];
582 src = fetchurl {
583 src = fetchurl {
583 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
584 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
585 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
585 };
586 };
586 meta = {
587 meta = {
587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 };
589 };
589 };
590 };
590 "pytest-profiling" = super.buildPythonPackage {
591 "pytest-profiling" = super.buildPythonPackage {
591 name = "pytest-profiling-1.3.0";
592 name = "pytest-profiling-1.3.0";
592 doCheck = false;
593 doCheck = false;
593 propagatedBuildInputs = [
594 propagatedBuildInputs = [
594 self."six"
595 self."six"
595 self."pytest"
596 self."pytest"
596 self."gprof2dot"
597 self."gprof2dot"
597 ];
598 ];
598 src = fetchurl {
599 src = fetchurl {
599 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
600 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
600 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
601 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
601 };
602 };
602 meta = {
603 meta = {
603 license = [ pkgs.lib.licenses.mit ];
604 license = [ pkgs.lib.licenses.mit ];
604 };
605 };
605 };
606 };
606 "pytest-runner" = super.buildPythonPackage {
607 "pytest-runner" = super.buildPythonPackage {
607 name = "pytest-runner-4.2";
608 name = "pytest-runner-4.2";
608 doCheck = false;
609 doCheck = false;
609 src = fetchurl {
610 src = fetchurl {
610 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
611 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
611 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
612 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
612 };
613 };
613 meta = {
614 meta = {
614 license = [ pkgs.lib.licenses.mit ];
615 license = [ pkgs.lib.licenses.mit ];
615 };
616 };
616 };
617 };
617 "pytest-sugar" = super.buildPythonPackage {
618 "pytest-sugar" = super.buildPythonPackage {
618 name = "pytest-sugar-0.9.1";
619 name = "pytest-sugar-0.9.1";
619 doCheck = false;
620 doCheck = false;
620 propagatedBuildInputs = [
621 propagatedBuildInputs = [
621 self."pytest"
622 self."pytest"
622 self."termcolor"
623 self."termcolor"
623 ];
624 ];
624 src = fetchurl {
625 src = fetchurl {
625 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
626 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
626 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
627 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
627 };
628 };
628 meta = {
629 meta = {
629 license = [ pkgs.lib.licenses.bsdOriginal ];
630 license = [ pkgs.lib.licenses.bsdOriginal ];
630 };
631 };
631 };
632 };
632 "pytest-timeout" = super.buildPythonPackage {
633 "pytest-timeout" = super.buildPythonPackage {
633 name = "pytest-timeout-1.2.1";
634 name = "pytest-timeout-1.3.2";
634 doCheck = false;
635 doCheck = false;
635 propagatedBuildInputs = [
636 propagatedBuildInputs = [
636 self."pytest"
637 self."pytest"
637 ];
638 ];
638 src = fetchurl {
639 src = fetchurl {
639 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
640 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
640 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
641 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
641 };
642 };
642 meta = {
643 meta = {
643 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
644 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
644 };
645 };
645 };
646 };
646 "repoze.lru" = super.buildPythonPackage {
647 "repoze.lru" = super.buildPythonPackage {
647 name = "repoze.lru-0.7";
648 name = "repoze.lru-0.7";
648 doCheck = false;
649 doCheck = false;
649 src = fetchurl {
650 src = fetchurl {
650 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
651 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
651 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
652 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
652 };
653 };
653 meta = {
654 meta = {
654 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
655 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
655 };
656 };
656 };
657 };
657 "rhodecode-vcsserver" = super.buildPythonPackage {
658 "rhodecode-vcsserver" = super.buildPythonPackage {
658 name = "rhodecode-vcsserver-4.13.3";
659 name = "rhodecode-vcsserver-4.14.0";
659 buildInputs = [
660 buildInputs = [
660 self."pytest"
661 self."pytest"
661 self."py"
662 self."py"
662 self."pytest-cov"
663 self."pytest-cov"
663 self."pytest-sugar"
664 self."pytest-sugar"
664 self."pytest-runner"
665 self."pytest-runner"
665 self."pytest-profiling"
666 self."pytest-profiling"
667 self."pytest-timeout"
666 self."gprof2dot"
668 self."gprof2dot"
667 self."pytest-timeout"
668 self."mock"
669 self."mock"
669 self."webtest"
670 self."webtest"
670 self."cov-core"
671 self."cov-core"
671 self."coverage"
672 self."coverage"
672 self."configobj"
673 self."configobj"
673 ];
674 ];
674 doCheck = true;
675 doCheck = true;
675 propagatedBuildInputs = [
676 propagatedBuildInputs = [
676 self."configobj"
677 self."configobj"
677 self."atomicwrites"
678 self."atomicwrites"
678 self."attrs"
679 self."attrs"
679 self."dogpile.cache"
680 self."dogpile.cache"
680 self."dogpile.core"
681 self."dogpile.core"
681 self."decorator"
682 self."decorator"
682 self."dulwich"
683 self."dulwich"
683 self."hgsubversion"
684 self."hgsubversion"
684 self."hg-evolve"
685 self."hg-evolve"
685 self."mako"
686 self."mako"
686 self."markupsafe"
687 self."markupsafe"
687 self."mercurial"
688 self."mercurial"
688 self."msgpack-python"
689 self."msgpack-python"
689 self."pastedeploy"
690 self."pastedeploy"
690 self."psutil"
691 self."psutil"
691 self."pyramid"
692 self."pyramid"
692 self."pyramid-mako"
693 self."pyramid-mako"
693 self."pygments"
694 self."pygments"
694 self."pathlib2"
695 self."pathlib2"
695 self."repoze.lru"
696 self."repoze.lru"
696 self."simplejson"
697 self."simplejson"
697 self."subprocess32"
698 self."subprocess32"
698 self."setproctitle"
699 self."subvertpy"
699 self."subvertpy"
700 self."six"
700 self."six"
701 self."translationstring"
701 self."translationstring"
702 self."webob"
702 self."webob"
703 self."zope.deprecation"
703 self."zope.deprecation"
704 self."zope.interface"
704 self."zope.interface"
705 self."gevent"
705 self."gevent"
706 self."greenlet"
706 self."greenlet"
707 self."gunicorn"
707 self."gunicorn"
708 self."waitress"
708 self."waitress"
709 self."setproctitle"
709 self."ipdb"
710 self."ipdb"
710 self."ipython"
711 self."ipython"
711 self."pytest"
712 self."pytest"
712 self."py"
713 self."py"
713 self."pytest-cov"
714 self."pytest-cov"
714 self."pytest-sugar"
715 self."pytest-sugar"
715 self."pytest-runner"
716 self."pytest-runner"
716 self."pytest-profiling"
717 self."pytest-profiling"
718 self."pytest-timeout"
717 self."gprof2dot"
719 self."gprof2dot"
718 self."pytest-timeout"
719 self."mock"
720 self."mock"
720 self."webtest"
721 self."webtest"
721 self."cov-core"
722 self."cov-core"
722 self."coverage"
723 self."coverage"
723 ];
724 ];
724 src = ./.;
725 src = ./.;
725 meta = {
726 meta = {
726 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
727 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
727 };
728 };
728 };
729 };
729 "scandir" = super.buildPythonPackage {
730 "scandir" = super.buildPythonPackage {
730 name = "scandir-1.9.0";
731 name = "scandir-1.9.0";
731 doCheck = false;
732 doCheck = false;
732 src = fetchurl {
733 src = fetchurl {
733 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
734 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
734 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
735 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
735 };
736 };
736 meta = {
737 meta = {
737 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
738 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
738 };
739 };
739 };
740 };
740 "setproctitle" = super.buildPythonPackage {
741 "setproctitle" = super.buildPythonPackage {
741 name = "setproctitle-1.1.10";
742 name = "setproctitle-1.1.10";
742 doCheck = false;
743 doCheck = false;
743 src = fetchurl {
744 src = fetchurl {
744 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
745 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
745 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
746 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
746 };
747 };
747 meta = {
748 meta = {
748 license = [ pkgs.lib.licenses.bsdOriginal ];
749 license = [ pkgs.lib.licenses.bsdOriginal ];
749 };
750 };
750 };
751 };
751 "setuptools" = super.buildPythonPackage {
752 "setuptools" = super.buildPythonPackage {
752 name = "setuptools-40.1.0";
753 name = "setuptools-40.4.3";
753 doCheck = false;
754 doCheck = false;
754 src = fetchurl {
755 src = fetchurl {
755 url = "https://files.pythonhosted.org/packages/5a/df/b2e3d9693bb0dcbeac516a73dd7a9eb82b126ae52e4a74605a9b01beddd5/setuptools-40.1.0.zip";
756 url = "https://files.pythonhosted.org/packages/6e/9c/6a003320b00ef237f94aa74e4ad66c57a7618f6c79d67527136e2544b728/setuptools-40.4.3.zip";
756 sha256 = "0w1blx5ajga5y15dci0mddk49cf2xpq0mp7rp7jrqr2diqk00ib6";
757 sha256 = "058v6zns4634n4al2nmmvp15j8nrgwn8wjrbdks47wk3vm05gg5c";
757 };
758 };
758 meta = {
759 meta = {
759 license = [ pkgs.lib.licenses.mit ];
760 license = [ pkgs.lib.licenses.mit ];
760 };
761 };
761 };
762 };
762 "simplegeneric" = super.buildPythonPackage {
763 "simplegeneric" = super.buildPythonPackage {
763 name = "simplegeneric-0.8.1";
764 name = "simplegeneric-0.8.1";
764 doCheck = false;
765 doCheck = false;
765 src = fetchurl {
766 src = fetchurl {
766 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
767 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
767 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
768 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
768 };
769 };
769 meta = {
770 meta = {
770 license = [ pkgs.lib.licenses.zpl21 ];
771 license = [ pkgs.lib.licenses.zpl21 ];
771 };
772 };
772 };
773 };
773 "simplejson" = super.buildPythonPackage {
774 "simplejson" = super.buildPythonPackage {
774 name = "simplejson-3.11.1";
775 name = "simplejson-3.11.1";
775 doCheck = false;
776 doCheck = false;
776 src = fetchurl {
777 src = fetchurl {
777 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
778 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
778 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
779 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
779 };
780 };
780 meta = {
781 meta = {
781 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
782 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
782 };
783 };
783 };
784 };
784 "six" = super.buildPythonPackage {
785 "six" = super.buildPythonPackage {
785 name = "six-1.11.0";
786 name = "six-1.11.0";
786 doCheck = false;
787 doCheck = false;
787 src = fetchurl {
788 src = fetchurl {
788 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
789 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
789 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
790 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
790 };
791 };
791 meta = {
792 meta = {
792 license = [ pkgs.lib.licenses.mit ];
793 license = [ pkgs.lib.licenses.mit ];
793 };
794 };
794 };
795 };
795 "subprocess32" = super.buildPythonPackage {
796 "subprocess32" = super.buildPythonPackage {
796 name = "subprocess32-3.5.1";
797 name = "subprocess32-3.5.2";
797 doCheck = false;
798 doCheck = false;
798 src = fetchurl {
799 src = fetchurl {
799 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
800 url = "https://files.pythonhosted.org/packages/c3/5f/7117737fc7114061837a4f51670d863dd7f7f9c762a6546fa8a0dcfe61c8/subprocess32-3.5.2.tar.gz";
800 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
801 sha256 = "11v62shwmdys48g7ncs3a8jwwnkcl8d4zcwy6dk73z1zy2f9hazb";
801 };
802 };
802 meta = {
803 meta = {
803 license = [ pkgs.lib.licenses.psfl ];
804 license = [ pkgs.lib.licenses.psfl ];
804 };
805 };
805 };
806 };
806 "subvertpy" = super.buildPythonPackage {
807 "subvertpy" = super.buildPythonPackage {
807 name = "subvertpy-0.10.1";
808 name = "subvertpy-0.10.1";
808 doCheck = false;
809 doCheck = false;
809 src = fetchurl {
810 src = fetchurl {
810 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
811 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
811 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
812 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
812 };
813 };
813 meta = {
814 meta = {
814 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
815 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
815 };
816 };
816 };
817 };
817 "termcolor" = super.buildPythonPackage {
818 "termcolor" = super.buildPythonPackage {
818 name = "termcolor-1.1.0";
819 name = "termcolor-1.1.0";
819 doCheck = false;
820 doCheck = false;
820 src = fetchurl {
821 src = fetchurl {
821 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
822 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
822 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
823 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
823 };
824 };
824 meta = {
825 meta = {
825 license = [ pkgs.lib.licenses.mit ];
826 license = [ pkgs.lib.licenses.mit ];
826 };
827 };
827 };
828 };
828 "traitlets" = super.buildPythonPackage {
829 "traitlets" = super.buildPythonPackage {
829 name = "traitlets-4.3.2";
830 name = "traitlets-4.3.2";
830 doCheck = false;
831 doCheck = false;
831 propagatedBuildInputs = [
832 propagatedBuildInputs = [
832 self."ipython-genutils"
833 self."ipython-genutils"
833 self."six"
834 self."six"
834 self."decorator"
835 self."decorator"
835 self."enum34"
836 self."enum34"
836 ];
837 ];
837 src = fetchurl {
838 src = fetchurl {
838 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
839 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
839 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
840 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
840 };
841 };
841 meta = {
842 meta = {
842 license = [ pkgs.lib.licenses.bsdOriginal ];
843 license = [ pkgs.lib.licenses.bsdOriginal ];
843 };
844 };
844 };
845 };
845 "translationstring" = super.buildPythonPackage {
846 "translationstring" = super.buildPythonPackage {
846 name = "translationstring-1.3";
847 name = "translationstring-1.3";
847 doCheck = false;
848 doCheck = false;
848 src = fetchurl {
849 src = fetchurl {
849 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
850 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
850 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
851 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
851 };
852 };
852 meta = {
853 meta = {
853 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
854 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
854 };
855 };
855 };
856 };
856 "venusian" = super.buildPythonPackage {
857 "venusian" = super.buildPythonPackage {
857 name = "venusian-1.1.0";
858 name = "venusian-1.1.0";
858 doCheck = false;
859 doCheck = false;
859 src = fetchurl {
860 src = fetchurl {
860 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
861 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
861 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
862 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
862 };
863 };
863 meta = {
864 meta = {
864 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
865 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
865 };
866 };
866 };
867 };
867 "waitress" = super.buildPythonPackage {
868 "waitress" = super.buildPythonPackage {
868 name = "waitress-1.1.0";
869 name = "waitress-1.1.0";
869 doCheck = false;
870 doCheck = false;
870 src = fetchurl {
871 src = fetchurl {
871 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
872 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
872 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
873 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
873 };
874 };
874 meta = {
875 meta = {
875 license = [ pkgs.lib.licenses.zpl21 ];
876 license = [ pkgs.lib.licenses.zpl21 ];
876 };
877 };
877 };
878 };
878 "wcwidth" = super.buildPythonPackage {
879 "wcwidth" = super.buildPythonPackage {
879 name = "wcwidth-0.1.7";
880 name = "wcwidth-0.1.7";
880 doCheck = false;
881 doCheck = false;
881 src = fetchurl {
882 src = fetchurl {
882 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
883 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
883 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
884 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
884 };
885 };
885 meta = {
886 meta = {
886 license = [ pkgs.lib.licenses.mit ];
887 license = [ pkgs.lib.licenses.mit ];
887 };
888 };
888 };
889 };
889 "webob" = super.buildPythonPackage {
890 "webob" = super.buildPythonPackage {
890 name = "webob-1.7.4";
891 name = "webob-1.7.4";
891 doCheck = false;
892 doCheck = false;
892 src = fetchurl {
893 src = fetchurl {
893 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
894 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
894 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
895 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
895 };
896 };
896 meta = {
897 meta = {
897 license = [ pkgs.lib.licenses.mit ];
898 license = [ pkgs.lib.licenses.mit ];
898 };
899 };
899 };
900 };
900 "webtest" = super.buildPythonPackage {
901 "webtest" = super.buildPythonPackage {
901 name = "webtest-2.0.29";
902 name = "webtest-2.0.29";
902 doCheck = false;
903 doCheck = false;
903 propagatedBuildInputs = [
904 propagatedBuildInputs = [
904 self."six"
905 self."six"
905 self."webob"
906 self."webob"
906 self."waitress"
907 self."waitress"
907 self."beautifulsoup4"
908 self."beautifulsoup4"
908 ];
909 ];
909 src = fetchurl {
910 src = fetchurl {
910 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
911 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
911 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
912 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
912 };
913 };
913 meta = {
914 meta = {
914 license = [ pkgs.lib.licenses.mit ];
915 license = [ pkgs.lib.licenses.mit ];
915 };
916 };
916 };
917 };
917 "zope.deprecation" = super.buildPythonPackage {
918 "zope.deprecation" = super.buildPythonPackage {
918 name = "zope.deprecation-4.3.0";
919 name = "zope.deprecation-4.3.0";
919 doCheck = false;
920 doCheck = false;
920 propagatedBuildInputs = [
921 propagatedBuildInputs = [
921 self."setuptools"
922 self."setuptools"
922 ];
923 ];
923 src = fetchurl {
924 src = fetchurl {
924 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
925 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
925 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
926 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
926 };
927 };
927 meta = {
928 meta = {
928 license = [ pkgs.lib.licenses.zpl21 ];
929 license = [ pkgs.lib.licenses.zpl21 ];
929 };
930 };
930 };
931 };
931 "zope.interface" = super.buildPythonPackage {
932 "zope.interface" = super.buildPythonPackage {
932 name = "zope.interface-4.5.0";
933 name = "zope.interface-4.5.0";
933 doCheck = false;
934 doCheck = false;
934 propagatedBuildInputs = [
935 propagatedBuildInputs = [
935 self."setuptools"
936 self."setuptools"
936 ];
937 ];
937 src = fetchurl {
938 src = fetchurl {
938 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
939 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
939 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
940 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
940 };
941 };
941 meta = {
942 meta = {
942 license = [ pkgs.lib.licenses.zpl21 ];
943 license = [ pkgs.lib.licenses.zpl21 ];
943 };
944 };
944 };
945 };
945
946
946 ### Test requirements
947 ### Test requirements
947
948
948
949
949 }
950 }
@@ -1,16 +1,14 b''
1 # This file defines how to "build" for packaging.
1 # This file defines how to "build" for packaging.
2
2
3 { pkgs ? import <nixpkgs> {}
3 { doCheck ? true
4 , doCheck ? true
5 }:
4 }:
6
5
7 let
6 let
8 vcsserver = import ./default.nix {
7 vcsserver = import ./default.nix {
9 inherit
8 inherit
10 doCheck
9 doCheck;
11 pkgs;
12 };
10 };
13
11
14 in {
12 in {
15 build = vcsserver;
13 build = vcsserver;
16 }
14 }
@@ -1,48 +1,48 b''
1 ## dependencies
1 ## dependencies
2
2
3 # our custom configobj
3 # our custom configobj
4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
5 atomicwrites==1.1.5
5 atomicwrites==1.2.1
6 attrs==18.1.0
6 attrs==18.2.0
7 dogpile.cache==0.6.6
7 dogpile.cache==0.6.7
8 dogpile.core==0.4.1
8 dogpile.core==0.4.1
9 decorator==4.1.2
9 decorator==4.1.2
10 dulwich==0.13.0
10 dulwich==0.13.0
11 hgsubversion==1.9.2
11 hgsubversion==1.9.2
12 hg-evolve==8.0.1
12 hg-evolve==8.0.1
13 mako==1.0.7
13 mako==1.0.7
14 markupsafe==1.0.0
14 markupsafe==1.0.0
15 mercurial==4.6.2
15 mercurial==4.6.2
16 msgpack-python==0.5.6
16 msgpack-python==0.5.6
17
17
18 pastedeploy==1.5.2
18 pastedeploy==1.5.2
19 psutil==5.4.6
19 psutil==5.4.7
20 pyramid==1.9.2
20 pyramid==1.9.2
21 pyramid-mako==1.0.2
21 pyramid-mako==1.0.2
22
22
23 pygments==2.2.0
23 pygments==2.2.0
24 pathlib2==2.3.0
24 pathlib2==2.3.2
25 repoze.lru==0.7
25 repoze.lru==0.7
26 simplejson==3.11.1
26 simplejson==3.11.1
27 subprocess32==3.5.1
27 subprocess32==3.5.2
28 setproctitle==1.1.10
29 subvertpy==0.10.1
28 subvertpy==0.10.1
30
29
31 six==1.11.0
30 six==1.11.0
32 translationstring==1.3
31 translationstring==1.3
33 webob==1.7.4
32 webob==1.7.4
34 zope.deprecation==4.3.0
33 zope.deprecation==4.3.0
35 zope.interface==4.5.0
34 zope.interface==4.5.0
36
35
37 ## http servers
36 ## http servers
38 gevent==1.3.5
37 gevent==1.3.6
39 greenlet==0.4.13
38 greenlet==0.4.15
40 gunicorn==19.9.0
39 gunicorn==19.9.0
41 waitress==1.1.0
40 waitress==1.1.0
41 setproctitle==1.1.10
42
42
43 ## debug
43 ## debug
44 ipdb==0.11.0
44 ipdb==0.11.0
45 ipython==5.1.0
45 ipython==5.1.0
46
46
47 ## test related requirements
47 ## test related requirements
48 -r requirements_test.txt
48 -r requirements_test.txt
@@ -1,14 +1,14 b''
1 # test related requirements
1 # test related requirements
2 pytest==3.6.0
2 pytest==3.8.2
3 py==1.5.3
3 py==1.6.0
4 pytest-cov==2.5.1
4 pytest-cov==2.6.0
5 pytest-sugar==0.9.1
5 pytest-sugar==0.9.1
6 pytest-runner==4.2.0
6 pytest-runner==4.2.0
7 pytest-profiling==1.3.0
7 pytest-profiling==1.3.0
8 pytest-timeout==1.3.2
8 gprof2dot==2017.9.19
9 gprof2dot==2017.9.19
9 pytest-timeout==1.2.1
10
10
11 mock==1.0.1
11 mock==1.0.1
12 webtest==2.0.29
12 webtest==2.0.29
13 cov-core==1.15.0
13 cov-core==1.15.0
14 coverage==3.7.1
14 coverage==4.5.1
@@ -1,139 +1,136 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # Copyright (C) 2014-2017 RhodeCode GmbH
3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 #
4 #
5 # This program is free software; you can redistribute it and/or modify
5 # This program is free software; you can redistribute it and/or modify
6 # it under the terms of the GNU General Public License as published by
6 # it under the terms of the GNU General Public License as published by
7 # the Free Software Foundation; either version 3 of the License, or
7 # the Free Software Foundation; either version 3 of the License, or
8 # (at your option) any later version.
8 # (at your option) any later version.
9 #
9 #
10 # This program is distributed in the hope that it will be useful,
10 # This program is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # GNU General Public License for more details.
13 # GNU General Public License for more details.
14 #
14 #
15 # You should have received a copy of the GNU General Public License
15 # You should have received a copy of the GNU General Public License
16 # along with this program; if not, write to the Free Software Foundation,
16 # along with this program; if not, write to the Free Software Foundation,
17 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18
18
19 # Import early to make sure things are patched up properly
19 # Import early to make sure things are patched up properly
20 from setuptools import setup, find_packages
20 from setuptools import setup, find_packages
21
21
22 import os
22 import os
23 import sys
23 import sys
24 import pkgutil
24 import pkgutil
25 import platform
25 import platform
26 import codecs
26 import codecs
27
27
28 try: # for pip >= 10
28 try: # for pip >= 10
29 from pip._internal.req import parse_requirements
29 from pip._internal.req import parse_requirements
30 except ImportError: # for pip <= 9.0.3
30 except ImportError: # for pip <= 9.0.3
31 from pip.req import parse_requirements
31 from pip.req import parse_requirements
32
32
33 try: # for pip >= 10
33 try: # for pip >= 10
34 from pip._internal.download import PipSession
34 from pip._internal.download import PipSession
35 except ImportError: # for pip <= 9.0.3
35 except ImportError: # for pip <= 9.0.3
36 from pip.download import PipSession
36 from pip.download import PipSession
37
37
38
38
39
39
40 if sys.version_info < (2, 7):
40 if sys.version_info < (2, 7):
41 raise Exception('VCSServer requires Python 2.7 or later')
41 raise Exception('VCSServer requires Python 2.7 or later')
42
42
43 here = os.path.abspath(os.path.dirname(__file__))
43 here = os.path.abspath(os.path.dirname(__file__))
44
44
45 # defines current platform
45 # defines current platform
46 __platform__ = platform.system()
46 __platform__ = platform.system()
47 __license__ = 'GPL V3'
47 __license__ = 'GPL V3'
48 __author__ = 'RhodeCode GmbH'
48 __author__ = 'RhodeCode GmbH'
49 __url__ = 'https://code.rhodecode.com'
49 __url__ = 'https://code.rhodecode.com'
50 is_windows = __platform__ in ('Windows',)
50 is_windows = __platform__ in ('Windows',)
51
51
52
52
53 def _get_requirements(req_filename, exclude=None, extras=None):
53 def _get_requirements(req_filename, exclude=None, extras=None):
54 extras = extras or []
54 extras = extras or []
55 exclude = exclude or []
55 exclude = exclude or []
56
56
57 try:
57 try:
58 parsed = parse_requirements(
58 parsed = parse_requirements(
59 os.path.join(here, req_filename), session=PipSession())
59 os.path.join(here, req_filename), session=PipSession())
60 except TypeError:
60 except TypeError:
61 # try pip < 6.0.0, that doesn't support session
61 # try pip < 6.0.0, that doesn't support session
62 parsed = parse_requirements(os.path.join(here, req_filename))
62 parsed = parse_requirements(os.path.join(here, req_filename))
63
63
64 requirements = []
64 requirements = []
65 for ir in parsed:
65 for ir in parsed:
66 if ir.req and ir.name not in exclude:
66 if ir.req and ir.name not in exclude:
67 requirements.append(str(ir.req))
67 requirements.append(str(ir.req))
68 return requirements + extras
68 return requirements + extras
69
69
70
70
71 # requirements extract
71 # requirements extract
72 setup_requirements = ['pytest-runner']
72 setup_requirements = ['pytest-runner']
73 install_requirements = _get_requirements(
73 install_requirements = _get_requirements(
74 'requirements.txt', exclude=['setuptools'])
74 'requirements.txt', exclude=['setuptools'])
75 test_requirements = _get_requirements(
75 test_requirements = _get_requirements(
76 'requirements_test.txt', extras=['configobj'])
76 'requirements_test.txt', extras=['configobj'])
77
77
78
78
79 def get_version():
79 def get_version():
80 version = pkgutil.get_data('vcsserver', 'VERSION')
80 version = pkgutil.get_data('vcsserver', 'VERSION')
81 return version.strip()
81 return version.strip()
82
82
83
83
84 # additional files that go into the package itself
84 # additional files that go into the package itself
85 package_data = {
85 package_data = {
86 '': ['*.txt', '*.rst'],
86 '': ['*.txt', '*.rst'],
87 'configs': ['*.ini'],
87 'configs': ['*.ini'],
88 'vcsserver': ['VERSION'],
88 'vcsserver': ['VERSION'],
89 }
89 }
90
90
91 description = 'Version Control System Server'
91 description = 'Version Control System Server'
92 keywords = ' '.join([
92 keywords = ' '.join([
93 'CLI', 'RhodeCode', 'RhodeCode Enterprise', 'RhodeCode Tools'])
93 'CLI', 'RhodeCode', 'RhodeCode Enterprise', 'RhodeCode Tools'])
94
94
95 # README/DESCRIPTION generation
95 # README/DESCRIPTION generation
96 readme_file = 'README.rst'
96 readme_file = 'README.rst'
97 changelog_file = 'CHANGES.rst'
97 changelog_file = 'CHANGES.rst'
98 try:
98 try:
99 long_description = codecs.open(readme_file).read() + '\n\n' + \
99 long_description = codecs.open(readme_file).read() + '\n\n' + \
100 codecs.open(changelog_file).read()
100 codecs.open(changelog_file).read()
101 except IOError as err:
101 except IOError as err:
102 sys.stderr.write(
102 sys.stderr.write(
103 "[WARNING] Cannot find file specified as long_description (%s)\n "
103 "[WARNING] Cannot find file specified as long_description (%s)\n "
104 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
104 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
105 long_description = description
105 long_description = description
106
106
107
107
108 setup(
108 setup(
109 name='rhodecode-vcsserver',
109 name='rhodecode-vcsserver',
110 version=get_version(),
110 version=get_version(),
111 description=description,
111 description=description,
112 long_description=long_description,
112 long_description=long_description,
113 keywords=keywords,
113 keywords=keywords,
114 license=__license__,
114 license=__license__,
115 author=__author__,
115 author=__author__,
116 author_email='admin@rhodecode.com',
116 author_email='admin@rhodecode.com',
117 url=__url__,
117 url=__url__,
118 setup_requires=setup_requirements,
118 setup_requires=setup_requirements,
119 install_requires=install_requirements,
119 install_requires=install_requirements,
120 tests_require=test_requirements,
120 tests_require=test_requirements,
121 zip_safe=False,
121 zip_safe=False,
122 packages=find_packages(exclude=["docs", "tests*"]),
122 packages=find_packages(exclude=["docs", "tests*"]),
123 package_data=package_data,
123 package_data=package_data,
124 include_package_data=True,
124 include_package_data=True,
125 classifiers=[
125 classifiers=[
126 'Development Status :: 6 - Mature',
126 'Development Status :: 6 - Mature',
127 'Intended Audience :: Developers',
127 'Intended Audience :: Developers',
128 'Operating System :: OS Independent',
128 'Operating System :: OS Independent',
129 'Topic :: Software Development :: Version Control',
129 'Topic :: Software Development :: Version Control',
130 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
130 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
131 'Programming Language :: Python :: 2.7',
131 'Programming Language :: Python :: 2.7',
132 ],
132 ],
133 entry_points={
133 entry_points={
134 'console_scripts': [
135 'vcsserver=vcsserver.main:main',
136 ],
137 'paste.app_factory': ['main=vcsserver.http_main:main']
134 'paste.app_factory': ['main=vcsserver.http_main:main']
138 },
135 },
139 )
136 )
@@ -1,67 +1,66 b''
1 # This file contains the adjustments which are desired for a development
1 # This file contains the adjustments which are desired for a development
2 # environment.
2 # environment.
3
3
4 { pkgs ? (import <nixpkgs> {})
4 { pkgs ? (import <nixpkgs> {})
5 , pythonPackages ? "python27Packages"
5 , pythonPackages ? "python27Packages"
6 , doCheck ? false
6 , doCheck ? false
7 }:
7 }:
8
8
9 let
9 let
10
10
11 vcsserver = import ./default.nix {
11 vcsserver = import ./default.nix {
12 inherit
12 inherit
13 pkgs
14 doCheck;
13 doCheck;
15 };
14 };
16
15
17 vcs-pythonPackages = vcsserver.pythonPackages;
16 vcs-pythonPackages = vcsserver.pythonPackages;
18
17
19 in vcsserver.override (attrs: {
18 in vcsserver.override (attrs: {
20 # Avoid dumping any sources into the store when entering the shell and
19 # Avoid dumping any sources into the store when entering the shell and
21 # make development a little bit more convenient.
20 # make development a little bit more convenient.
22 src = null;
21 src = null;
23
22
24 # Add dependencies which are useful for the development environment.
23 # Add dependencies which are useful for the development environment.
25 buildInputs =
24 buildInputs =
26 attrs.buildInputs ++
25 attrs.buildInputs ++
27 (with vcs-pythonPackages; [
26 (with vcs-pythonPackages; [
28 ipdb
27 ipdb
29 ]);
28 ]);
30
29
31 # place to inject some required libs from develop installs
30 # place to inject some required libs from develop installs
32 propagatedBuildInputs =
31 propagatedBuildInputs =
33 attrs.propagatedBuildInputs ++
32 attrs.propagatedBuildInputs ++
34 [];
33 [];
35
34
36
35
37 # Make sure we execute both hooks
36 # Make sure we execute both hooks
38 shellHook = ''
37 shellHook = ''
39 runHook preShellHook
38 runHook preShellHook
40 runHook postShellHook
39 runHook postShellHook
41 '';
40 '';
42
41
43 preShellHook = ''
42 preShellHook = ''
44 echo "Entering VCS-Shell"
43 echo "Entering VCS-Shell"
45
44
46 # Custom prompt to distinguish from other dev envs.
45 # Custom prompt to distinguish from other dev envs.
47 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
46 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
48
47
49 # Set locale
48 # Set locale
50 export LC_ALL="en_US.UTF-8"
49 export LC_ALL="en_US.UTF-8"
51
50
52 # Set up a temporary directory.
51 # Set up a temporary directory.
53 tmp_path=$(mktemp -d)
52 tmp_path=$(mktemp -d)
54 export PATH="$tmp_path/bin:$PATH"
53 export PATH="$tmp_path/bin:$PATH"
55 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
54 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
56 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
55 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
57
56
58 # Develop installation
57 # Develop installation
59 echo "[BEGIN]: develop install of rhodecode-vcsserver"
58 echo "[BEGIN]: develop install of rhodecode-vcsserver"
60 python setup.py develop --prefix $tmp_path --allow-hosts ""
59 python setup.py develop --prefix $tmp_path --allow-hosts ""
61 '';
60 '';
62
61
63 postShellHook = ''
62 postShellHook = ''
64
63
65 '';
64 '';
66
65
67 })
66 })
@@ -1,1 +1,1 b''
1 4.13.3 No newline at end of file
1 4.14.0 No newline at end of file
@@ -1,675 +1,728 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 import collections
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import traceback
23 import traceback
24 import urllib
24 import urllib
25 import urllib2
25 import urllib2
26 from functools import wraps
26 from functools import wraps
27
27
28 from dulwich import index, objects
28 from dulwich import index, objects
29 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.client import HttpGitClient, LocalGitClient
30 from dulwich.errors import (
30 from dulwich.errors import (
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 MissingCommitError, ObjectMissing, HangupException,
32 MissingCommitError, ObjectMissing, HangupException,
33 UnexpectedCommandError)
33 UnexpectedCommandError)
34 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.repo import Repo as DulwichRepo, Tag
35 from dulwich.server import update_server_info
35 from dulwich.server import update_server_info
36
36
37 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver import exceptions, settings, subprocessio
38 from vcsserver.utils import safe_str
38 from vcsserver.utils import safe_str
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 from vcsserver.hgcompat import (
40 from vcsserver.hgcompat import (
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 from vcsserver.git_lfs.lib import LFSOidStore
42 from vcsserver.git_lfs.lib import LFSOidStore
43
43
44 DIR_STAT = stat.S_IFDIR
44 DIR_STAT = stat.S_IFDIR
45 FILE_MODE = stat.S_IFMT
45 FILE_MODE = stat.S_IFMT
46 GIT_LINK = objects.S_IFGITLINK
46 GIT_LINK = objects.S_IFGITLINK
47
47
48 log = logging.getLogger(__name__)
48 log = logging.getLogger(__name__)
49
49
50
50
51 def reraise_safe_exceptions(func):
51 def reraise_safe_exceptions(func):
52 """Converts Dulwich exceptions to something neutral."""
52 """Converts Dulwich exceptions to something neutral."""
53 @wraps(func)
53 @wraps(func)
54 def wrapper(*args, **kwargs):
54 def wrapper(*args, **kwargs):
55 try:
55 try:
56 return func(*args, **kwargs)
56 return func(*args, **kwargs)
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 ObjectMissing) as e:
58 ObjectMissing) as e:
59 raise exceptions.LookupException(e)(e.message)
59 exc = exceptions.LookupException(e)
60 raise exc(e)
60 except (HangupException, UnexpectedCommandError) as e:
61 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e)(e.message)
62 exc = exceptions.VcsException(e)
63 raise exc(e)
62 except Exception as e:
64 except Exception as e:
63 # NOTE(marcink): because of how dulwich handles some exceptions
65 # NOTE(marcink): because of how dulwich handles some exceptions
64 # (KeyError on empty repos), we cannot track this and catch all
66 # (KeyError on empty repos), we cannot track this and catch all
65 # exceptions; these are exceptions from other handlers
67 # exceptions; these are exceptions from other handlers
66 #if not hasattr(e, '_vcs_kind'):
68 #if not hasattr(e, '_vcs_kind'):
67 #log.exception("Unhandled exception in git remote call")
69 #log.exception("Unhandled exception in git remote call")
68 #raise_from_original(exceptions.UnhandledException)
70 #raise_from_original(exceptions.UnhandledException)
69 raise
71 raise
70 return wrapper
72 return wrapper
71
73
72
74
73 class Repo(DulwichRepo):
75 class Repo(DulwichRepo):
74 """
76 """
75 A wrapper for dulwich Repo class.
77 A wrapper for dulwich Repo class.
76
78
77 Since dulwich sometimes keeps .idx file descriptors open, this can lead to a
79 Since dulwich sometimes keeps .idx file descriptors open, this can lead to a
78 "Too many open files" error. We need to close all opened file descriptors
80 "Too many open files" error. We need to close all opened file descriptors
79 once the repo object is destroyed.
81 once the repo object is destroyed.
80
82
81 TODO: mikhail: please check if we need this wrapper after updating dulwich
83 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 to 0.12.0 +
84 to 0.12.0 +
83 """
85 """
84 def __del__(self):
86 def __del__(self):
85 if hasattr(self, 'object_store'):
87 if hasattr(self, 'object_store'):
86 self.close()
88 self.close()
87
89
88
90
89 class GitFactory(RepoFactory):
91 class GitFactory(RepoFactory):
90 repo_type = 'git'
92 repo_type = 'git'
91
93
92 def _create_repo(self, wire, create):
94 def _create_repo(self, wire, create):
93 repo_path = str_to_dulwich(wire['path'])
95 repo_path = str_to_dulwich(wire['path'])
94 return Repo(repo_path)
96 return Repo(repo_path)
95
97
96
98
97 class GitRemote(object):
99 class GitRemote(object):
98
100
99 def __init__(self, factory):
101 def __init__(self, factory):
100 self._factory = factory
102 self._factory = factory
101
103 self.peeled_ref_marker = '^{}'
102 self._bulk_methods = {
104 self._bulk_methods = {
103 "author": self.commit_attribute,
105 "author": self.commit_attribute,
104 "date": self.get_object_attrs,
106 "date": self.get_object_attrs,
105 "message": self.commit_attribute,
107 "message": self.commit_attribute,
106 "parents": self.commit_attribute,
108 "parents": self.commit_attribute,
107 "_commit": self.revision,
109 "_commit": self.revision,
108 }
110 }
109
111
110 def _wire_to_config(self, wire):
112 def _wire_to_config(self, wire):
111 if 'config' in wire:
113 if 'config' in wire:
112 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
114 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
113 return {}
115 return {}
114
116
115 def _assign_ref(self, wire, ref, commit_id):
117 def _assign_ref(self, wire, ref, commit_id):
116 repo = self._factory.repo(wire)
118 repo = self._factory.repo(wire)
117 repo[ref] = commit_id
119 repo[ref] = commit_id
118
120
119 @reraise_safe_exceptions
121 @reraise_safe_exceptions
120 def add_object(self, wire, content):
122 def add_object(self, wire, content):
121 repo = self._factory.repo(wire)
123 repo = self._factory.repo(wire)
122 blob = objects.Blob()
124 blob = objects.Blob()
123 blob.set_raw_string(content)
125 blob.set_raw_string(content)
124 repo.object_store.add_object(blob)
126 repo.object_store.add_object(blob)
125 return blob.id
127 return blob.id
126
128
127 @reraise_safe_exceptions
129 @reraise_safe_exceptions
128 def assert_correct_path(self, wire):
130 def assert_correct_path(self, wire):
129 path = wire.get('path')
131 path = wire.get('path')
130 try:
132 try:
131 self._factory.repo(wire)
133 self._factory.repo(wire)
132 except NotGitRepository as e:
134 except NotGitRepository as e:
133 tb = traceback.format_exc()
135 tb = traceback.format_exc()
134 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
136 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
135 return False
137 return False
136
138
137 return True
139 return True
138
140
139 @reraise_safe_exceptions
141 @reraise_safe_exceptions
140 def bare(self, wire):
142 def bare(self, wire):
141 repo = self._factory.repo(wire)
143 repo = self._factory.repo(wire)
142 return repo.bare
144 return repo.bare
143
145
144 @reraise_safe_exceptions
146 @reraise_safe_exceptions
145 def blob_as_pretty_string(self, wire, sha):
147 def blob_as_pretty_string(self, wire, sha):
146 repo = self._factory.repo(wire)
148 repo = self._factory.repo(wire)
147 return repo[sha].as_pretty_string()
149 return repo[sha].as_pretty_string()
148
150
149 @reraise_safe_exceptions
151 @reraise_safe_exceptions
150 def blob_raw_length(self, wire, sha):
152 def blob_raw_length(self, wire, sha):
151 repo = self._factory.repo(wire)
153 repo = self._factory.repo(wire)
152 blob = repo[sha]
154 blob = repo[sha]
153 return blob.raw_length()
155 return blob.raw_length()
154
156
155 def _parse_lfs_pointer(self, raw_content):
157 def _parse_lfs_pointer(self, raw_content):
156
158
157 spec_string = 'version https://git-lfs.github.com/spec'
159 spec_string = 'version https://git-lfs.github.com/spec'
158 if raw_content and raw_content.startswith(spec_string):
160 if raw_content and raw_content.startswith(spec_string):
159 pattern = re.compile(r"""
161 pattern = re.compile(r"""
160 (?:\n)?
162 (?:\n)?
161 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
163 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
162 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
164 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
163 ^size[ ](?P<oid_size>[0-9]+)\n
165 ^size[ ](?P<oid_size>[0-9]+)\n
164 (?:\n)?
166 (?:\n)?
165 """, re.VERBOSE | re.MULTILINE)
167 """, re.VERBOSE | re.MULTILINE)
166 match = pattern.match(raw_content)
168 match = pattern.match(raw_content)
167 if match:
169 if match:
168 return match.groupdict()
170 return match.groupdict()
169
171
170 return {}
172 return {}
171
173
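# Self-contained sketch of the LFS pointer parsing above, with a sample
# pointer invented for illustration (64 hex characters stand in for the oid).
import re

example_pointer = (
    'version https://git-lfs.github.com/spec/v1\n'
    'oid sha256:' + 'a' * 64 + '\n'
    'size 12345\n'
)
example_pattern = re.compile(r"""
    (?:\n)?
    ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
    ^oid[ ]sha256:(?P<oid_hash>[0-9a-f]{64})\n
    ^size[ ](?P<oid_size>[0-9]+)\n
    (?:\n)?
    """, re.VERBOSE | re.MULTILINE)
# example_pattern.match(example_pointer).groupdict() ->
# {'spec_ver': 'v1', 'oid_hash': 'aaa...a', 'oid_size': '12345'}
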
172 @reraise_safe_exceptions
174 @reraise_safe_exceptions
173 def is_large_file(self, wire, sha):
175 def is_large_file(self, wire, sha):
174 repo = self._factory.repo(wire)
176 repo = self._factory.repo(wire)
175 blob = repo[sha]
177 blob = repo[sha]
176 return self._parse_lfs_pointer(blob.as_raw_string())
178 return self._parse_lfs_pointer(blob.as_raw_string())
177
179
178 @reraise_safe_exceptions
180 @reraise_safe_exceptions
179 def in_largefiles_store(self, wire, oid):
181 def in_largefiles_store(self, wire, oid):
180 repo = self._factory.repo(wire)
182 repo = self._factory.repo(wire)
181 conf = self._wire_to_config(wire)
183 conf = self._wire_to_config(wire)
182
184
183 store_location = conf.get('vcs_git_lfs_store_location')
185 store_location = conf.get('vcs_git_lfs_store_location')
184 if store_location:
186 if store_location:
185 repo_name = repo.path
187 repo_name = repo.path
186 store = LFSOidStore(
188 store = LFSOidStore(
187 oid=oid, repo=repo_name, store_location=store_location)
189 oid=oid, repo=repo_name, store_location=store_location)
188 return store.has_oid()
190 return store.has_oid()
189
191
190 return False
192 return False
191
193
192 @reraise_safe_exceptions
194 @reraise_safe_exceptions
193 def store_path(self, wire, oid):
195 def store_path(self, wire, oid):
194 repo = self._factory.repo(wire)
196 repo = self._factory.repo(wire)
195 conf = self._wire_to_config(wire)
197 conf = self._wire_to_config(wire)
196
198
197 store_location = conf.get('vcs_git_lfs_store_location')
199 store_location = conf.get('vcs_git_lfs_store_location')
198 if store_location:
200 if store_location:
199 repo_name = repo.path
201 repo_name = repo.path
200 store = LFSOidStore(
202 store = LFSOidStore(
201 oid=oid, repo=repo_name, store_location=store_location)
203 oid=oid, repo=repo_name, store_location=store_location)
202 return store.oid_path
204 return store.oid_path
203 raise ValueError('Unable to fetch oid with path {}'.format(oid))
205 raise ValueError('Unable to fetch oid with path {}'.format(oid))
204
206
205 @reraise_safe_exceptions
207 @reraise_safe_exceptions
206 def bulk_request(self, wire, rev, pre_load):
208 def bulk_request(self, wire, rev, pre_load):
207 result = {}
209 result = {}
208 for attr in pre_load:
210 for attr in pre_load:
209 try:
211 try:
210 method = self._bulk_methods[attr]
212 method = self._bulk_methods[attr]
211 args = [wire, rev]
213 args = [wire, rev]
212 if attr == "date":
214 if attr == "date":
213 args.extend(["commit_time", "commit_timezone"])
215 args.extend(["commit_time", "commit_timezone"])
214 elif attr in ["author", "message", "parents"]:
216 elif attr in ["author", "message", "parents"]:
215 args.append(attr)
217 args.append(attr)
216 result[attr] = method(*args)
218 result[attr] = method(*args)
217 except KeyError as e:
219 except KeyError as e:
218 raise exceptions.VcsException(e)(
220 raise exceptions.VcsException(e)(
219 "Unknown bulk attribute: %s" % attr)
221 "Unknown bulk attribute: %s" % attr)
220 return result
222 return result
221
223
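# Hedged sketch of the dispatch pattern bulk_request implements: one call
# resolves several commit attributes by routing each name to a handler.
# The demo class, method and return values below are invented for illustration.
class _BulkDemo(object):
    def commit_attribute(self, wire, rev, attr):
        return 'value-of-%s' % attr

    def bulk(self, wire, rev, pre_load):
        methods = {'author': self.commit_attribute,
                   'message': self.commit_attribute}
        return dict((attr, methods[attr](wire, rev, attr)) for attr in pre_load)

# _BulkDemo().bulk({}, 'deadbeef', ['author', 'message'])
# -> {'author': 'value-of-author', 'message': 'value-of-message'}
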
222 def _build_opener(self, url):
224 def _build_opener(self, url):
223 handlers = []
225 handlers = []
224 url_obj = url_parser(url)
226 url_obj = url_parser(url)
225 _, authinfo = url_obj.authinfo()
227 _, authinfo = url_obj.authinfo()
226
228
227 if authinfo:
229 if authinfo:
228 # create a password manager
230 # create a password manager
229 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
231 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
230 passmgr.add_password(*authinfo)
232 passmgr.add_password(*authinfo)
231
233
232 handlers.extend((httpbasicauthhandler(passmgr),
234 handlers.extend((httpbasicauthhandler(passmgr),
233 httpdigestauthhandler(passmgr)))
235 httpdigestauthhandler(passmgr)))
234
236
235 return urllib2.build_opener(*handlers)
237 return urllib2.build_opener(*handlers)
236
238
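# Hedged stand-in for the opener construction above, using the plain urllib2
# auth handlers (the module itself uses its httpbasicauthhandler and
# httpdigestauthhandler wrappers); the URL and credentials are invented.
import urllib2

example_passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
example_passmgr.add_password(
    None, 'https://example.com/repo.git', 'user', 'secret')
example_opener = urllib2.build_opener(
    urllib2.HTTPBasicAuthHandler(example_passmgr),
    urllib2.HTTPDigestAuthHandler(example_passmgr))
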
237 @reraise_safe_exceptions
239 @reraise_safe_exceptions
238 def check_url(self, url, config):
240 def check_url(self, url, config):
239 url_obj = url_parser(url)
241 url_obj = url_parser(url)
240 test_uri, _ = url_obj.authinfo()
242 test_uri, _ = url_obj.authinfo()
241 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
243 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
242 url_obj.query = obfuscate_qs(url_obj.query)
244 url_obj.query = obfuscate_qs(url_obj.query)
243 cleaned_uri = str(url_obj)
245 cleaned_uri = str(url_obj)
244 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
246 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
245
247
246 if not test_uri.endswith('info/refs'):
248 if not test_uri.endswith('info/refs'):
247 test_uri = test_uri.rstrip('/') + '/info/refs'
249 test_uri = test_uri.rstrip('/') + '/info/refs'
248
250
249 o = self._build_opener(url)
251 o = self._build_opener(url)
250 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
252 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
251
253
252 q = {"service": 'git-upload-pack'}
254 q = {"service": 'git-upload-pack'}
253 qs = '?%s' % urllib.urlencode(q)
255 qs = '?%s' % urllib.urlencode(q)
254 cu = "%s%s" % (test_uri, qs)
256 cu = "%s%s" % (test_uri, qs)
255 req = urllib2.Request(cu, None, {})
257 req = urllib2.Request(cu, None, {})
256
258
257 try:
259 try:
258 log.debug("Trying to open URL %s", cleaned_uri)
260 log.debug("Trying to open URL %s", cleaned_uri)
259 resp = o.open(req)
261 resp = o.open(req)
260 if resp.code != 200:
262 if resp.code != 200:
261 raise exceptions.URLError()('Return Code is not 200')
263 raise exceptions.URLError()('Return Code is not 200')
262 except Exception as e:
264 except Exception as e:
263 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
265 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
264 # means it cannot be cloned
266 # means it cannot be cloned
265 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
267 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
266
268
267 # now detect if it's a proper git repo
269 # now detect if it's a proper git repo
268 gitdata = resp.read()
270 gitdata = resp.read()
269 if 'service=git-upload-pack' in gitdata:
271 if 'service=git-upload-pack' in gitdata:
270 pass
272 pass
271 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
273 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
272 # old-style git can return some other format!
274 # old-style git can return some other format!
273 pass
275 pass
274 else:
276 else:
275 raise exceptions.URLError()(
277 raise exceptions.URLError()(
276 "url [%s] does not look like an git" % (cleaned_uri,))
278 "url [%s] does not look like an git" % (cleaned_uri,))
277
279
278 return True
280 return True
279
281
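# Sketch of the smart-HTTP probe URL assembled above; the repository URL is
# invented for illustration.
import urllib

example_test_uri = 'https://example.com/repo.git/info/refs'
example_probe = '%s?%s' % (
    example_test_uri, urllib.urlencode({'service': 'git-upload-pack'}))
# -> 'https://example.com/repo.git/info/refs?service=git-upload-pack'
# A 200 response whose body contains 'service=git-upload-pack' (or the old
# style '<sha> refs' listing) is accepted as a git repository.
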
280 @reraise_safe_exceptions
282 @reraise_safe_exceptions
281 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
283 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
282 remote_refs = self.fetch(wire, url, apply_refs=False)
284 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
285 remote_refs = self.pull(wire, url, apply_refs=False)
283 repo = self._factory.repo(wire)
286 repo = self._factory.repo(wire)
284 if isinstance(valid_refs, list):
287 if isinstance(valid_refs, list):
285 valid_refs = tuple(valid_refs)
288 valid_refs = tuple(valid_refs)
286
289
287 for k in remote_refs:
290 for k in remote_refs:
288 # only parse heads/tags and skip so-called deferred tags
291 # only parse heads/tags and skip so-called deferred tags
289 if k.startswith(valid_refs) and not k.endswith(deferred):
292 if k.startswith(valid_refs) and not k.endswith(deferred):
290 repo[k] = remote_refs[k]
293 repo[k] = remote_refs[k]
291
294
292 if update_after_clone:
295 if update_after_clone:
293 # we want to checkout HEAD
296 # we want to checkout HEAD
294 repo["HEAD"] = remote_refs["HEAD"]
297 repo["HEAD"] = remote_refs["HEAD"]
295 index.build_index_from_tree(repo.path, repo.index_path(),
298 index.build_index_from_tree(repo.path, repo.index_path(),
296 repo.object_store, repo["HEAD"].tree)
299 repo.object_store, repo["HEAD"].tree)
297
300
298 # TODO: this is quite complex, check if that can be simplified
301 # TODO: this is quite complex, check if that can be simplified
299 @reraise_safe_exceptions
302 @reraise_safe_exceptions
300 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
303 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
301 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
302 object_store = repo.object_store
305 object_store = repo.object_store
303
306
304 # Create tree and populate it with blobs
307 # Create tree and populate it with blobs
305 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
308 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
306
309
307 for node in updated:
310 for node in updated:
308 # Compute subdirs if needed
311 # Compute subdirs if needed
309 dirpath, nodename = vcspath.split(node['path'])
312 dirpath, nodename = vcspath.split(node['path'])
310 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
313 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
311 parent = commit_tree
314 parent = commit_tree
312 ancestors = [('', parent)]
315 ancestors = [('', parent)]
313
316
314 # Tries to dig for the deepest existing tree
317 # Tries to dig for the deepest existing tree
315 while dirnames:
318 while dirnames:
316 curdir = dirnames.pop(0)
319 curdir = dirnames.pop(0)
317 try:
320 try:
318 dir_id = parent[curdir][1]
321 dir_id = parent[curdir][1]
319 except KeyError:
322 except KeyError:
320 # put curdir back into dirnames and stop
323 # put curdir back into dirnames and stop
321 dirnames.insert(0, curdir)
324 dirnames.insert(0, curdir)
322 break
325 break
323 else:
326 else:
324 # If found, updates parent
327 # If found, updates parent
325 parent = repo[dir_id]
328 parent = repo[dir_id]
326 ancestors.append((curdir, parent))
329 ancestors.append((curdir, parent))
327 # Now parent is deepest existing tree and we need to create
330 # Now parent is deepest existing tree and we need to create
328 # subtrees for dirnames (in reverse order)
331 # subtrees for dirnames (in reverse order)
329 # [this only applies for nodes from added]
332 # [this only applies for nodes from added]
330 new_trees = []
333 new_trees = []
331
334
332 blob = objects.Blob.from_string(node['content'])
335 blob = objects.Blob.from_string(node['content'])
333
336
334 if dirnames:
337 if dirnames:
335 # If there are trees which should be created we need to build
338 # If there are trees which should be created we need to build
336 # them now (in reverse order)
339 # them now (in reverse order)
337 reversed_dirnames = list(reversed(dirnames))
340 reversed_dirnames = list(reversed(dirnames))
338 curtree = objects.Tree()
341 curtree = objects.Tree()
339 curtree[node['node_path']] = node['mode'], blob.id
342 curtree[node['node_path']] = node['mode'], blob.id
340 new_trees.append(curtree)
343 new_trees.append(curtree)
341 for dirname in reversed_dirnames[:-1]:
344 for dirname in reversed_dirnames[:-1]:
342 newtree = objects.Tree()
345 newtree = objects.Tree()
343 newtree[dirname] = (DIR_STAT, curtree.id)
346 newtree[dirname] = (DIR_STAT, curtree.id)
344 new_trees.append(newtree)
347 new_trees.append(newtree)
345 curtree = newtree
348 curtree = newtree
346 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
349 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
347 else:
350 else:
348 parent.add(
351 parent.add(
349 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
352 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
350
353
351 new_trees.append(parent)
354 new_trees.append(parent)
352 # Update ancestors
355 # Update ancestors
353 reversed_ancestors = reversed(
356 reversed_ancestors = reversed(
354 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
357 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
355 for parent, tree, path in reversed_ancestors:
358 for parent, tree, path in reversed_ancestors:
356 parent[path] = (DIR_STAT, tree.id)
359 parent[path] = (DIR_STAT, tree.id)
357 object_store.add_object(tree)
360 object_store.add_object(tree)
358
361
359 object_store.add_object(blob)
362 object_store.add_object(blob)
360 for tree in new_trees:
363 for tree in new_trees:
361 object_store.add_object(tree)
364 object_store.add_object(tree)
362
365
363 for node_path in removed:
366 for node_path in removed:
364 paths = node_path.split('/')
367 paths = node_path.split('/')
365 tree = commit_tree
368 tree = commit_tree
366 trees = [tree]
369 trees = [tree]
367 # Traverse deep into the forest...
370 # Traverse deep into the forest...
368 for path in paths:
371 for path in paths:
369 try:
372 try:
370 obj = repo[tree[path][1]]
373 obj = repo[tree[path][1]]
371 if isinstance(obj, objects.Tree):
374 if isinstance(obj, objects.Tree):
372 trees.append(obj)
375 trees.append(obj)
373 tree = obj
376 tree = obj
374 except KeyError:
377 except KeyError:
375 break
378 break
376 # Cut down the blob and all rotten trees on the way back...
379 # Cut down the blob and all rotten trees on the way back...
377 for path, tree in reversed(zip(paths, trees)):
380 for path, tree in reversed(zip(paths, trees)):
378 del tree[path]
381 del tree[path]
379 if tree:
382 if tree:
380 # This tree still has elements - don't remove it or any
383 # This tree still has elements - don't remove it or any
381 # of its parents
384 # of its parents
382 break
385 break
383
386
384 object_store.add_object(commit_tree)
387 object_store.add_object(commit_tree)
385
388
386 # Create commit
389 # Create commit
387 commit = objects.Commit()
390 commit = objects.Commit()
388 commit.tree = commit_tree.id
391 commit.tree = commit_tree.id
389 for k, v in commit_data.iteritems():
392 for k, v in commit_data.iteritems():
390 setattr(commit, k, v)
393 setattr(commit, k, v)
391 object_store.add_object(commit)
394 object_store.add_object(commit)
392
395
393 ref = 'refs/heads/%s' % branch
396 ref = 'refs/heads/%s' % branch
394 repo.refs[ref] = commit.id
397 repo.refs[ref] = commit.id
395
398
396 return commit.id
399 return commit.id
397
400
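# Hedged sketch of the blob/tree composition commit() performs, using an
# in-memory dulwich object store; paths, content and the directory mode
# constant are invented here (DIR_STAT above plays the same role).
from dulwich import objects
from dulwich.object_store import MemoryObjectStore

EXAMPLE_DIR_MODE = 0o040000
example_store = MemoryObjectStore()
example_blob = objects.Blob.from_string('print("hello")\n')
example_subtree = objects.Tree()
example_subtree['hello.py'] = (0o100644, example_blob.id)
example_root = objects.Tree()
example_root['src'] = (EXAMPLE_DIR_MODE, example_subtree.id)
for example_obj in (example_blob, example_subtree, example_root):
    example_store.add_object(example_obj)
# example_root now references src/hello.py via nested tree entries, which is
# the structure the Commit object's `tree` attribute points at.
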
398 @reraise_safe_exceptions
401 @reraise_safe_exceptions
399 def fetch(self, wire, url, apply_refs=True, refs=None):
402 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
400 if url != 'default' and '://' not in url:
403 if url != 'default' and '://' not in url:
401 client = LocalGitClient(url)
404 client = LocalGitClient(url)
402 else:
405 else:
403 url_obj = url_parser(url)
406 url_obj = url_parser(url)
404 o = self._build_opener(url)
407 o = self._build_opener(url)
405 url, _ = url_obj.authinfo()
408 url, _ = url_obj.authinfo()
406 client = HttpGitClient(base_url=url, opener=o)
409 client = HttpGitClient(base_url=url, opener=o)
407 repo = self._factory.repo(wire)
410 repo = self._factory.repo(wire)
408
411
409 determine_wants = repo.object_store.determine_wants_all
412 determine_wants = repo.object_store.determine_wants_all
410 if refs:
413 if refs:
411 def determine_wants_requested(references):
414 def determine_wants_requested(references):
412 return [references[r] for r in references if r in refs]
415 return [references[r] for r in references if r in refs]
413 determine_wants = determine_wants_requested
416 determine_wants = determine_wants_requested
414
417
415 try:
418 try:
416 remote_refs = client.fetch(
419 remote_refs = client.fetch(
417 path=url, target=repo, determine_wants=determine_wants)
420 path=url, target=repo, determine_wants=determine_wants)
418 except NotGitRepository as e:
421 except NotGitRepository as e:
419 log.warning(
422 log.warning(
420 'Trying to fetch from "%s" failed, not a Git repository.', url)
423 'Trying to fetch from "%s" failed, not a Git repository.', url)
421 # Exception can contain unicode which we convert
424 # Exception can contain unicode which we convert
422 raise exceptions.AbortException(e)(repr(e))
425 raise exceptions.AbortException(e)(repr(e))
423
426
424 # mikhail: client.fetch() returns all the remote refs, but fetches only
427 # mikhail: client.fetch() returns all the remote refs, but fetches only
425 # refs filtered by the `determine_wants` function. We need to filter the result
428 # refs filtered by the `determine_wants` function. We need to filter the result
426 # as well
429 # as well
427 if refs:
430 if refs:
428 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
431 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
429
432
430 if apply_refs:
433 if apply_refs:
431 # TODO: johbo: Needs proper test coverage with a git repository
434 # TODO: johbo: Needs proper test coverage with a git repository
432 # that contains a tag object, so that we would end up with
435 # that contains a tag object, so that we would end up with
433 # a peeled ref at this point.
436 # a peeled ref at this point.
434 PEELED_REF_MARKER = '^{}'
435 for k in remote_refs:
437 for k in remote_refs:
436 if k.endswith(PEELED_REF_MARKER):
438 if k.endswith(self.peeled_ref_marker):
437 log.info("Skipping peeled reference %s", k)
439 log.debug("Skipping peeled reference %s", k)
438 continue
440 continue
439 repo[k] = remote_refs[k]
441 repo[k] = remote_refs[k]
440
442
441 if refs:
443 if refs and not update_after:
442 # mikhail: explicitly set the head to the last ref.
444 # mikhail: explicitly set the head to the last ref.
443 repo['HEAD'] = remote_refs[refs[-1]]
445 repo['HEAD'] = remote_refs[refs[-1]]
444
446
445 # TODO: mikhail: should we return remote_refs here to be
447 if update_after:
446 # consistent?
448 # we want to checkout HEAD
447 else:
449 repo["HEAD"] = remote_refs["HEAD"]
450 index.build_index_from_tree(repo.path, repo.index_path(),
451 repo.object_store, repo["HEAD"].tree)
452 return remote_refs
453
454 @reraise_safe_exceptions
455 def sync_fetch(self, wire, url, refs=None):
456 repo = self._factory.repo(wire)
457 if refs and not isinstance(refs, (list, tuple)):
458 refs = [refs]
459
460 # get all remote refs we'll use to fetch later
461 output, __ = self.run_git_command(
462 wire, ['ls-remote', url], fail_on_stderr=False,
463 _copts=['-c', 'core.askpass=""'],
464 extra_env={'GIT_TERMINAL_PROMPT': '0'})
465
466 remote_refs = collections.OrderedDict()
467 fetch_refs = []
468
469 for ref_line in output.splitlines():
470 sha, ref = ref_line.split('\t')
471 sha = sha.strip()
472 if ref in remote_refs:
473 # duplicate, skip
474 continue
475 if ref.endswith(self.peeled_ref_marker):
476 log.debug("Skipping peeled reference %s", ref)
477 continue
478 # don't sync HEAD
479 if ref in ['HEAD']:
480 continue
481
482 remote_refs[ref] = sha
483
484 if refs and sha in refs:
485 # we filter fetch using our specified refs
486 fetch_refs.append('{}:{}'.format(ref, ref))
487 elif not refs:
488 fetch_refs.append('{}:{}'.format(ref, ref))
489
490 if fetch_refs:
491 _out, _err = self.run_git_command(
492 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs,
493 fail_on_stderr=False,
494 _copts=['-c', 'core.askpass=""'],
495 extra_env={'GIT_TERMINAL_PROMPT': '0'})
496
448 return remote_refs
497 return remote_refs
449
498
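# Self-contained sketch of the `git ls-remote` parsing done in sync_fetch; the
# sample output is invented, the real output is tab-separated "<sha>\t<ref>"
# lines.
import collections

example_sha_a = 'a1b2c3d4' + '0' * 32
example_sha_b = 'f00dbabe' + '0' * 32
example_output = '\n'.join([
    example_sha_a + '\tHEAD',
    example_sha_a + '\trefs/heads/master',
    example_sha_b + '\trefs/tags/v1.0.0',
    example_sha_b + '\trefs/tags/v1.0.0^{}',
])
example_refs = collections.OrderedDict()
for example_line in example_output.splitlines():
    example_sha, example_ref = example_line.split('\t')
    if example_ref == 'HEAD' or example_ref.endswith('^{}'):
        # HEAD and peeled tag entries are skipped, just like above
        continue
    example_refs[example_ref] = example_sha.strip()
# example_refs keeps only refs/heads/master and refs/tags/v1.0.0, each of
# which would then be fetched as '<ref>:<ref>'.
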
450 @reraise_safe_exceptions
499 @reraise_safe_exceptions
451 def sync_push(self, wire, url, refs=None):
500 def sync_push(self, wire, url, refs=None):
452 if self.check_url(url, wire):
501 if not self.check_url(url, wire):
502 return
503
453 repo = self._factory.repo(wire)
504 repo = self._factory.repo(wire)
454 self.run_git_command(
505 self.run_git_command(
455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
506 wire, ['push', url, '--mirror'], fail_on_stderr=False,
456 _copts=['-c', 'core.askpass=""'],
507 _copts=['-c', 'core.askpass=""'],
457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
458
509
459 @reraise_safe_exceptions
510 @reraise_safe_exceptions
460 def get_remote_refs(self, wire, url):
511 def get_remote_refs(self, wire, url):
461 repo = Repo(url)
512 repo = Repo(url)
462 return repo.get_refs()
513 return repo.get_refs()
463
514
464 @reraise_safe_exceptions
515 @reraise_safe_exceptions
465 def get_description(self, wire):
516 def get_description(self, wire):
466 repo = self._factory.repo(wire)
517 repo = self._factory.repo(wire)
467 return repo.get_description()
518 return repo.get_description()
468
519
469 @reraise_safe_exceptions
520 @reraise_safe_exceptions
470 def get_file_history(self, wire, file_path, commit_id, limit):
521 def get_file_history(self, wire, file_path, commit_id, limit):
471 repo = self._factory.repo(wire)
522 repo = self._factory.repo(wire)
472 include = [commit_id]
523 include = [commit_id]
473 paths = [file_path]
524 paths = [file_path]
474
525
475 walker = repo.get_walker(include, paths=paths, max_entries=limit)
526 walker = repo.get_walker(include, paths=paths, max_entries=limit)
476 return [x.commit.id for x in walker]
527 return [x.commit.id for x in walker]
477
528
478 @reraise_safe_exceptions
529 @reraise_safe_exceptions
479 def get_missing_revs(self, wire, rev1, rev2, path2):
530 def get_missing_revs(self, wire, rev1, rev2, path2):
480 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
481 LocalGitClient(thin_packs=False).fetch(path2, repo)
532 LocalGitClient(thin_packs=False).fetch(path2, repo)
482
533
483 wire_remote = wire.copy()
534 wire_remote = wire.copy()
484 wire_remote['path'] = path2
535 wire_remote['path'] = path2
485 repo_remote = self._factory.repo(wire_remote)
536 repo_remote = self._factory.repo(wire_remote)
486 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
537 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
487
538
488 revs = [
539 revs = [
489 x.commit.id
540 x.commit.id
490 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
541 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
491 return revs
542 return revs
492
543
493 @reraise_safe_exceptions
544 @reraise_safe_exceptions
494 def get_object(self, wire, sha):
545 def get_object(self, wire, sha):
495 repo = self._factory.repo(wire)
546 repo = self._factory.repo(wire)
496 obj = repo.get_object(sha)
547 obj = repo.get_object(sha)
497 commit_id = obj.id
548 commit_id = obj.id
498
549
499 if isinstance(obj, Tag):
550 if isinstance(obj, Tag):
500 commit_id = obj.object[1]
551 commit_id = obj.object[1]
501
552
502 return {
553 return {
503 'id': obj.id,
554 'id': obj.id,
504 'type': obj.type_name,
555 'type': obj.type_name,
505 'commit_id': commit_id
556 'commit_id': commit_id
506 }
557 }
507
558
508 @reraise_safe_exceptions
559 @reraise_safe_exceptions
509 def get_object_attrs(self, wire, sha, *attrs):
560 def get_object_attrs(self, wire, sha, *attrs):
510 repo = self._factory.repo(wire)
561 repo = self._factory.repo(wire)
511 obj = repo.get_object(sha)
562 obj = repo.get_object(sha)
512 return list(getattr(obj, a) for a in attrs)
563 return list(getattr(obj, a) for a in attrs)
513
564
514 @reraise_safe_exceptions
565 @reraise_safe_exceptions
515 def get_refs(self, wire):
566 def get_refs(self, wire):
516 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
517 result = {}
568 result = {}
518 for ref, sha in repo.refs.as_dict().items():
569 for ref, sha in repo.refs.as_dict().items():
519 peeled_sha = repo.get_peeled(ref)
570 peeled_sha = repo.get_peeled(ref)
520 result[ref] = peeled_sha
571 result[ref] = peeled_sha
521 return result
572 return result
522
573
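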
523 @reraise_safe_exceptions
574 @reraise_safe_exceptions
524 def get_refs_path(self, wire):
575 def get_refs_path(self, wire):
525 repo = self._factory.repo(wire)
576 repo = self._factory.repo(wire)
526 return repo.refs.path
577 return repo.refs.path
527
578
528 @reraise_safe_exceptions
579 @reraise_safe_exceptions
529 def head(self, wire, show_exc=True):
580 def head(self, wire, show_exc=True):
530 repo = self._factory.repo(wire)
581 repo = self._factory.repo(wire)
531 try:
582 try:
532 return repo.head()
583 return repo.head()
533 except Exception:
584 except Exception:
534 if show_exc:
585 if show_exc:
535 raise
586 raise
536
587
537 @reraise_safe_exceptions
588 @reraise_safe_exceptions
538 def init(self, wire):
589 def init(self, wire):
539 repo_path = str_to_dulwich(wire['path'])
590 repo_path = str_to_dulwich(wire['path'])
540 self.repo = Repo.init(repo_path)
591 self.repo = Repo.init(repo_path)
541
592
542 @reraise_safe_exceptions
593 @reraise_safe_exceptions
543 def init_bare(self, wire):
594 def init_bare(self, wire):
544 repo_path = str_to_dulwich(wire['path'])
595 repo_path = str_to_dulwich(wire['path'])
545 self.repo = Repo.init_bare(repo_path)
596 self.repo = Repo.init_bare(repo_path)
546
597
547 @reraise_safe_exceptions
598 @reraise_safe_exceptions
548 def revision(self, wire, rev):
599 def revision(self, wire, rev):
549 repo = self._factory.repo(wire)
600 repo = self._factory.repo(wire)
550 obj = repo[rev]
601 obj = repo[rev]
551 obj_data = {
602 obj_data = {
552 'id': obj.id,
603 'id': obj.id,
553 }
604 }
554 try:
605 try:
555 obj_data['tree'] = obj.tree
606 obj_data['tree'] = obj.tree
556 except AttributeError:
607 except AttributeError:
557 pass
608 pass
558 return obj_data
609 return obj_data
559
610
560 @reraise_safe_exceptions
611 @reraise_safe_exceptions
561 def commit_attribute(self, wire, rev, attr):
612 def commit_attribute(self, wire, rev, attr):
562 repo = self._factory.repo(wire)
613 repo = self._factory.repo(wire)
563 obj = repo[rev]
614 obj = repo[rev]
564 return getattr(obj, attr)
615 return getattr(obj, attr)
565
616
566 @reraise_safe_exceptions
617 @reraise_safe_exceptions
567 def set_refs(self, wire, key, value):
618 def set_refs(self, wire, key, value):
568 repo = self._factory.repo(wire)
619 repo = self._factory.repo(wire)
569 repo.refs[key] = value
620 repo.refs[key] = value
570
621
571 @reraise_safe_exceptions
622 @reraise_safe_exceptions
572 def remove_ref(self, wire, key):
623 def remove_ref(self, wire, key):
573 repo = self._factory.repo(wire)
624 repo = self._factory.repo(wire)
574 del repo.refs[key]
625 del repo.refs[key]
575
626
576 @reraise_safe_exceptions
627 @reraise_safe_exceptions
577 def tree_changes(self, wire, source_id, target_id):
628 def tree_changes(self, wire, source_id, target_id):
578 repo = self._factory.repo(wire)
629 repo = self._factory.repo(wire)
579 source = repo[source_id].tree if source_id else None
630 source = repo[source_id].tree if source_id else None
580 target = repo[target_id].tree
631 target = repo[target_id].tree
581 result = repo.object_store.tree_changes(source, target)
632 result = repo.object_store.tree_changes(source, target)
582 return list(result)
633 return list(result)
583
634
584 @reraise_safe_exceptions
635 @reraise_safe_exceptions
585 def tree_items(self, wire, tree_id):
636 def tree_items(self, wire, tree_id):
586 repo = self._factory.repo(wire)
637 repo = self._factory.repo(wire)
587 tree = repo[tree_id]
638 tree = repo[tree_id]
588
639
589 result = []
640 result = []
590 for item in tree.iteritems():
641 for item in tree.iteritems():
591 item_sha = item.sha
642 item_sha = item.sha
592 item_mode = item.mode
643 item_mode = item.mode
593
644
594 if FILE_MODE(item_mode) == GIT_LINK:
645 if FILE_MODE(item_mode) == GIT_LINK:
595 item_type = "link"
646 item_type = "link"
596 else:
647 else:
597 item_type = repo[item_sha].type_name
648 item_type = repo[item_sha].type_name
598
649
599 result.append((item.path, item_mode, item_sha, item_type))
650 result.append((item.path, item_mode, item_sha, item_type))
600 return result
651 return result
601
652
602 @reraise_safe_exceptions
653 @reraise_safe_exceptions
603 def update_server_info(self, wire):
654 def update_server_info(self, wire):
604 repo = self._factory.repo(wire)
655 repo = self._factory.repo(wire)
605 update_server_info(repo)
656 update_server_info(repo)
606
657
607 @reraise_safe_exceptions
658 @reraise_safe_exceptions
608 def discover_git_version(self):
659 def discover_git_version(self):
609 stdout, _ = self.run_git_command(
660 stdout, _ = self.run_git_command(
610 {}, ['--version'], _bare=True, _safe=True)
661 {}, ['--version'], _bare=True, _safe=True)
611 prefix = 'git version'
662 prefix = 'git version'
612 if stdout.startswith(prefix):
663 if stdout.startswith(prefix):
613 stdout = stdout[len(prefix):]
664 stdout = stdout[len(prefix):]
614 return stdout.strip()
665 return stdout.strip()
615
666
616 @reraise_safe_exceptions
667 @reraise_safe_exceptions
617 def run_git_command(self, wire, cmd, **opts):
668 def run_git_command(self, wire, cmd, **opts):
618 path = wire.get('path', None)
669 path = wire.get('path', None)
619
670
620 if path and os.path.isdir(path):
671 if path and os.path.isdir(path):
621 opts['cwd'] = path
672 opts['cwd'] = path
622
673
623 if '_bare' in opts:
674 if '_bare' in opts:
624 _copts = []
675 _copts = []
625 del opts['_bare']
676 del opts['_bare']
626 else:
677 else:
627 _copts = ['-c', 'core.quotepath=false', ]
678 _copts = ['-c', 'core.quotepath=false', ]
628 safe_call = False
679 safe_call = False
629 if '_safe' in opts:
680 if '_safe' in opts:
630 # no exc on failure
681 # no exc on failure
631 del opts['_safe']
682 del opts['_safe']
632 safe_call = True
683 safe_call = True
633
684
634 if '_copts' in opts:
685 if '_copts' in opts:
635 _copts.extend(opts['_copts'] or [])
686 _copts.extend(opts['_copts'] or [])
636 del opts['_copts']
687 del opts['_copts']
637
688
638 gitenv = os.environ.copy()
689 gitenv = os.environ.copy()
639 gitenv.update(opts.pop('extra_env', {}))
690 gitenv.update(opts.pop('extra_env', {}))
640 # need to clean/fix GIT_DIR!
691 # need to clean/fix GIT_DIR!
641 if 'GIT_DIR' in gitenv:
692 if 'GIT_DIR' in gitenv:
642 del gitenv['GIT_DIR']
693 del gitenv['GIT_DIR']
643 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
694 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
644 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
695 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
645
696
646 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
697 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
698 _opts = {'env': gitenv, 'shell': False}
647
699
648 try:
700 try:
649 _opts = {'env': gitenv, 'shell': False}
650 _opts.update(opts)
701 _opts.update(opts)
651 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
702 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
652
703
653 return ''.join(p), ''.join(p.error)
704 return ''.join(p), ''.join(p.error)
654 except (EnvironmentError, OSError) as err:
705 except (EnvironmentError, OSError) as err:
655 cmd = ' '.join(cmd) # human friendly CMD
706 cmd = ' '.join(cmd) # human friendly CMD
656 tb_err = ("Couldn't run git command (%s).\n"
707 tb_err = ("Couldn't run git command (%s).\n"
657 "Original error was:%s\n" % (cmd, err))
708 "Original error was:%s\n"
709 "Call options:%s\n"
710 % (cmd, err, _opts))
658 log.exception(tb_err)
711 log.exception(tb_err)
659 if safe_call:
712 if safe_call:
660 return '', err
713 return '', err
661 else:
714 else:
662 raise exceptions.VcsException()(tb_err)
715 raise exceptions.VcsException()(tb_err)
663
716
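# Hedged sketch of how run_git_command assembles the final argv; the
# executable name and repository URL are invented here (the real binary path
# comes from settings.GIT_EXECUTABLE).
example_git = 'git'
example_copts = ['-c', 'core.quotepath=false', '-c', 'core.askpass=""']
example_cmd = [example_git] + example_copts + [
    'ls-remote', 'https://example.com/repo.git']
# -> ['git', '-c', 'core.quotepath=false', '-c', 'core.askpass=""',
#     'ls-remote', 'https://example.com/repo.git']
# The command runs with GIT_DIR stripped from the environment and any
# extra_env entries (e.g. GIT_TERMINAL_PROMPT=0) merged in.
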
664 @reraise_safe_exceptions
717 @reraise_safe_exceptions
665 def install_hooks(self, wire, force=False):
718 def install_hooks(self, wire, force=False):
666 from vcsserver.hook_utils import install_git_hooks
719 from vcsserver.hook_utils import install_git_hooks
667 repo = self._factory.repo(wire)
720 repo = self._factory.repo(wire)
668 return install_git_hooks(repo.path, repo.bare, force_create=force)
721 return install_git_hooks(repo.path, repo.bare, force_create=force)
669
722
670
723
671 def str_to_dulwich(value):
724 def str_to_dulwich(value):
672 """
725 """
673 Dulwich 0.10.1a requires `unicode` objects to be passed in.
726 Dulwich 0.10.1a requires `unicode` objects to be passed in.
674 """
727 """
675 return value.decode(settings.WIRE_ENCODING)
728 return value.decode(settings.WIRE_ENCODING)
@@ -1,793 +1,795 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 baseui.setconfig('ui', 'paginate', 'never')
56 baseui.setconfig('ui', 'paginate', 'never')
57 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 # signal in a non-main thread, thus generating a ValueError.
58 # signal in a non-main thread, thus generating a ValueError.
59 baseui.setconfig('worker', 'numcpus', 1)
59 baseui.setconfig('worker', 'numcpus', 1)
60
60
61 # If there is no config for the largefiles extension, we explicitly disable
61 # If there is no config for the largefiles extension, we explicitly disable
62 # it here. This overrides settings from the repository's hgrc file. Recent
62 # it here. This overrides settings from the repository's hgrc file. Recent
63 # mercurial versions enable largefiles in hgrc on clone from a largefile
63 # mercurial versions enable largefiles in hgrc on clone from a largefile
64 # repo.
64 # repo.
65 if not baseui.hasconfig('extensions', 'largefiles'):
65 if not baseui.hasconfig('extensions', 'largefiles'):
66 log.debug('Explicitly disable largefiles extension for repo.')
66 log.debug('Explicitly disable largefiles extension for repo.')
67 baseui.setconfig('extensions', 'largefiles', '!')
67 baseui.setconfig('extensions', 'largefiles', '!')
68
68
69 return baseui
69 return baseui
70
70
71
71
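# Hedged example of the (section, option, value) triples this helper consumes;
# the values below are invented for illustration.
example_repo_config = [
    ('ui', 'username', 'example <user@example.com>'),
    ('extensions', 'largefiles', ''),
    ('phases', 'publish', 'false'),
]
# make_ui_from_config(example_repo_config) applies each triple via
# baseui.setconfig(section, option, value), silences ui output and, because
# 'extensions.largefiles' is present here, skips the explicit disable above.
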
72 def reraise_safe_exceptions(func):
72 def reraise_safe_exceptions(func):
73 """Decorator for converting mercurial exceptions to something neutral."""
73 """Decorator for converting mercurial exceptions to something neutral."""
74 def wrapper(*args, **kwargs):
74 def wrapper(*args, **kwargs):
75 try:
75 try:
76 return func(*args, **kwargs)
76 return func(*args, **kwargs)
77 except (Abort, InterventionRequired) as e:
77 except (Abort, InterventionRequired) as e:
78 raise_from_original(exceptions.AbortException(e))
78 raise_from_original(exceptions.AbortException(e))
79 except RepoLookupError as e:
79 except RepoLookupError as e:
80 raise_from_original(exceptions.LookupException(e))
80 raise_from_original(exceptions.LookupException(e))
81 except RequirementError as e:
81 except RequirementError as e:
82 raise_from_original(exceptions.RequirementException(e))
82 raise_from_original(exceptions.RequirementException(e))
83 except RepoError as e:
83 except RepoError as e:
84 raise_from_original(exceptions.VcsException(e))
84 raise_from_original(exceptions.VcsException(e))
85 except LookupError as e:
85 except LookupError as e:
86 raise_from_original(exceptions.LookupException(e))
86 raise_from_original(exceptions.LookupException(e))
87 except Exception as e:
87 except Exception as e:
88 if not hasattr(e, '_vcs_kind'):
88 if not hasattr(e, '_vcs_kind'):
89 log.exception("Unhandled exception in hg remote call")
89 log.exception("Unhandled exception in hg remote call")
90 raise_from_original(exceptions.UnhandledException(e))
90 raise_from_original(exceptions.UnhandledException(e))
91
91
92 raise
92 raise
93 return wrapper
93 return wrapper
94
94
95
95
96 class MercurialFactory(RepoFactory):
96 class MercurialFactory(RepoFactory):
97 repo_type = 'hg'
97 repo_type = 'hg'
98
98
99 def _create_config(self, config, hooks=True):
99 def _create_config(self, config, hooks=True):
100 if not hooks:
100 if not hooks:
101 hooks_to_clean = frozenset((
101 hooks_to_clean = frozenset((
102 'changegroup.repo_size', 'preoutgoing.pre_pull',
102 'changegroup.repo_size', 'preoutgoing.pre_pull',
103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
104 new_config = []
104 new_config = []
105 for section, option, value in config:
105 for section, option, value in config:
106 if section == 'hooks' and option in hooks_to_clean:
106 if section == 'hooks' and option in hooks_to_clean:
107 continue
107 continue
108 new_config.append((section, option, value))
108 new_config.append((section, option, value))
109 config = new_config
109 config = new_config
110
110
111 baseui = make_ui_from_config(config)
111 baseui = make_ui_from_config(config)
112 return baseui
112 return baseui
113
113
114 def _create_repo(self, wire, create):
114 def _create_repo(self, wire, create):
115 baseui = self._create_config(wire["config"])
115 baseui = self._create_config(wire["config"])
116 return localrepository(baseui, wire["path"], create)
116 return localrepository(baseui, wire["path"], create)
117
117
118
118
119 class HgRemote(object):
119 class HgRemote(object):
120
120
121 def __init__(self, factory):
121 def __init__(self, factory):
122 self._factory = factory
122 self._factory = factory
123
123
124 self._bulk_methods = {
124 self._bulk_methods = {
125 "affected_files": self.ctx_files,
125 "affected_files": self.ctx_files,
126 "author": self.ctx_user,
126 "author": self.ctx_user,
127 "branch": self.ctx_branch,
127 "branch": self.ctx_branch,
128 "children": self.ctx_children,
128 "children": self.ctx_children,
129 "date": self.ctx_date,
129 "date": self.ctx_date,
130 "message": self.ctx_description,
130 "message": self.ctx_description,
131 "parents": self.ctx_parents,
131 "parents": self.ctx_parents,
132 "status": self.ctx_status,
132 "status": self.ctx_status,
133 "obsolete": self.ctx_obsolete,
133 "obsolete": self.ctx_obsolete,
134 "phase": self.ctx_phase,
134 "phase": self.ctx_phase,
135 "hidden": self.ctx_hidden,
135 "hidden": self.ctx_hidden,
136 "_file_paths": self.ctx_list,
136 "_file_paths": self.ctx_list,
137 }
137 }
138
138
139 @reraise_safe_exceptions
139 @reraise_safe_exceptions
140 def discover_hg_version(self):
140 def discover_hg_version(self):
141 from mercurial import util
141 from mercurial import util
142 return util.version()
142 return util.version()
143
143
144 @reraise_safe_exceptions
144 @reraise_safe_exceptions
145 def archive_repo(self, archive_path, mtime, file_info, kind):
145 def archive_repo(self, archive_path, mtime, file_info, kind):
146 if kind == "tgz":
146 if kind == "tgz":
147 archiver = archival.tarit(archive_path, mtime, "gz")
147 archiver = archival.tarit(archive_path, mtime, "gz")
148 elif kind == "tbz2":
148 elif kind == "tbz2":
149 archiver = archival.tarit(archive_path, mtime, "bz2")
149 archiver = archival.tarit(archive_path, mtime, "bz2")
150 elif kind == 'zip':
150 elif kind == 'zip':
151 archiver = archival.zipit(archive_path, mtime)
151 archiver = archival.zipit(archive_path, mtime)
152 else:
152 else:
153 raise exceptions.ArchiveException()(
153 raise exceptions.ArchiveException()(
154 'Remote does not support: "%s".' % kind)
154 'Remote does not support: "%s".' % kind)
155
155
156 for f_path, f_mode, f_is_link, f_content in file_info:
156 for f_path, f_mode, f_is_link, f_content in file_info:
157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 archiver.done()
158 archiver.done()
159
159
160 @reraise_safe_exceptions
160 @reraise_safe_exceptions
161 def bookmarks(self, wire):
161 def bookmarks(self, wire):
162 repo = self._factory.repo(wire)
162 repo = self._factory.repo(wire)
163 return dict(repo._bookmarks)
163 return dict(repo._bookmarks)
164
164
165 @reraise_safe_exceptions
165 @reraise_safe_exceptions
166 def branches(self, wire, normal, closed):
166 def branches(self, wire, normal, closed):
167 repo = self._factory.repo(wire)
167 repo = self._factory.repo(wire)
168 iter_branches = repo.branchmap().iterbranches()
168 iter_branches = repo.branchmap().iterbranches()
169 bt = {}
169 bt = {}
170 for branch_name, _heads, tip, is_closed in iter_branches:
170 for branch_name, _heads, tip, is_closed in iter_branches:
171 if normal and not is_closed:
171 if normal and not is_closed:
172 bt[branch_name] = tip
172 bt[branch_name] = tip
173 if closed and is_closed:
173 if closed and is_closed:
174 bt[branch_name] = tip
174 bt[branch_name] = tip
175
175
176 return bt
176 return bt
177
177
178 @reraise_safe_exceptions
178 @reraise_safe_exceptions
179 def bulk_request(self, wire, rev, pre_load):
179 def bulk_request(self, wire, rev, pre_load):
180 result = {}
180 result = {}
181 for attr in pre_load:
181 for attr in pre_load:
182 try:
182 try:
183 method = self._bulk_methods[attr]
183 method = self._bulk_methods[attr]
184 result[attr] = method(wire, rev)
184 result[attr] = method(wire, rev)
185 except KeyError as e:
185 except KeyError as e:
186 raise exceptions.VcsException(e)(
186 raise exceptions.VcsException(e)(
187 'Unknown bulk attribute: "%s"' % attr)
187 'Unknown bulk attribute: "%s"' % attr)
188 return result
188 return result
189
189
190 @reraise_safe_exceptions
190 @reraise_safe_exceptions
191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 clone(baseui, source, dest, noupdate=not update_after_clone)
193 clone(baseui, source, dest, noupdate=not update_after_clone)
194
194
195 @reraise_safe_exceptions
195 @reraise_safe_exceptions
196 def commitctx(
196 def commitctx(
197 self, wire, message, parents, commit_time, commit_timezone,
197 self, wire, message, parents, commit_time, commit_timezone,
198 user, files, extra, removed, updated):
198 user, files, extra, removed, updated):
199
199
200 def _filectxfn(_repo, memctx, path):
200 def _filectxfn(_repo, memctx, path):
201 """
201 """
202 Marks given path as added/changed/removed in a given _repo. This is
202 Marks given path as added/changed/removed in a given _repo. This is
203 for the internal mercurial commit function.
203 for the internal mercurial commit function.
204 """
204 """
205
205
206 # check if this path is removed
206 # check if this path is removed
207 if path in removed:
207 if path in removed:
208 # returning None is a way to mark node for removal
208 # returning None is a way to mark node for removal
209 return None
209 return None
210
210
211 # check if this path is added
211 # check if this path is added
212 for node in updated:
212 for node in updated:
213 if node['path'] == path:
213 if node['path'] == path:
214 return memfilectx(
214 return memfilectx(
215 _repo,
215 _repo,
216 changectx=memctx,
216 changectx=memctx,
217 path=node['path'],
217 path=node['path'],
218 data=node['content'],
218 data=node['content'],
219 islink=False,
219 islink=False,
220 isexec=bool(node['mode'] & stat.S_IXUSR),
220 isexec=bool(node['mode'] & stat.S_IXUSR),
221 copied=False)
221 copied=False)
222
222
223 raise exceptions.AbortException()(
223 raise exceptions.AbortException()(
224 "Given path haven't been marked as added, "
224 "Given path haven't been marked as added, "
225 "changed or removed (%s)" % path)
225 "changed or removed (%s)" % path)
226
226
227 repo = self._factory.repo(wire)
227 repo = self._factory.repo(wire)
228
228
229 commit_ctx = memctx(
229 commit_ctx = memctx(
230 repo=repo,
230 repo=repo,
231 parents=parents,
231 parents=parents,
232 text=message,
232 text=message,
233 files=files,
233 files=files,
234 filectxfn=_filectxfn,
234 filectxfn=_filectxfn,
235 user=user,
235 user=user,
236 date=(commit_time, commit_timezone),
236 date=(commit_time, commit_timezone),
237 extra=extra)
237 extra=extra)
238
238
239 n = repo.commitctx(commit_ctx)
239 n = repo.commitctx(commit_ctx)
240 new_id = hex(n)
240 new_id = hex(n)
241
241
242 return new_id
242 return new_id
243
243
244 @reraise_safe_exceptions
244 @reraise_safe_exceptions
245 def ctx_branch(self, wire, revision):
245 def ctx_branch(self, wire, revision):
246 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
247 ctx = repo[revision]
247 ctx = repo[revision]
248 return ctx.branch()
248 return ctx.branch()
249
249
250 @reraise_safe_exceptions
250 @reraise_safe_exceptions
251 def ctx_children(self, wire, revision):
251 def ctx_children(self, wire, revision):
252 repo = self._factory.repo(wire)
252 repo = self._factory.repo(wire)
253 ctx = repo[revision]
253 ctx = repo[revision]
254 return [child.rev() for child in ctx.children()]
254 return [child.rev() for child in ctx.children()]
255
255
256 @reraise_safe_exceptions
256 @reraise_safe_exceptions
257 def ctx_date(self, wire, revision):
257 def ctx_date(self, wire, revision):
258 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
259 ctx = repo[revision]
259 ctx = repo[revision]
260 return ctx.date()
260 return ctx.date()
261
261
262 @reraise_safe_exceptions
262 @reraise_safe_exceptions
263 def ctx_description(self, wire, revision):
263 def ctx_description(self, wire, revision):
264 repo = self._factory.repo(wire)
264 repo = self._factory.repo(wire)
265 ctx = repo[revision]
265 ctx = repo[revision]
266 return ctx.description()
266 return ctx.description()
267
267
268 @reraise_safe_exceptions
268 @reraise_safe_exceptions
269 def ctx_diff(
269 def ctx_diff(
270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 repo = self._factory.repo(wire)
271 repo = self._factory.repo(wire)
272 ctx = repo[revision]
272 ctx = repo[revision]
273 result = ctx.diff(
273 result = ctx.diff(
274 git=git, ignore_whitespace=ignore_whitespace, context=context)
274 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 return list(result)
275 return list(result)
276
276
277 @reraise_safe_exceptions
277 @reraise_safe_exceptions
278 def ctx_files(self, wire, revision):
278 def ctx_files(self, wire, revision):
279 repo = self._factory.repo(wire)
279 repo = self._factory.repo(wire)
280 ctx = repo[revision]
280 ctx = repo[revision]
281 return ctx.files()
281 return ctx.files()
282
282
283 @reraise_safe_exceptions
283 @reraise_safe_exceptions
284 def ctx_list(self, path, revision):
284 def ctx_list(self, path, revision):
285 repo = self._factory.repo(path)
285 repo = self._factory.repo(path)
286 ctx = repo[revision]
286 ctx = repo[revision]
287 return list(ctx)
287 return list(ctx)
288
288
289 @reraise_safe_exceptions
289 @reraise_safe_exceptions
290 def ctx_parents(self, wire, revision):
290 def ctx_parents(self, wire, revision):
291 repo = self._factory.repo(wire)
291 repo = self._factory.repo(wire)
292 ctx = repo[revision]
292 ctx = repo[revision]
293 return [parent.rev() for parent in ctx.parents()]
293 return [parent.rev() for parent in ctx.parents()]
294
294
295 @reraise_safe_exceptions
295 @reraise_safe_exceptions
296 def ctx_phase(self, wire, revision):
296 def ctx_phase(self, wire, revision):
297 repo = self._factory.repo(wire)
297 repo = self._factory.repo(wire)
298 ctx = repo[revision]
298 ctx = repo[revision]
299 # public=0, draft=1, secret=3
299 # public=0, draft=1, secret=3
300 return ctx.phase()
300 return ctx.phase()
301
301
302 @reraise_safe_exceptions
302 @reraise_safe_exceptions
303 def ctx_obsolete(self, wire, revision):
303 def ctx_obsolete(self, wire, revision):
304 repo = self._factory.repo(wire)
304 repo = self._factory.repo(wire)
305 ctx = repo[revision]
305 ctx = repo[revision]
306 return ctx.obsolete()
306 return ctx.obsolete()
307
307
308 @reraise_safe_exceptions
308 @reraise_safe_exceptions
309 def ctx_hidden(self, wire, revision):
309 def ctx_hidden(self, wire, revision):
310 repo = self._factory.repo(wire)
310 repo = self._factory.repo(wire)
311 ctx = repo[revision]
311 ctx = repo[revision]
312 return ctx.hidden()
312 return ctx.hidden()
313
313
314 @reraise_safe_exceptions
314 @reraise_safe_exceptions
315 def ctx_substate(self, wire, revision):
315 def ctx_substate(self, wire, revision):
316 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
317 ctx = repo[revision]
317 ctx = repo[revision]
318 return ctx.substate
318 return ctx.substate
319
319
320 @reraise_safe_exceptions
320 @reraise_safe_exceptions
321 def ctx_status(self, wire, revision):
321 def ctx_status(self, wire, revision):
322 repo = self._factory.repo(wire)
322 repo = self._factory.repo(wire)
323 ctx = repo[revision]
323 ctx = repo[revision]
324 status = repo[ctx.p1().node()].status(other=ctx.node())
324 status = repo[ctx.p1().node()].status(other=ctx.node())
325 # object of status (odd, custom named tuple in mercurial) is not
325 # object of status (odd, custom named tuple in mercurial) is not
326 # correctly serializable, we make it a list, as the underlying
326 # correctly serializable, we make it a list, as the underlying
327 # API expects this to be a list
327 # API expects this to be a list
328 return list(status)
328 return list(status)
329
329
330 @reraise_safe_exceptions
330 @reraise_safe_exceptions
331 def ctx_user(self, wire, revision):
331 def ctx_user(self, wire, revision):
332 repo = self._factory.repo(wire)
332 repo = self._factory.repo(wire)
333 ctx = repo[revision]
333 ctx = repo[revision]
334 return ctx.user()
334 return ctx.user()
335
335
336 @reraise_safe_exceptions
336 @reraise_safe_exceptions
337 def check_url(self, url, config):
337 def check_url(self, url, config):
338 _proto = None
338 _proto = None
339 if '+' in url[:url.find('://')]:
339 if '+' in url[:url.find('://')]:
340 _proto = url[0:url.find('+')]
340 _proto = url[0:url.find('+')]
341 url = url[url.find('+') + 1:]
341 url = url[url.find('+') + 1:]
342 handlers = []
342 handlers = []
343 url_obj = url_parser(url)
343 url_obj = url_parser(url)
344 test_uri, authinfo = url_obj.authinfo()
344 test_uri, authinfo = url_obj.authinfo()
345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
346 url_obj.query = obfuscate_qs(url_obj.query)
346 url_obj.query = obfuscate_qs(url_obj.query)
347
347
348 cleaned_uri = str(url_obj)
348 cleaned_uri = str(url_obj)
349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
350
350
351 if authinfo:
351 if authinfo:
352 # create a password manager
352 # create a password manager
353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
354 passmgr.add_password(*authinfo)
354 passmgr.add_password(*authinfo)
355
355
356 handlers.extend((httpbasicauthhandler(passmgr),
356 handlers.extend((httpbasicauthhandler(passmgr),
357 httpdigestauthhandler(passmgr)))
357 httpdigestauthhandler(passmgr)))
358
358
359 o = urllib2.build_opener(*handlers)
359 o = urllib2.build_opener(*handlers)
360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
361 ('Accept', 'application/mercurial-0.1')]
361 ('Accept', 'application/mercurial-0.1')]
362
362
363 q = {"cmd": 'between'}
363 q = {"cmd": 'between'}
364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
365 qs = '?%s' % urllib.urlencode(q)
365 qs = '?%s' % urllib.urlencode(q)
366 cu = "%s%s" % (test_uri, qs)
366 cu = "%s%s" % (test_uri, qs)
367 req = urllib2.Request(cu, None, {})
367 req = urllib2.Request(cu, None, {})
368
368
369 try:
369 try:
370 log.debug("Trying to open URL %s", cleaned_uri)
370 log.debug("Trying to open URL %s", cleaned_uri)
371 resp = o.open(req)
371 resp = o.open(req)
372 if resp.code != 200:
372 if resp.code != 200:
373 raise exceptions.URLError()('Return Code is not 200')
373 raise exceptions.URLError()('Return Code is not 200')
374 except Exception as e:
374 except Exception as e:
375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
376 # means it cannot be cloned
376 # means it cannot be cloned
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
378
378
379 # now check if it's a proper hg repo, but don't do it for svn
379 # now check if it's a proper hg repo, but don't do it for svn
380 try:
380 try:
381 if _proto == 'svn':
381 if _proto == 'svn':
382 pass
382 pass
383 else:
383 else:
384 # check for pure hg repos
384 # check for pure hg repos
385 log.debug(
385 log.debug(
386 "Verifying if URL is a Mercurial repository: %s",
386 "Verifying if URL is a Mercurial repository: %s",
387 cleaned_uri)
387 cleaned_uri)
388 ui = make_ui_from_config(config)
388 ui = make_ui_from_config(config)
389 peer_checker = makepeer(ui, url)
389 peer_checker = makepeer(ui, url)
390 peer_checker.lookup('tip')
390 peer_checker.lookup('tip')
391 except Exception as e:
391 except Exception as e:
392 log.warning("URL is not a valid Mercurial repository: %s",
392 log.warning("URL is not a valid Mercurial repository: %s",
393 cleaned_uri)
393 cleaned_uri)
394 raise exceptions.URLError(e)(
394 raise exceptions.URLError(e)(
395 "url [%s] does not look like an hg repo org_exc: %s"
395 "url [%s] does not look like an hg repo org_exc: %s"
396 % (cleaned_uri, e))
396 % (cleaned_uri, e))
397
397
398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
399 return True
399 return True
400
400
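# A minimal sketch of the "<vcs>+<scheme>://" convention handled at the top of
# check_url; `split_vcs_scheme` and the example URLs are hypothetical, shown
# only to make the parsing explicit.
def split_vcs_scheme(url):
    head = url[:url.find('://')]
    if '+' in head:
        return url[:url.find('+')], url[url.find('+') + 1:]
    return None, url

# split_vcs_scheme('svn+http://example.com/repo') -> ('svn', 'http://example.com/repo')
# split_vcs_scheme('http://example.com/repo')     -> (None, 'http://example.com/repo')
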
401 @reraise_safe_exceptions
401 @reraise_safe_exceptions
402 def diff(
402 def diff(
403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
404 context):
404 context):
405 repo = self._factory.repo(wire)
405 repo = self._factory.repo(wire)
406
406
407 if file_filter:
407 if file_filter:
408 match_filter = match(file_filter[0], '', [file_filter[1]])
408 match_filter = match(file_filter[0], '', [file_filter[1]])
409 else:
409 else:
410 match_filter = file_filter
410 match_filter = file_filter
411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
412
412
413 try:
413 try:
414 return "".join(patch.diff(
414 return "".join(patch.diff(
415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
416 except RepoLookupError as e:
416 except RepoLookupError as e:
417 raise exceptions.LookupException(e)()
417 raise exceptions.LookupException(e)()
418
418
419 @reraise_safe_exceptions
419 @reraise_safe_exceptions
420 def file_history(self, wire, revision, path, limit):
420 def file_history(self, wire, revision, path, limit):
421 repo = self._factory.repo(wire)
421 repo = self._factory.repo(wire)
422
422
423 ctx = repo[revision]
423 ctx = repo[revision]
424 fctx = ctx.filectx(path)
424 fctx = ctx.filectx(path)
425
425
426 def history_iter():
426 def history_iter():
427 limit_rev = fctx.rev()
427 limit_rev = fctx.rev()
428 for obj in reversed(list(fctx.filelog())):
428 for obj in reversed(list(fctx.filelog())):
429 obj = fctx.filectx(obj)
429 obj = fctx.filectx(obj)
430 if limit_rev >= obj.rev():
430 if limit_rev >= obj.rev():
431 yield obj
431 yield obj
432
432
433 history = []
433 history = []
434 for cnt, obj in enumerate(history_iter()):
434 for cnt, obj in enumerate(history_iter()):
435 if limit and cnt >= limit:
435 if limit and cnt >= limit:
436 break
436 break
437 history.append(hex(obj.node()))
437 history.append(hex(obj.node()))
438
438
439 return [x for x in history]
439 return [x for x in history]
440
440
441 @reraise_safe_exceptions
441 @reraise_safe_exceptions
442 def file_history_untill(self, wire, revision, path, limit):
442 def file_history_untill(self, wire, revision, path, limit):
443 repo = self._factory.repo(wire)
443 repo = self._factory.repo(wire)
444 ctx = repo[revision]
444 ctx = repo[revision]
445 fctx = ctx.filectx(path)
445 fctx = ctx.filectx(path)
446
446
447 file_log = list(fctx.filelog())
447 file_log = list(fctx.filelog())
448 if limit:
448 if limit:
449 # Limit to the last n items
449 # Limit to the last n items
450 file_log = file_log[-limit:]
450 file_log = file_log[-limit:]
451
451
452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
453
453
454 @reraise_safe_exceptions
454 @reraise_safe_exceptions
455 def fctx_annotate(self, wire, revision, path):
455 def fctx_annotate(self, wire, revision, path):
456 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
457 ctx = repo[revision]
457 ctx = repo[revision]
458 fctx = ctx.filectx(path)
458 fctx = ctx.filectx(path)
459
459
460 result = []
460 result = []
461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
462 ln_no = i
462 ln_no = i
463 sha = hex(annotate_obj.fctx.node())
463 sha = hex(annotate_obj.fctx.node())
464 content = annotate_obj.text
464 content = annotate_obj.text
465 result.append((ln_no, sha, content))
465 result.append((ln_no, sha, content))
466 return result
466 return result
467
467
468 @reraise_safe_exceptions
468 @reraise_safe_exceptions
469 def fctx_data(self, wire, revision, path):
469 def fctx_data(self, wire, revision, path):
470 repo = self._factory.repo(wire)
470 repo = self._factory.repo(wire)
471 ctx = repo[revision]
471 ctx = repo[revision]
472 fctx = ctx.filectx(path)
472 fctx = ctx.filectx(path)
473 return fctx.data()
473 return fctx.data()
474
474
475 @reraise_safe_exceptions
475 @reraise_safe_exceptions
476 def fctx_flags(self, wire, revision, path):
476 def fctx_flags(self, wire, revision, path):
477 repo = self._factory.repo(wire)
477 repo = self._factory.repo(wire)
478 ctx = repo[revision]
478 ctx = repo[revision]
479 fctx = ctx.filectx(path)
479 fctx = ctx.filectx(path)
480 return fctx.flags()
480 return fctx.flags()
481
481
482 @reraise_safe_exceptions
482 @reraise_safe_exceptions
483 def fctx_size(self, wire, revision, path):
483 def fctx_size(self, wire, revision, path):
484 repo = self._factory.repo(wire)
484 repo = self._factory.repo(wire)
485 ctx = repo[revision]
485 ctx = repo[revision]
486 fctx = ctx.filectx(path)
486 fctx = ctx.filectx(path)
487 return fctx.size()
487 return fctx.size()
488
488
489 @reraise_safe_exceptions
489 @reraise_safe_exceptions
490 def get_all_commit_ids(self, wire, name):
490 def get_all_commit_ids(self, wire, name):
491 repo = self._factory.repo(wire)
491 repo = self._factory.repo(wire)
492 revs = repo.filtered(name).changelog.index
492 revs = repo.filtered(name).changelog.index
493 return map(lambda x: hex(x[7]), revs)[:-1]
493 return map(lambda x: hex(x[7]), revs)[:-1]
494
494
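# Note on get_all_commit_ids above: each changelog index entry is a tuple whose
# element 7 is the binary node, and the index ends with a sentinel entry for
# the null revision, which the [:-1] slice drops.
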
495 @reraise_safe_exceptions
495 @reraise_safe_exceptions
496 def get_config_value(self, wire, section, name, untrusted=False):
496 def get_config_value(self, wire, section, name, untrusted=False):
497 repo = self._factory.repo(wire)
497 repo = self._factory.repo(wire)
498 return repo.ui.config(section, name, untrusted=untrusted)
498 return repo.ui.config(section, name, untrusted=untrusted)
499
499
500 @reraise_safe_exceptions
500 @reraise_safe_exceptions
501 def get_config_bool(self, wire, section, name, untrusted=False):
501 def get_config_bool(self, wire, section, name, untrusted=False):
502 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
503 return repo.ui.configbool(section, name, untrusted=untrusted)
503 return repo.ui.configbool(section, name, untrusted=untrusted)
504
504
505 @reraise_safe_exceptions
505 @reraise_safe_exceptions
506 def get_config_list(self, wire, section, name, untrusted=False):
506 def get_config_list(self, wire, section, name, untrusted=False):
507 repo = self._factory.repo(wire)
507 repo = self._factory.repo(wire)
508 return repo.ui.configlist(section, name, untrusted=untrusted)
508 return repo.ui.configlist(section, name, untrusted=untrusted)
509
509
510 @reraise_safe_exceptions
510 @reraise_safe_exceptions
511 def is_large_file(self, wire, path):
511 def is_large_file(self, wire, path):
512 return largefiles.lfutil.isstandin(path)
512 return largefiles.lfutil.isstandin(path)
513
513
514 @reraise_safe_exceptions
514 @reraise_safe_exceptions
515 def in_largefiles_store(self, wire, sha):
515 def in_largefiles_store(self, wire, sha):
516 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
517 return largefiles.lfutil.instore(repo, sha)
517 return largefiles.lfutil.instore(repo, sha)
518
518
519 @reraise_safe_exceptions
519 @reraise_safe_exceptions
520 def in_user_cache(self, wire, sha):
520 def in_user_cache(self, wire, sha):
521 repo = self._factory.repo(wire)
521 repo = self._factory.repo(wire)
522 return largefiles.lfutil.inusercache(repo.ui, sha)
522 return largefiles.lfutil.inusercache(repo.ui, sha)
523
523
524 @reraise_safe_exceptions
524 @reraise_safe_exceptions
525 def store_path(self, wire, sha):
525 def store_path(self, wire, sha):
526 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
527 return largefiles.lfutil.storepath(repo, sha)
527 return largefiles.lfutil.storepath(repo, sha)
528
528
529 @reraise_safe_exceptions
529 @reraise_safe_exceptions
530 def link(self, wire, sha, path):
530 def link(self, wire, sha, path):
531 repo = self._factory.repo(wire)
531 repo = self._factory.repo(wire)
532 largefiles.lfutil.link(
532 largefiles.lfutil.link(
533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
534
534
535 @reraise_safe_exceptions
535 @reraise_safe_exceptions
536 def localrepository(self, wire, create=False):
536 def localrepository(self, wire, create=False):
537 self._factory.repo(wire, create=create)
537 self._factory.repo(wire, create=create)
538
538
539 @reraise_safe_exceptions
539 @reraise_safe_exceptions
540 def lookup(self, wire, revision, both):
540 def lookup(self, wire, revision, both):
541
541
542 repo = self._factory.repo(wire)
542 repo = self._factory.repo(wire)
543
543
544 if isinstance(revision, int):
544 if isinstance(revision, int):
545 # NOTE(marcink):
545 # NOTE(marcink):
546 # since Mercurial doesn't support indexes properly,
546 # since Mercurial doesn't support indexes properly,
547 # we need to shift by one to get the proper index, e.g.
547 # we need to shift by one to get the proper index, e.g.
548 # repo[-1] => repo[-2]
548 # repo[-1] => repo[-2]
549 # repo[0] => repo[-1]
549 # repo[0] => repo[-1]
550 # repo[1] => repo[2]; we also never call repo[0] because
550 # repo[1] => repo[2]; we also never call repo[0] because
551 # it is actually the second commit
551 # it is actually the second commit
552 if revision <= 0:
552 if revision <= 0:
553 revision = revision + -1
553 revision = revision + -1
554 else:
554 else:
555 revision = revision + 1
555 revision = revision + 1
556
556
557 try:
557 try:
558 ctx = repo[revision]
558 ctx = repo[revision]
559 except RepoLookupError as e:
559 except RepoLookupError as e:
560 raise exceptions.LookupException(e)(revision)
560 raise exceptions.LookupException(e)(revision)
561 except LookupError as e:
561 except LookupError as e:
562 raise exceptions.LookupException(e)(e.name)
562 raise exceptions.LookupException(e)(e.name)
563
563
564 if not both:
564 if not both:
565 return ctx.hex()
565 return ctx.hex()
566
566
567 ctx = repo[ctx.hex()]
567 ctx = repo[ctx.hex()]
568 return ctx.hex(), ctx.rev()
568 return ctx.hex(), ctx.rev()
569
569
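# A minimal sketch of the shift applied in lookup above; `shift_index` is a
# hypothetical helper that mirrors the in-place logic, shown to make the
# mapping explicit.
def shift_index(revision):
    if revision <= 0:
        return revision - 1   # repo[0] -> repo[-1], repo[-1] -> repo[-2]
    return revision + 1       # repo[1] -> repo[2]

assert shift_index(0) == -1 and shift_index(-1) == -2 and shift_index(1) == 2
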
570 @reraise_safe_exceptions
570 @reraise_safe_exceptions
571 def pull(self, wire, url, commit_ids=None):
571 def pull(self, wire, url, commit_ids=None):
572 repo = self._factory.repo(wire)
572 repo = self._factory.repo(wire)
573 # Disable any prompts for this repo
573 # Disable any prompts for this repo
574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
575
575
576 remote = peer(repo, {}, url)
576 remote = peer(repo, {}, url)
577 # Disable any prompts for this remote
577 # Disable any prompts for this remote
578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
579
579
580 if commit_ids:
580 if commit_ids:
581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
582
582
583 return exchange.pull(
583 return exchange.pull(
584 repo, remote, heads=commit_ids, force=None).cgresult
584 repo, remote, heads=commit_ids, force=None).cgresult
585
585
586 @reraise_safe_exceptions
586 @reraise_safe_exceptions
587 def sync_push(self, wire, url):
587 def sync_push(self, wire, url):
588 if self.check_url(url, wire['config']):
588 if not self.check_url(url, wire['config']):
589 return
590
589 repo = self._factory.repo(wire)
591 repo = self._factory.repo(wire)
590
592
591 # Disable any prompts for this repo
593 # Disable any prompts for this repo
592 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
594 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
593
595
594 bookmarks = dict(repo._bookmarks).keys()
596 bookmarks = dict(repo._bookmarks).keys()
595 remote = peer(repo, {}, url)
597 remote = peer(repo, {}, url)
596 # Disable any prompts for this remote
598 # Disable any prompts for this remote
597 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
599 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
598
600
599 return exchange.push(
601 return exchange.push(
600 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
602 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
601
603
602 @reraise_safe_exceptions
604 @reraise_safe_exceptions
603 def revision(self, wire, rev):
605 def revision(self, wire, rev):
604 repo = self._factory.repo(wire)
606 repo = self._factory.repo(wire)
605 ctx = repo[rev]
607 ctx = repo[rev]
606 return ctx.rev()
608 return ctx.rev()
607
609
608 @reraise_safe_exceptions
610 @reraise_safe_exceptions
609 def rev_range(self, wire, filter):
611 def rev_range(self, wire, filter):
610 repo = self._factory.repo(wire)
612 repo = self._factory.repo(wire)
611 revisions = [rev for rev in revrange(repo, filter)]
613 revisions = [rev for rev in revrange(repo, filter)]
612 return revisions
614 return revisions
613
615
614 @reraise_safe_exceptions
616 @reraise_safe_exceptions
615 def rev_range_hash(self, wire, node):
617 def rev_range_hash(self, wire, node):
616 repo = self._factory.repo(wire)
618 repo = self._factory.repo(wire)
617
619
618 def get_revs(repo, rev_opt):
620 def get_revs(repo, rev_opt):
619 if rev_opt:
621 if rev_opt:
620 revs = revrange(repo, rev_opt)
622 revs = revrange(repo, rev_opt)
621 if len(revs) == 0:
623 if len(revs) == 0:
622 return (nullrev, nullrev)
624 return (nullrev, nullrev)
623 return max(revs), min(revs)
625 return max(revs), min(revs)
624 else:
626 else:
625 return len(repo) - 1, 0
627 return len(repo) - 1, 0
626
628
627 stop, start = get_revs(repo, [node + ':'])
629 stop, start = get_revs(repo, [node + ':'])
628 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
630 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
629 return revs
631 return revs
630
632
631 @reraise_safe_exceptions
633 @reraise_safe_exceptions
632 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
634 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
633 other_path = kwargs.pop('other_path', None)
635 other_path = kwargs.pop('other_path', None)
634
636
635 # case when we want to compare two independent repositories
637 # case when we want to compare two independent repositories
636 if other_path and other_path != wire["path"]:
638 if other_path and other_path != wire["path"]:
637 baseui = self._factory._create_config(wire["config"])
639 baseui = self._factory._create_config(wire["config"])
638 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
640 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
639 else:
641 else:
640 repo = self._factory.repo(wire)
642 repo = self._factory.repo(wire)
641 return list(repo.revs(rev_spec, *args))
643 return list(repo.revs(rev_spec, *args))
642
644
643 @reraise_safe_exceptions
645 @reraise_safe_exceptions
644 def strip(self, wire, revision, update, backup):
646 def strip(self, wire, revision, update, backup):
645 repo = self._factory.repo(wire)
647 repo = self._factory.repo(wire)
646 ctx = repo[revision]
648 ctx = repo[revision]
647 hgext_strip(
649 hgext_strip(
648 repo.baseui, repo, ctx.node(), update=update, backup=backup)
650 repo.baseui, repo, ctx.node(), update=update, backup=backup)
649
651
650 @reraise_safe_exceptions
652 @reraise_safe_exceptions
651 def verify(self, wire,):
653 def verify(self, wire,):
652 repo = self._factory.repo(wire)
654 repo = self._factory.repo(wire)
653 baseui = self._factory._create_config(wire['config'])
655 baseui = self._factory._create_config(wire['config'])
654 baseui.setconfig('ui', 'quiet', 'false')
656 baseui.setconfig('ui', 'quiet', 'false')
655 output = io.BytesIO()
657 output = io.BytesIO()
656
658
657 def write(data, **unused_kwargs):
659 def write(data, **unused_kwargs):
658 output.write(data)
660 output.write(data)
659 baseui.write = write
661 baseui.write = write
660
662
661 repo.ui = baseui
663 repo.ui = baseui
662 verify.verify(repo)
664 verify.verify(repo)
663 return output.getvalue()
665 return output.getvalue()
664
666
665 @reraise_safe_exceptions
667 @reraise_safe_exceptions
666 def tag(self, wire, name, revision, message, local, user,
668 def tag(self, wire, name, revision, message, local, user,
667 tag_time, tag_timezone):
669 tag_time, tag_timezone):
668 repo = self._factory.repo(wire)
670 repo = self._factory.repo(wire)
669 ctx = repo[revision]
671 ctx = repo[revision]
670 node = ctx.node()
672 node = ctx.node()
671
673
672 date = (tag_time, tag_timezone)
674 date = (tag_time, tag_timezone)
673 try:
675 try:
674 hg_tag.tag(repo, name, node, message, local, user, date)
676 hg_tag.tag(repo, name, node, message, local, user, date)
675 except Abort as e:
677 except Abort as e:
676 log.exception("Tag operation aborted")
678 log.exception("Tag operation aborted")
677 # Exception can contain unicode which we convert
679 # Exception can contain unicode which we convert
678 raise exceptions.AbortException(e)(repr(e))
680 raise exceptions.AbortException(e)(repr(e))
679
681
680 @reraise_safe_exceptions
682 @reraise_safe_exceptions
681 def tags(self, wire):
683 def tags(self, wire):
682 repo = self._factory.repo(wire)
684 repo = self._factory.repo(wire)
683 return repo.tags()
685 return repo.tags()
684
686
685 @reraise_safe_exceptions
687 @reraise_safe_exceptions
686 def update(self, wire, node=None, clean=False):
688 def update(self, wire, node=None, clean=False):
687 repo = self._factory.repo(wire)
689 repo = self._factory.repo(wire)
688 baseui = self._factory._create_config(wire['config'])
690 baseui = self._factory._create_config(wire['config'])
689 commands.update(baseui, repo, node=node, clean=clean)
691 commands.update(baseui, repo, node=node, clean=clean)
690
692
691 @reraise_safe_exceptions
693 @reraise_safe_exceptions
692 def identify(self, wire):
694 def identify(self, wire):
693 repo = self._factory.repo(wire)
695 repo = self._factory.repo(wire)
694 baseui = self._factory._create_config(wire['config'])
696 baseui = self._factory._create_config(wire['config'])
695 output = io.BytesIO()
697 output = io.BytesIO()
696 baseui.write = output.write
698 baseui.write = output.write
697 # This is required to get a full node id
699 # This is required to get a full node id
698 baseui.debugflag = True
700 baseui.debugflag = True
699 commands.identify(baseui, repo, id=True)
701 commands.identify(baseui, repo, id=True)
700
702
701 return output.getvalue()
703 return output.getvalue()
702
704
703 @reraise_safe_exceptions
705 @reraise_safe_exceptions
704 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
706 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
705 hooks=True):
707 hooks=True):
706 repo = self._factory.repo(wire)
708 repo = self._factory.repo(wire)
707 baseui = self._factory._create_config(wire['config'], hooks=hooks)
709 baseui = self._factory._create_config(wire['config'], hooks=hooks)
708
710
709 # Mercurial internally has a lot of logic that checks ONLY if
711 # Mercurial internally has a lot of logic that checks ONLY if
710 # an option is defined, so we only pass options that are actually set
712 # an option is defined, so we only pass options that are actually set
711 opts = {}
713 opts = {}
712 if bookmark:
714 if bookmark:
713 opts['bookmark'] = bookmark
715 opts['bookmark'] = bookmark
714 if branch:
716 if branch:
715 opts['branch'] = branch
717 opts['branch'] = branch
716 if revision:
718 if revision:
717 opts['rev'] = revision
719 opts['rev'] = revision
718
720
719 commands.pull(baseui, repo, source, **opts)
721 commands.pull(baseui, repo, source, **opts)
720
722
721 @reraise_safe_exceptions
723 @reraise_safe_exceptions
722 def heads(self, wire, branch=None):
724 def heads(self, wire, branch=None):
723 repo = self._factory.repo(wire)
725 repo = self._factory.repo(wire)
724 baseui = self._factory._create_config(wire['config'])
726 baseui = self._factory._create_config(wire['config'])
725 output = io.BytesIO()
727 output = io.BytesIO()
726
728
727 def write(data, **unused_kwargs):
729 def write(data, **unused_kwargs):
728 output.write(data)
730 output.write(data)
729
731
730 baseui.write = write
732 baseui.write = write
731 if branch:
733 if branch:
732 args = [branch]
734 args = [branch]
733 else:
735 else:
734 args = []
736 args = []
735 commands.heads(baseui, repo, template='{node} ', *args)
737 commands.heads(baseui, repo, template='{node} ', *args)
736
738
737 return output.getvalue()
739 return output.getvalue()
738
740
739 @reraise_safe_exceptions
741 @reraise_safe_exceptions
740 def ancestor(self, wire, revision1, revision2):
742 def ancestor(self, wire, revision1, revision2):
741 repo = self._factory.repo(wire)
743 repo = self._factory.repo(wire)
742 changelog = repo.changelog
744 changelog = repo.changelog
743 lookup = repo.lookup
745 lookup = repo.lookup
744 a = changelog.ancestor(lookup(revision1), lookup(revision2))
746 a = changelog.ancestor(lookup(revision1), lookup(revision2))
745 return hex(a)
747 return hex(a)
746
748
747 @reraise_safe_exceptions
749 @reraise_safe_exceptions
748 def push(self, wire, revisions, dest_path, hooks=True,
750 def push(self, wire, revisions, dest_path, hooks=True,
749 push_branches=False):
751 push_branches=False):
750 repo = self._factory.repo(wire)
752 repo = self._factory.repo(wire)
751 baseui = self._factory._create_config(wire['config'], hooks=hooks)
753 baseui = self._factory._create_config(wire['config'], hooks=hooks)
752 commands.push(baseui, repo, dest=dest_path, rev=revisions,
754 commands.push(baseui, repo, dest=dest_path, rev=revisions,
753 new_branch=push_branches)
755 new_branch=push_branches)
754
756
755 @reraise_safe_exceptions
757 @reraise_safe_exceptions
756 def merge(self, wire, revision):
758 def merge(self, wire, revision):
757 repo = self._factory.repo(wire)
759 repo = self._factory.repo(wire)
758 baseui = self._factory._create_config(wire['config'])
760 baseui = self._factory._create_config(wire['config'])
759 repo.ui.setconfig('ui', 'merge', 'internal:dump')
761 repo.ui.setconfig('ui', 'merge', 'internal:dump')
760
762
761 # In case sub repositories are used, mercurial prompts the user in
763 # In case sub repositories are used, mercurial prompts the user in
762 # case of merge conflicts or different sub repository sources. By
764 # case of merge conflicts or different sub repository sources. By
763 # setting the interactive flag to `False` mercurial doesn't prompt the
765 # setting the interactive flag to `False` mercurial doesn't prompt the
764 # user but instead uses a default value.
766 # user but instead uses a default value.
765 repo.ui.setconfig('ui', 'interactive', False)
767 repo.ui.setconfig('ui', 'interactive', False)
766
768
767 commands.merge(baseui, repo, rev=revision)
769 commands.merge(baseui, repo, rev=revision)
768
770
769 @reraise_safe_exceptions
771 @reraise_safe_exceptions
770 def commit(self, wire, message, username, close_branch=False):
772 def commit(self, wire, message, username, close_branch=False):
771 repo = self._factory.repo(wire)
773 repo = self._factory.repo(wire)
772 baseui = self._factory._create_config(wire['config'])
774 baseui = self._factory._create_config(wire['config'])
773 repo.ui.setconfig('ui', 'username', username)
775 repo.ui.setconfig('ui', 'username', username)
774 commands.commit(baseui, repo, message=message, close_branch=close_branch)
776 commands.commit(baseui, repo, message=message, close_branch=close_branch)
775
777
776 @reraise_safe_exceptions
778 @reraise_safe_exceptions
777 def rebase(self, wire, source=None, dest=None, abort=False):
779 def rebase(self, wire, source=None, dest=None, abort=False):
778 repo = self._factory.repo(wire)
780 repo = self._factory.repo(wire)
779 baseui = self._factory._create_config(wire['config'])
781 baseui = self._factory._create_config(wire['config'])
780 repo.ui.setconfig('ui', 'merge', 'internal:dump')
782 repo.ui.setconfig('ui', 'merge', 'internal:dump')
781 rebase.rebase(
783 rebase.rebase(
782 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
784 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
783
785
784 @reraise_safe_exceptions
786 @reraise_safe_exceptions
785 def bookmark(self, wire, bookmark, revision=None):
787 def bookmark(self, wire, bookmark, revision=None):
786 repo = self._factory.repo(wire)
788 repo = self._factory.repo(wire)
787 baseui = self._factory._create_config(wire['config'])
789 baseui = self._factory._create_config(wire['config'])
788 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
790 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
789
791
790 @reraise_safe_exceptions
792 @reraise_safe_exceptions
791 def install_hooks(self, wire, force=False):
793 def install_hooks(self, wire, force=False):
792 # we don't need any special hooks for Mercurial
794 # we don't need any special hooks for Mercurial
793 pass
795 pass
@@ -1,658 +1,700 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import os
22 import sys
22 import sys
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import base64
26 import base64
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import simplejson as json
33 import simplejson as json
34
34
35 from vcsserver import exceptions, subprocessio, settings
35 from vcsserver import exceptions, subprocessio, settings
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39
39
40 class HooksHttpClient(object):
40 class HooksHttpClient(object):
41 connection = None
41 connection = None
42
42
43 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
45
45
46 def __call__(self, method, extras):
46 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
49 try:
49 try:
50 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
51 except Exception:
51 except Exception:
52 log.error('Connection failed on %s', connection)
52 log.error('Connection failed on %s', connection)
53 raise
53 raise
54 response = connection.getresponse()
54 response = connection.getresponse()
55 return json.loads(response.read())
55 return json.loads(response.read())
56
56
57 def _serialize(self, hook_name, extras):
57 def _serialize(self, hook_name, extras):
58 data = {
58 data = {
59 'method': hook_name,
59 'method': hook_name,
60 'extras': extras
60 'extras': extras
61 }
61 }
62 return json.dumps(data)
62 return json.dumps(data)
63
63
64
64
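# A minimal sketch of the wire format used by HooksHttpClient above; the
# hooks_uri is a placeholder and no request is made here. The response is
# expected to be JSON carrying at least 'status' and 'output', plus the
# optional exception details consumed by _handle_exception further down.
_sketch_client = HooksHttpClient('127.0.0.1:9999')  # placeholder host:port
_sketch_body = _sketch_client._serialize('pre_push', {'username': 'demo'})
# _sketch_body is a JSON document with the keys "method" and "extras"
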
65 class HooksDummyClient(object):
65 class HooksDummyClient(object):
66 def __init__(self, hooks_module):
66 def __init__(self, hooks_module):
67 self._hooks_module = importlib.import_module(hooks_module)
67 self._hooks_module = importlib.import_module(hooks_module)
68
68
69 def __call__(self, hook_name, extras):
69 def __call__(self, hook_name, extras):
70 with self._hooks_module.Hooks() as hooks:
70 with self._hooks_module.Hooks() as hooks:
71 return getattr(hooks, hook_name)(extras)
71 return getattr(hooks, hook_name)(extras)
72
72
73
73
74 class RemoteMessageWriter(object):
74 class RemoteMessageWriter(object):
75 """Writer base class."""
75 """Writer base class."""
76 def write(self, message):
76 def write(self, message):
77 raise NotImplementedError()
77 raise NotImplementedError()
78
78
79
79
80 class HgMessageWriter(RemoteMessageWriter):
80 class HgMessageWriter(RemoteMessageWriter):
81 """Writer that knows how to send messages to mercurial clients."""
81 """Writer that knows how to send messages to mercurial clients."""
82
82
83 def __init__(self, ui):
83 def __init__(self, ui):
84 self.ui = ui
84 self.ui = ui
85
85
86 def write(self, message):
86 def write(self, message):
87 # TODO: Check why the quiet flag is set by default.
87 # TODO: Check why the quiet flag is set by default.
88 old = self.ui.quiet
88 old = self.ui.quiet
89 self.ui.quiet = False
89 self.ui.quiet = False
90 self.ui.status(message.encode('utf-8'))
90 self.ui.status(message.encode('utf-8'))
91 self.ui.quiet = old
91 self.ui.quiet = old
92
92
93
93
94 class GitMessageWriter(RemoteMessageWriter):
94 class GitMessageWriter(RemoteMessageWriter):
95 """Writer that knows how to send messages to git clients."""
95 """Writer that knows how to send messages to git clients."""
96
96
97 def __init__(self, stdout=None):
97 def __init__(self, stdout=None):
98 self.stdout = stdout or sys.stdout
98 self.stdout = stdout or sys.stdout
99
99
100 def write(self, message):
100 def write(self, message):
101 self.stdout.write(message.encode('utf-8'))
101 self.stdout.write(message.encode('utf-8'))
102
102
103
103
104 class SvnMessageWriter(RemoteMessageWriter):
104 class SvnMessageWriter(RemoteMessageWriter):
105 """Writer that knows how to send messages to svn clients."""
105 """Writer that knows how to send messages to svn clients."""
106
106
107 def __init__(self, stderr=None):
107 def __init__(self, stderr=None):
108 # SVN needs data sent to stderr for back-to-client messaging
108 # SVN needs data sent to stderr for back-to-client messaging
109 self.stderr = stderr or sys.stderr
109 self.stderr = stderr or sys.stderr
110
110
111 def write(self, message):
111 def write(self, message):
112 self.stderr.write(message.encode('utf-8'))
112 self.stderr.write(message.encode('utf-8'))
113
113
114
114
115 def _handle_exception(result):
115 def _handle_exception(result):
116 exception_class = result.get('exception')
116 exception_class = result.get('exception')
117 exception_traceback = result.get('exception_traceback')
117 exception_traceback = result.get('exception_traceback')
118
118
119 if exception_traceback:
119 if exception_traceback:
120 log.error('Got traceback from remote call:%s', exception_traceback)
120 log.error('Got traceback from remote call:%s', exception_traceback)
121
121
122 if exception_class == 'HTTPLockedRC':
122 if exception_class == 'HTTPLockedRC':
123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 elif exception_class == 'HTTPBranchProtected':
124 elif exception_class == 'HTTPBranchProtected':
125 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
125 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
126 elif exception_class == 'RepositoryError':
126 elif exception_class == 'RepositoryError':
127 raise exceptions.VcsException()(*result['exception_args'])
127 raise exceptions.VcsException()(*result['exception_args'])
128 elif exception_class:
128 elif exception_class:
129 raise Exception('Got remote exception "%s" with args "%s"' %
129 raise Exception('Got remote exception "%s" with args "%s"' %
130 (exception_class, result['exception_args']))
130 (exception_class, result['exception_args']))
131
131
132
132
133 def _get_hooks_client(extras):
133 def _get_hooks_client(extras):
134 if 'hooks_uri' in extras:
134 if 'hooks_uri' in extras:
135 protocol = extras.get('hooks_protocol')
135 protocol = extras.get('hooks_protocol')
136 return HooksHttpClient(extras['hooks_uri'])
136 return HooksHttpClient(extras['hooks_uri'])
137 else:
137 else:
138 return HooksDummyClient(extras['hooks_module'])
138 return HooksDummyClient(extras['hooks_module'])
139
139
140
140
141 def _call_hook(hook_name, extras, writer):
141 def _call_hook(hook_name, extras, writer):
142 hooks_client = _get_hooks_client(extras)
142 hooks_client = _get_hooks_client(extras)
143 log.debug('Hooks, using client:%s', hooks_client)
143 log.debug('Hooks, using client:%s', hooks_client)
144 result = hooks_client(hook_name, extras)
144 result = hooks_client(hook_name, extras)
145 log.debug('Hooks got result: %s', result)
145 log.debug('Hooks got result: %s', result)
146
146
147 _handle_exception(result)
147 _handle_exception(result)
148 writer.write(result['output'])
148 writer.write(result['output'])
149
149
150 return result['status']
150 return result['status']
151
151
152
152
153 def _extras_from_ui(ui):
153 def _extras_from_ui(ui):
154 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
154 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
155 if not hook_data:
155 if not hook_data:
156 # maybe it's inside os.environ?
156 # maybe it's inside os.environ?
157 env_hook_data = os.environ.get('RC_SCM_DATA')
157 env_hook_data = os.environ.get('RC_SCM_DATA')
158 if env_hook_data:
158 if env_hook_data:
159 hook_data = env_hook_data
159 hook_data = env_hook_data
160
160
161 extras = {}
161 extras = {}
162 if hook_data:
162 if hook_data:
163 extras = json.loads(hook_data)
163 extras = json.loads(hook_data)
164 return extras
164 return extras
165
165
166
166
167 def _rev_range_hash(repo, node, check_heads=False):
167 def _rev_range_hash(repo, node, check_heads=False):
168
168
169 commits = []
169 commits = []
170 revs = []
170 revs = []
171 start = repo[node].rev()
171 start = repo[node].rev()
172 end = len(repo)
172 end = len(repo)
173 for rev in range(start, end):
173 for rev in range(start, end):
174 revs.append(rev)
174 revs.append(rev)
175 ctx = repo[rev]
175 ctx = repo[rev]
176 commit_id = mercurial.node.hex(ctx.node())
176 commit_id = mercurial.node.hex(ctx.node())
177 branch = ctx.branch()
177 branch = ctx.branch()
178 commits.append((commit_id, branch))
178 commits.append((commit_id, branch))
179
179
180 parent_heads = []
180 parent_heads = []
181 if check_heads:
181 if check_heads:
182 parent_heads = _check_heads(repo, start, end, revs)
182 parent_heads = _check_heads(repo, start, end, revs)
183 return commits, parent_heads
183 return commits, parent_heads
184
184
185
185
186 def _check_heads(repo, start, end, commits):
186 def _check_heads(repo, start, end, commits):
187 changelog = repo.changelog
187 changelog = repo.changelog
188 parents = set()
188 parents = set()
189
189
190 for new_rev in commits:
190 for new_rev in commits:
191 for p in changelog.parentrevs(new_rev):
191 for p in changelog.parentrevs(new_rev):
192 if p == mercurial.node.nullrev:
192 if p == mercurial.node.nullrev:
193 continue
193 continue
194 if p < start:
194 if p < start:
195 parents.add(p)
195 parents.add(p)
196
196
197 for p in parents:
197 for p in parents:
198 branch = repo[p].branch()
198 branch = repo[p].branch()
199 # The heads descending from that parent, on the same branch
199 # The heads descending from that parent, on the same branch
200 parent_heads = set([p])
200 parent_heads = set([p])
201 reachable = set([p])
201 reachable = set([p])
202 for x in xrange(p + 1, end):
202 for x in xrange(p + 1, end):
203 if repo[x].branch() != branch:
203 if repo[x].branch() != branch:
204 continue
204 continue
205 for pp in changelog.parentrevs(x):
205 for pp in changelog.parentrevs(x):
206 if pp in reachable:
206 if pp in reachable:
207 reachable.add(x)
207 reachable.add(x)
208 parent_heads.discard(pp)
208 parent_heads.discard(pp)
209 parent_heads.add(x)
209 parent_heads.add(x)
210 # More than one head? Suggest merging
210 # More than one head? Suggest merging
211 if len(parent_heads) > 1:
211 if len(parent_heads) > 1:
212 return list(parent_heads)
212 return list(parent_heads)
213
213
214 return []
214 return []
215
215
216
216
217 def _get_git_env():
218 env = {}
219 for k, v in os.environ.items():
220 if k.startswith('GIT'):
221 env[k] = v
222
223 # serialized version
224 return [(k, v) for k, v in env.items()]
225
226
227 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
228 env = {}
229 for k, v in os.environ.items():
230 if k.startswith('HG'):
231 env[k] = v
232
233 env['HG_NODE'] = old_rev
234 env['HG_NODE_LAST'] = new_rev
235 env['HG_TXNID'] = txnid
236 env['HG_PENDING'] = repo_path
237
238 return [(k, v) for k, v in env.items()]
239
240
217 def repo_size(ui, repo, **kwargs):
241 def repo_size(ui, repo, **kwargs):
218 extras = _extras_from_ui(ui)
242 extras = _extras_from_ui(ui)
219 return _call_hook('repo_size', extras, HgMessageWriter(ui))
243 return _call_hook('repo_size', extras, HgMessageWriter(ui))
220
244
221
245
222 def pre_pull(ui, repo, **kwargs):
246 def pre_pull(ui, repo, **kwargs):
223 extras = _extras_from_ui(ui)
247 extras = _extras_from_ui(ui)
224 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
248 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
225
249
226
250
227 def pre_pull_ssh(ui, repo, **kwargs):
251 def pre_pull_ssh(ui, repo, **kwargs):
228 extras = _extras_from_ui(ui)
252 extras = _extras_from_ui(ui)
229 if extras and extras.get('SSH'):
253 if extras and extras.get('SSH'):
230 return pre_pull(ui, repo, **kwargs)
254 return pre_pull(ui, repo, **kwargs)
231 return 0
255 return 0
232
256
233
257
234 def post_pull(ui, repo, **kwargs):
258 def post_pull(ui, repo, **kwargs):
235 extras = _extras_from_ui(ui)
259 extras = _extras_from_ui(ui)
236 return _call_hook('post_pull', extras, HgMessageWriter(ui))
260 return _call_hook('post_pull', extras, HgMessageWriter(ui))
237
261
238
262
239 def post_pull_ssh(ui, repo, **kwargs):
263 def post_pull_ssh(ui, repo, **kwargs):
240 extras = _extras_from_ui(ui)
264 extras = _extras_from_ui(ui)
241 if extras and extras.get('SSH'):
265 if extras and extras.get('SSH'):
242 return post_pull(ui, repo, **kwargs)
266 return post_pull(ui, repo, **kwargs)
243 return 0
267 return 0
244
268
245
269
246 def pre_push(ui, repo, node=None, **kwargs):
270 def pre_push(ui, repo, node=None, **kwargs):
247 """
271 """
248 Mercurial pre_push hook
272 Mercurial pre_push hook
249 """
273 """
250 extras = _extras_from_ui(ui)
274 extras = _extras_from_ui(ui)
251 detect_force_push = extras.get('detect_force_push')
275 detect_force_push = extras.get('detect_force_push')
252
276
253 rev_data = []
277 rev_data = []
254 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
278 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
255 branches = collections.defaultdict(list)
279 branches = collections.defaultdict(list)
256 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
280 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
257 for commit_id, branch in commits:
281 for commit_id, branch in commits:
258 branches[branch].append(commit_id)
282 branches[branch].append(commit_id)
259
283
260 for branch, commits in branches.items():
284 for branch, commits in branches.items():
261 old_rev = kwargs.get('node_last') or commits[0]
285 old_rev = kwargs.get('node_last') or commits[0]
262 rev_data.append({
286 rev_data.append({
287 'total_commits': len(commits),
263 'old_rev': old_rev,
288 'old_rev': old_rev,
264 'new_rev': commits[-1],
289 'new_rev': commits[-1],
265 'ref': '',
290 'ref': '',
266 'type': 'branch',
291 'type': 'branch',
267 'name': branch,
292 'name': branch,
268 })
293 })
269
294
270 for push_ref in rev_data:
295 for push_ref in rev_data:
271 push_ref['multiple_heads'] = _heads
296 push_ref['multiple_heads'] = _heads
272
297
298 repo_path = os.path.join(
299 extras.get('repo_store', ''), extras.get('repository', ''))
300 push_ref['hg_env'] = _get_hg_env(
301 old_rev=push_ref['old_rev'],
302 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
303 repo_path=repo_path)
304
305 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
273 extras['commit_ids'] = rev_data
306 extras['commit_ids'] = rev_data
307
274 return _call_hook('pre_push', extras, HgMessageWriter(ui))
308 return _call_hook('pre_push', extras, HgMessageWriter(ui))
275
309
276
310
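# A minimal sketch (placeholder hashes, abridged hg_env) of one entry appended
# to rev_data by pre_push above, after the multiple_heads/hg_env enrichment:
_example_push_ref = {
    'total_commits': 2,
    'old_rev': 'a' * 40,
    'new_rev': 'b' * 40,
    'ref': '',
    'type': 'branch',
    'name': 'default',
    'multiple_heads': [],   # non-empty when detect_force_push finds extra heads
    'hg_env': [('HG_NODE', 'a' * 40), ('HG_NODE_LAST', 'b' * 40)],  # abridged
}
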
277 def pre_push_ssh(ui, repo, node=None, **kwargs):
311 def pre_push_ssh(ui, repo, node=None, **kwargs):
278 extras = _extras_from_ui(ui)
312 extras = _extras_from_ui(ui)
279 if extras.get('SSH'):
313 if extras.get('SSH'):
280 return pre_push(ui, repo, node, **kwargs)
314 return pre_push(ui, repo, node, **kwargs)
281
315
282 return 0
316 return 0
283
317
284
318
285 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
319 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
286 """
320 """
287 Mercurial pre_push hook for SSH
321 Mercurial pre_push hook for SSH
288 """
322 """
289 extras = _extras_from_ui(ui)
323 extras = _extras_from_ui(ui)
290 if extras.get('SSH'):
324 if extras.get('SSH'):
291 permission = extras['SSH_PERMISSIONS']
325 permission = extras['SSH_PERMISSIONS']
292
326
293 if 'repository.write' == permission or 'repository.admin' == permission:
327 if 'repository.write' == permission or 'repository.admin' == permission:
294 return 0
328 return 0
295
329
296 # non-zero ret code
330 # non-zero ret code
297 return 1
331 return 1
298
332
299 return 0
333 return 0
300
334
301
335
302 def post_push(ui, repo, node, **kwargs):
336 def post_push(ui, repo, node, **kwargs):
303 """
337 """
304 Mercurial post_push hook
338 Mercurial post_push hook
305 """
339 """
306 extras = _extras_from_ui(ui)
340 extras = _extras_from_ui(ui)
307
341
308 commit_ids = []
342 commit_ids = []
309 branches = []
343 branches = []
310 bookmarks = []
344 bookmarks = []
311 tags = []
345 tags = []
312
346
313 commits, _heads = _rev_range_hash(repo, node)
347 commits, _heads = _rev_range_hash(repo, node)
314 for commit_id, branch in commits:
348 for commit_id, branch in commits:
315 commit_ids.append(commit_id)
349 commit_ids.append(commit_id)
316 if branch not in branches:
350 if branch not in branches:
317 branches.append(branch)
351 branches.append(branch)
318
352
319 if hasattr(ui, '_rc_pushkey_branches'):
353 if hasattr(ui, '_rc_pushkey_branches'):
320 bookmarks = ui._rc_pushkey_branches
354 bookmarks = ui._rc_pushkey_branches
321
355
356 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
322 extras['commit_ids'] = commit_ids
357 extras['commit_ids'] = commit_ids
323 extras['new_refs'] = {
358 extras['new_refs'] = {
324 'branches': branches,
359 'branches': branches,
325 'bookmarks': bookmarks,
360 'bookmarks': bookmarks,
326 'tags': tags
361 'tags': tags
327 }
362 }
328
363
329 return _call_hook('post_push', extras, HgMessageWriter(ui))
364 return _call_hook('post_push', extras, HgMessageWriter(ui))
330
365
331
366
332 def post_push_ssh(ui, repo, node, **kwargs):
367 def post_push_ssh(ui, repo, node, **kwargs):
333 """
368 """
334 Mercurial post_push hook for SSH
369 Mercurial post_push hook for SSH
335 """
370 """
336 if _extras_from_ui(ui).get('SSH'):
371 if _extras_from_ui(ui).get('SSH'):
337 return post_push(ui, repo, node, **kwargs)
372 return post_push(ui, repo, node, **kwargs)
338 return 0
373 return 0
339
374
340
375
341 def key_push(ui, repo, **kwargs):
376 def key_push(ui, repo, **kwargs):
342 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
377 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
343 # store new bookmarks in our UI object propagated later to post_push
378 # store new bookmarks in our UI object propagated later to post_push
344 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
379 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
345 return
380 return
346
381
347
382
348 # backward compat
383 # backward compat
349 log_pull_action = post_pull
384 log_pull_action = post_pull
350
385
351 # backward compat
386 # backward compat
352 log_push_action = post_push
387 log_push_action = post_push
353
388
354
389
355 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
390 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
356 """
391 """
357 Old hook name: keep here for backward compatibility.
392 Old hook name: keep here for backward compatibility.
358
393
359 This is only required when the installed git hooks are not upgraded.
394 This is only required when the installed git hooks are not upgraded.
360 """
395 """
361 pass
396 pass
362
397
363
398
364 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
399 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
365 """
400 """
366 Old hook name: keep here for backward compatibility.
401 Old hook name: keep here for backward compatibility.
367
402
368 This is only required when the installed git hooks are not upgraded.
403 This is only required when the installed git hooks are not upgraded.
369 """
404 """
370 pass
405 pass
371
406
372
407
373 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
408 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
374
409
375
410
376 def git_pre_pull(extras):
411 def git_pre_pull(extras):
377 """
412 """
378 Pre pull hook.
413 Pre pull hook.
379
414
380 :param extras: dictionary containing the keys defined in simplevcs
415 :param extras: dictionary containing the keys defined in simplevcs
381 :type extras: dict
416 :type extras: dict
382
417
383 :return: status code of the hook. 0 for success.
418 :return: status code of the hook. 0 for success.
384 :rtype: int
419 :rtype: int
385 """
420 """
386 if 'pull' not in extras['hooks']:
421 if 'pull' not in extras['hooks']:
387 return HookResponse(0, '')
422 return HookResponse(0, '')
388
423
389 stdout = io.BytesIO()
424 stdout = io.BytesIO()
390 try:
425 try:
391 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
426 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
392 except Exception as error:
427 except Exception as error:
393 status = 128
428 status = 128
394 stdout.write('ERROR: %s\n' % str(error))
429 stdout.write('ERROR: %s\n' % str(error))
395
430
396 return HookResponse(status, stdout.getvalue())
431 return HookResponse(status, stdout.getvalue())
397
432
398
433
399 def git_post_pull(extras):
434 def git_post_pull(extras):
400 """
435 """
401 Post pull hook.
436 Post pull hook.
402
437
403 :param extras: dictionary containing the keys defined in simplevcs
438 :param extras: dictionary containing the keys defined in simplevcs
404 :type extras: dict
439 :type extras: dict
405
440
406 :return: status code of the hook. 0 for success.
441 :return: status code of the hook. 0 for success.
407 :rtype: int
442 :rtype: int
408 """
443 """
409 if 'pull' not in extras['hooks']:
444 if 'pull' not in extras['hooks']:
410 return HookResponse(0, '')
445 return HookResponse(0, '')
411
446
412 stdout = io.BytesIO()
447 stdout = io.BytesIO()
413 try:
448 try:
414 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
449 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
415 except Exception as error:
450 except Exception as error:
416 status = 128
451 status = 128
417 stdout.write('ERROR: %s\n' % error)
452 stdout.write('ERROR: %s\n' % error)
418
453
419 return HookResponse(status, stdout.getvalue())
454 return HookResponse(status, stdout.getvalue())
420
455
421
456
422 def _parse_git_ref_lines(revision_lines):
457 def _parse_git_ref_lines(revision_lines):
423 rev_data = []
458 rev_data = []
424 for revision_line in revision_lines or []:
459 for revision_line in revision_lines or []:
425 old_rev, new_rev, ref = revision_line.strip().split(' ')
460 old_rev, new_rev, ref = revision_line.strip().split(' ')
426 ref_data = ref.split('/', 2)
461 ref_data = ref.split('/', 2)
427 if ref_data[1] in ('tags', 'heads'):
462 if ref_data[1] in ('tags', 'heads'):
428 rev_data.append({
463 rev_data.append({
464 # NOTE(marcink):
465 # we're unable to tell total_commits for git at this point
466 # but we set the variable for consistency with the hg hooks
467 'total_commits': -1,
429 'old_rev': old_rev,
468 'old_rev': old_rev,
430 'new_rev': new_rev,
469 'new_rev': new_rev,
431 'ref': ref,
470 'ref': ref,
432 'type': ref_data[1],
471 'type': ref_data[1],
433 'name': ref_data[2],
472 'name': ref_data[2],
434 })
473 })
435 return rev_data
474 return rev_data
436
475
437
476
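# A minimal usage sketch with a placeholder ref line; only refs under 'tags'
# or 'heads' are kept, and total_commits is always -1 on the git side here.
_sample_lines = ['%s %s refs/heads/master' % ('0' * 40, 'a' * 40)]
# _parse_git_ref_lines(_sample_lines) ->
#   [{'total_commits': -1, 'old_rev': '0' * 40, 'new_rev': 'a' * 40,
#     'ref': 'refs/heads/master', 'type': 'heads', 'name': 'master'}]
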
438 def git_pre_receive(unused_repo_path, revision_lines, env):
477 def git_pre_receive(unused_repo_path, revision_lines, env):
439 """
478 """
440 Pre push hook.
479 Pre push hook.
441
480
442 :param env: environment dict whose RC_SCM_DATA entry holds the keys defined in simplevcs
481 :param env: environment dict whose RC_SCM_DATA entry holds the keys defined in simplevcs
443 :type env: dict
482 :type env: dict
444
483
445 :return: status code of the hook. 0 for success.
484 :return: status code of the hook. 0 for success.
446 :rtype: int
485 :rtype: int
447 """
486 """
448 extras = json.loads(env['RC_SCM_DATA'])
487 extras = json.loads(env['RC_SCM_DATA'])
449 rev_data = _parse_git_ref_lines(revision_lines)
488 rev_data = _parse_git_ref_lines(revision_lines)
450 if 'push' not in extras['hooks']:
489 if 'push' not in extras['hooks']:
451 return 0
490 return 0
452 empty_commit_id = '0' * 40
491 empty_commit_id = '0' * 40
453
492
454 detect_force_push = extras.get('detect_force_push')
493 detect_force_push = extras.get('detect_force_push')
455
494
456 for push_ref in rev_data:
495 for push_ref in rev_data:
457 # store our git-env which holds the temp store
496 # store our git-env which holds the temp store
458 push_ref['git_env'] = [
497 push_ref['git_env'] = _get_git_env()
459 (k, v) for k, v in os.environ.items() if k.startswith('GIT')]
460 push_ref['pruned_sha'] = ''
498 push_ref['pruned_sha'] = ''
461 if not detect_force_push:
499 if not detect_force_push:
462 # don't check for forced-push when we don't need to
500 # don't check for forced-push when we don't need to
463 continue
501 continue
464
502
465 type_ = push_ref['type']
503 type_ = push_ref['type']
466 new_branch = push_ref['old_rev'] == empty_commit_id
504 new_branch = push_ref['old_rev'] == empty_commit_id
467 if type_ == 'heads' and not new_branch:
505 if type_ == 'heads' and not new_branch:
468 old_rev = push_ref['old_rev']
506 old_rev = push_ref['old_rev']
469 new_rev = push_ref['new_rev']
507 new_rev = push_ref['new_rev']
470 cmd = [settings.GIT_EXECUTABLE, 'rev-list',
508 cmd = [settings.GIT_EXECUTABLE, 'rev-list',
471 old_rev, '^{}'.format(new_rev)]
509 old_rev, '^{}'.format(new_rev)]
472 stdout, stderr = subprocessio.run_command(
510 stdout, stderr = subprocessio.run_command(
473 cmd, env=os.environ.copy())
511 cmd, env=os.environ.copy())
474 # non-empty output means some objects became unreachable, i.e. a forced
512 # non-empty output means some objects became unreachable, i.e. a forced
475 # push was used
513 # push was used
476 if stdout:
514 if stdout:
477 push_ref['pruned_sha'] = stdout.splitlines()
515 push_ref['pruned_sha'] = stdout.splitlines()
478
516
517 extras['hook_type'] = 'pre_receive'
479 extras['commit_ids'] = rev_data
518 extras['commit_ids'] = rev_data
480 return _call_hook('pre_push', extras, GitMessageWriter())
519 return _call_hook('pre_push', extras, GitMessageWriter())
481
520
482
521
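# Note on the force-push detection in git_pre_receive above:
#   git rev-list <old_rev> ^<new_rev>
# lists commits reachable from old_rev but not from new_rev; any output means
# previously pushed history is no longer reachable, i.e. the push was forced.
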
483 def git_post_receive(unused_repo_path, revision_lines, env):
522 def git_post_receive(unused_repo_path, revision_lines, env):
484 """
523 """
485 Post push hook.
524 Post push hook.
486
525
487 :param env: environment dict whose RC_SCM_DATA entry holds the keys defined in simplevcs
526 :param env: environment dict whose RC_SCM_DATA entry holds the keys defined in simplevcs
488 :type env: dict
527 :type env: dict
489
528
490 :return: status code of the hook. 0 for success.
529 :return: status code of the hook. 0 for success.
491 :rtype: int
530 :rtype: int
492 """
531 """
493 extras = json.loads(env['RC_SCM_DATA'])
532 extras = json.loads(env['RC_SCM_DATA'])
494 if 'push' not in extras['hooks']:
533 if 'push' not in extras['hooks']:
495 return 0
534 return 0
496
535
497 rev_data = _parse_git_ref_lines(revision_lines)
536 rev_data = _parse_git_ref_lines(revision_lines)
498
537
499 git_revs = []
538 git_revs = []
500
539
501 # N.B.(skreft): it is ok to just call git, as git before calling a
540 # N.B.(skreft): it is ok to just call git, as git before calling a
502 # subcommand sets the PATH environment variable so that it points to the
541 # subcommand sets the PATH environment variable so that it points to the
503 # correct version of the git executable.
542 # correct version of the git executable.
504 empty_commit_id = '0' * 40
543 empty_commit_id = '0' * 40
505 branches = []
544 branches = []
506 tags = []
545 tags = []
507 for push_ref in rev_data:
546 for push_ref in rev_data:
508 type_ = push_ref['type']
547 type_ = push_ref['type']
509
548
510 if type_ == 'heads':
549 if type_ == 'heads':
511 if push_ref['old_rev'] == empty_commit_id:
550 if push_ref['old_rev'] == empty_commit_id:
512 # starting new branch case
551 # starting new branch case
513 if push_ref['name'] not in branches:
552 if push_ref['name'] not in branches:
514 branches.append(push_ref['name'])
553 branches.append(push_ref['name'])
515
554
516 # Fix up head revision if needed
555 # Fix up head revision if needed
517 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
556 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
518 try:
557 try:
519 subprocessio.run_command(cmd, env=os.environ.copy())
558 subprocessio.run_command(cmd, env=os.environ.copy())
520 except Exception:
559 except Exception:
521 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
560 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
522 'refs/heads/%s' % push_ref['name']]
561 'refs/heads/%s' % push_ref['name']]
523 print("Setting default branch to %s" % push_ref['name'])
562 print("Setting default branch to %s" % push_ref['name'])
524 subprocessio.run_command(cmd, env=os.environ.copy())
563 subprocessio.run_command(cmd, env=os.environ.copy())
525
564
526 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
565 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
527 '--format=%(refname)', 'refs/heads/*']
566 '--format=%(refname)', 'refs/heads/*']
528 stdout, stderr = subprocessio.run_command(
567 stdout, stderr = subprocessio.run_command(
529 cmd, env=os.environ.copy())
568 cmd, env=os.environ.copy())
530 heads = stdout
569 heads = stdout
531 heads = heads.replace(push_ref['ref'], '')
570 heads = heads.replace(push_ref['ref'], '')
532 heads = ' '.join(head for head
571 heads = ' '.join(head for head
533 in heads.splitlines() if head) or '.'
572 in heads.splitlines() if head) or '.'
534 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
573 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
535 '--pretty=format:%H', '--', push_ref['new_rev'],
574 '--pretty=format:%H', '--', push_ref['new_rev'],
536 '--not', heads]
575 '--not', heads]
537 stdout, stderr = subprocessio.run_command(
576 stdout, stderr = subprocessio.run_command(
538 cmd, env=os.environ.copy())
577 cmd, env=os.environ.copy())
539 git_revs.extend(stdout.splitlines())
578 git_revs.extend(stdout.splitlines())
540 elif push_ref['new_rev'] == empty_commit_id:
579 elif push_ref['new_rev'] == empty_commit_id:
541 # delete branch case
580 # delete branch case
542 git_revs.append('delete_branch=>%s' % push_ref['name'])
581 git_revs.append('delete_branch=>%s' % push_ref['name'])
543 else:
582 else:
544 if push_ref['name'] not in branches:
583 if push_ref['name'] not in branches:
545 branches.append(push_ref['name'])
584 branches.append(push_ref['name'])
546
585
547 cmd = [settings.GIT_EXECUTABLE, 'log',
586 cmd = [settings.GIT_EXECUTABLE, 'log',
548 '{old_rev}..{new_rev}'.format(**push_ref),
587 '{old_rev}..{new_rev}'.format(**push_ref),
549 '--reverse', '--pretty=format:%H']
588 '--reverse', '--pretty=format:%H']
550 stdout, stderr = subprocessio.run_command(
589 stdout, stderr = subprocessio.run_command(
551 cmd, env=os.environ.copy())
590 cmd, env=os.environ.copy())
552 git_revs.extend(stdout.splitlines())
591 git_revs.extend(stdout.splitlines())
553 elif type_ == 'tags':
592 elif type_ == 'tags':
554 if push_ref['name'] not in tags:
593 if push_ref['name'] not in tags:
555 tags.append(push_ref['name'])
594 tags.append(push_ref['name'])
556 git_revs.append('tag=>%s' % push_ref['name'])
595 git_revs.append('tag=>%s' % push_ref['name'])
557
596
597 extras['hook_type'] = 'post_receive'
558 extras['commit_ids'] = git_revs
598 extras['commit_ids'] = git_revs
559 extras['new_refs'] = {
599 extras['new_refs'] = {
560 'branches': branches,
600 'branches': branches,
561 'bookmarks': [],
601 'bookmarks': [],
562 'tags': tags,
602 'tags': tags,
563 }
603 }
564
604
565 if 'repo_size' in extras['hooks']:
605 if 'repo_size' in extras['hooks']:
566 try:
606 try:
567 _call_hook('repo_size', extras, GitMessageWriter())
607 _call_hook('repo_size', extras, GitMessageWriter())
568 except Exception:
608 except Exception:
569 pass
609 pass
570
610
571 return _call_hook('post_push', extras, GitMessageWriter())
611 return _call_hook('post_push', extras, GitMessageWriter())
572
612
573
613
574 def _get_extras_from_txn_id(path, txn_id):
614 def _get_extras_from_txn_id(path, txn_id):
575 extras = {}
615 extras = {}
576 try:
616 try:
577 cmd = ['svnlook', 'pget',
617 cmd = ['svnlook', 'pget',
578 '-t', txn_id,
618 '-t', txn_id,
579 '--revprop', path, 'rc-scm-extras']
619 '--revprop', path, 'rc-scm-extras']
580 stdout, stderr = subprocessio.run_command(
620 stdout, stderr = subprocessio.run_command(
581 cmd, env=os.environ.copy())
621 cmd, env=os.environ.copy())
582 extras = json.loads(base64.urlsafe_b64decode(stdout))
622 extras = json.loads(base64.urlsafe_b64decode(stdout))
583 except Exception:
623 except Exception:
584 log.exception('Failed to extract extras info from txn_id')
624 log.exception('Failed to extract extras info from txn_id')
585
625
586 return extras
626 return extras
587
627
588
628
629 def _get_extras_from_commit_id(commit_id, path):
630 extras = {}
631 try:
632 cmd = ['svnlook', 'pget',
633 '-r', commit_id,
634 '--revprop', path, 'rc-scm-extras']
635 stdout, stderr = subprocessio.run_command(
636 cmd, env=os.environ.copy())
637 extras = json.loads(base64.urlsafe_b64decode(stdout))
638 except Exception:
639 log.exception('Failed to extract extras info from commit_id')
640
641 return extras
642
643
589 def svn_pre_commit(repo_path, commit_data, env):
644 def svn_pre_commit(repo_path, commit_data, env):
590 path, txn_id = commit_data
645 path, txn_id = commit_data
591 branches = []
646 branches = []
592 tags = []
647 tags = []
593
648
594 if env.get('RC_SCM_DATA'):
649 if env.get('RC_SCM_DATA'):
595 extras = json.loads(env['RC_SCM_DATA'])
650 extras = json.loads(env['RC_SCM_DATA'])
596 else:
651 else:
597 # fallback method to read from TXN-ID stored data
652 # fallback method to read from TXN-ID stored data
598 extras = _get_extras_from_txn_id(path, txn_id)
653 extras = _get_extras_from_txn_id(path, txn_id)
599 if not extras:
654 if not extras:
600 return 0
655 return 0
601
656
602 extras['commit_ids'] = []
657 extras['commit_ids'] = []
603 extras['txn_id'] = txn_id
658 extras['txn_id'] = txn_id
604 extras['new_refs'] = {
659 extras['new_refs'] = {
660 'total_commits': 1,
605 'branches': branches,
661 'branches': branches,
606 'bookmarks': [],
662 'bookmarks': [],
607 'tags': tags,
663 'tags': tags,
608 }
664 }
609
665
610 return _call_hook('pre_push', extras, SvnMessageWriter())
666 return _call_hook('pre_push', extras, SvnMessageWriter())
611
667
612
668
613 def _get_extras_from_commit_id(commit_id, path):
614 extras = {}
615 try:
616 cmd = ['svnlook', 'pget',
617 '-r', commit_id,
618 '--revprop', path, 'rc-scm-extras']
619 stdout, stderr = subprocessio.run_command(
620 cmd, env=os.environ.copy())
621 extras = json.loads(base64.urlsafe_b64decode(stdout))
622 except Exception:
623 log.exception('Failed to extract extras info from commit_id')
624
625 return extras
626
627
628 def svn_post_commit(repo_path, commit_data, env):
669 def svn_post_commit(repo_path, commit_data, env):
629 """
670 """
630 commit_data is path, rev, txn_id
671 commit_data is path, rev, txn_id
631 """
672 """
632 path, commit_id, txn_id = commit_data
673 path, commit_id, txn_id = commit_data
633 branches = []
674 branches = []
634 tags = []
675 tags = []
635
676
636 if env.get('RC_SCM_DATA'):
677 if env.get('RC_SCM_DATA'):
637 extras = json.loads(env['RC_SCM_DATA'])
678 extras = json.loads(env['RC_SCM_DATA'])
638 else:
679 else:
639 # fallback method to read from TXN-ID stored data
680 # fallback method to read from TXN-ID stored data
640 extras = _get_extras_from_commit_id(commit_id, path)
681 extras = _get_extras_from_commit_id(commit_id, path)
641 if not extras:
682 if not extras:
642 return 0
683 return 0
643
684
644 extras['commit_ids'] = [commit_id]
685 extras['commit_ids'] = [commit_id]
645 extras['txn_id'] = txn_id
686 extras['txn_id'] = txn_id
646 extras['new_refs'] = {
687 extras['new_refs'] = {
647 'branches': branches,
688 'branches': branches,
648 'bookmarks': [],
689 'bookmarks': [],
649 'tags': tags,
690 'tags': tags,
691 'total_commits': 1,
650 }
692 }
651
693
652 if 'repo_size' in extras['hooks']:
694 if 'repo_size' in extras['hooks']:
653 try:
695 try:
654 _call_hook('repo_size', extras, SvnMessageWriter())
696 _call_hook('repo_size', extras, SvnMessageWriter())
655 except Exception:
697 except Exception:
656 pass
698 pass
657
699
658 return _call_hook('post_push', extras, SvnMessageWriter())
700 return _call_hook('post_push', extras, SvnMessageWriter())
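All of the hook entry points above prefer the RC_SCM_DATA environment variable and only fall back to svnlook/transaction properties when it is missing. A hedged sketch of how a caller might prepare that variable; the keys shown are only the ones these hooks read, not the full schema produced by RhodeCode:

import json
import os

scm_data = {
    'hooks': ['push', 'repo_size'],   # gates which hook calls are made
    'detect_force_push': False,       # read by git_pre_receive
}
env = dict(os.environ, RC_SCM_DATA=json.dumps(scm_data))
# e.g. svn_post_commit(repo_path, (path, commit_id, txn_id), env) would then
# decode this JSON instead of shelling out to `svnlook pget ... rc-scm-extras`.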
@@ -1,598 +1,598 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import sys
19 import sys
20 import base64
20 import base64
21 import locale
21 import locale
22 import logging
22 import logging
23 import uuid
23 import uuid
24 import wsgiref.util
24 import wsgiref.util
25 import traceback
25 import traceback
26 import tempfile
26 import tempfile
27 from itertools import chain
27 from itertools import chain
28
28
29 import simplejson as json
29 import simplejson as json
30 import msgpack
30 import msgpack
31 from pyramid.config import Configurator
31 from pyramid.config import Configurator
32 from pyramid.settings import asbool, aslist
32 from pyramid.settings import asbool, aslist
33 from pyramid.wsgi import wsgiapp
33 from pyramid.wsgi import wsgiapp
34 from pyramid.compat import configparser
34 from pyramid.compat import configparser
35
35
36
36
37 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
38
38
39 # due to Mercurial/glibc2.27 problems we need to detect if locale settings
39 # due to Mercurial/glibc2.27 problems we need to detect if locale settings
40 # are broken and, if so, "fix" them by falling back to LC_ALL = C
40 # are broken and, if so, "fix" them by falling back to LC_ALL = C
41
41
42 try:
42 try:
43 locale.setlocale(locale.LC_ALL, '')
43 locale.setlocale(locale.LC_ALL, '')
44 except locale.Error as e:
44 except locale.Error as e:
45 log.error(
45 log.error(
46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
47 os.environ['LC_ALL'] = 'C'
47 os.environ['LC_ALL'] = 'C'
48
48
49 import vcsserver
49 import vcsserver
50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
53 from vcsserver.echo_stub.echo_app import EchoApp
53 from vcsserver.echo_stub.echo_app import EchoApp
54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
55 from vcsserver.lib.exc_tracking import store_exception
55 from vcsserver.lib.exc_tracking import store_exception
56 from vcsserver.server import VcsServer
56 from vcsserver.server import VcsServer
57
57
58 try:
58 try:
59 from vcsserver.git import GitFactory, GitRemote
59 from vcsserver.git import GitFactory, GitRemote
60 except ImportError:
60 except ImportError:
61 GitFactory = None
61 GitFactory = None
62 GitRemote = None
62 GitRemote = None
63
63
64 try:
64 try:
65 from vcsserver.hg import MercurialFactory, HgRemote
65 from vcsserver.hg import MercurialFactory, HgRemote
66 except ImportError:
66 except ImportError:
67 MercurialFactory = None
67 MercurialFactory = None
68 HgRemote = None
68 HgRemote = None
69
69
70 try:
70 try:
71 from vcsserver.svn import SubversionFactory, SvnRemote
71 from vcsserver.svn import SubversionFactory, SvnRemote
72 except ImportError:
72 except ImportError:
73 SubversionFactory = None
73 SubversionFactory = None
74 SvnRemote = None
74 SvnRemote = None
75
75
76
76
77 def _is_request_chunked(environ):
77 def _is_request_chunked(environ):
78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 return stream
79 return stream
80
80
81
81
82 def _int_setting(settings, name, default):
82 def _int_setting(settings, name, default):
83 settings[name] = int(settings.get(name, default))
83 settings[name] = int(settings.get(name, default))
84 return settings[name]
84 return settings[name]
85
85
86
86
87 def _bool_setting(settings, name, default):
87 def _bool_setting(settings, name, default):
88 input_val = settings.get(name, default)
88 input_val = settings.get(name, default)
89 if isinstance(input_val, unicode):
89 if isinstance(input_val, unicode):
90 input_val = input_val.encode('utf8')
90 input_val = input_val.encode('utf8')
91 settings[name] = asbool(input_val)
91 settings[name] = asbool(input_val)
92 return settings[name]
92 return settings[name]
93
93
94
94
95 def _list_setting(settings, name, default):
95 def _list_setting(settings, name, default):
96 raw_value = settings.get(name, default)
96 raw_value = settings.get(name, default)
97
97
98 # We assume the value uses Pyramid's space/newline separation.
98 # We assume the value uses Pyramid's space/newline separation.
99 settings[name] = aslist(raw_value)
99 settings[name] = aslist(raw_value)
100 return settings[name]
100 return settings[name]
101
101
102
102
103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
104 value = settings.get(name, default)
104 value = settings.get(name, default)
105
105
106 if default_when_empty and not value:
106 if default_when_empty and not value:
107 # use default value when value is empty
107 # use default value when value is empty
108 value = default
108 value = default
109
109
110 if lower:
110 if lower:
111 value = value.lower()
111 value = value.lower()
112 settings[name] = value
112 settings[name] = value
113 return settings[name]
113 return settings[name]
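These helpers coerce raw .ini values in place and return the normalized result; _sanitize_settings_and_apply_defaults below builds on them. A small usage sketch, assuming the helpers above are in scope (the keys are illustrative):

settings = {
    'rc_cache.repo_object.expiration_time': '300',
    'dev.use_echo_app': 'False',
    'cache_dir': '',
}
_int_setting(settings, 'rc_cache.repo_object.expiration_time', 300)  # -> 300 (int)
_bool_setting(settings, 'dev.use_echo_app', 'false')                 # -> False
_string_setting(settings, 'cache_dir', '/tmp/rc_cache',
                lower=False, default_when_empty=True)                # -> '/tmp/rc_cache'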
114
114
115
115
116 class VCS(object):
116 class VCS(object):
117 def __init__(self, locale=None, cache_config=None):
117 def __init__(self, locale=None, cache_config=None):
118 self.locale = locale
118 self.locale = locale
119 self.cache_config = cache_config
119 self.cache_config = cache_config
120 self._configure_locale()
120 self._configure_locale()
121
121
122 if GitFactory and GitRemote:
122 if GitFactory and GitRemote:
123 git_factory = GitFactory()
123 git_factory = GitFactory()
124 self._git_remote = GitRemote(git_factory)
124 self._git_remote = GitRemote(git_factory)
125 else:
125 else:
126 log.info("Git client import failed")
126 log.info("Git client import failed")
127
127
128 if MercurialFactory and HgRemote:
128 if MercurialFactory and HgRemote:
129 hg_factory = MercurialFactory()
129 hg_factory = MercurialFactory()
130 self._hg_remote = HgRemote(hg_factory)
130 self._hg_remote = HgRemote(hg_factory)
131 else:
131 else:
132 log.info("Mercurial client import failed")
132 log.info("Mercurial client import failed")
133
133
134 if SubversionFactory and SvnRemote:
134 if SubversionFactory and SvnRemote:
135 svn_factory = SubversionFactory()
135 svn_factory = SubversionFactory()
136
136
137 # hg factory is used for svn url validation
137 # hg factory is used for svn url validation
138 hg_factory = MercurialFactory()
138 hg_factory = MercurialFactory()
139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
140 else:
140 else:
141 log.info("Subversion client import failed")
141 log.info("Subversion client import failed")
142
142
143 self._vcsserver = VcsServer()
143 self._vcsserver = VcsServer()
144
144
145 def _configure_locale(self):
145 def _configure_locale(self):
146 if self.locale:
146 if self.locale:
147 log.info('Setting locale: `LC_ALL` to %s' % self.locale)
147 log.info('Setting locale: `LC_ALL` to %s', self.locale)
148 else:
148 else:
149 log.info(
149 log.info(
150 'Configuring locale subsystem based on environment variables')
150 'Configuring locale subsystem based on environment variables')
151 try:
151 try:
152 # If self.locale is the empty string, then the locale
152 # If self.locale is the empty string, then the locale
153 # module will use the environment variables. See the
153 # module will use the environment variables. See the
154 # documentation of the package `locale`.
154 # documentation of the package `locale`.
155 locale.setlocale(locale.LC_ALL, self.locale)
155 locale.setlocale(locale.LC_ALL, self.locale)
156
156
157 language_code, encoding = locale.getlocale()
157 language_code, encoding = locale.getlocale()
158 log.info(
158 log.info(
159 'Locale set to language code "%s" with encoding "%s".',
159 'Locale set to language code "%s" with encoding "%s".',
160 language_code, encoding)
160 language_code, encoding)
161 except locale.Error:
161 except locale.Error:
162 log.exception(
162 log.exception(
163 'Cannot set locale, not configuring the locale system')
163 'Cannot set locale, not configuring the locale system')
164
164
165
165
166 class WsgiProxy(object):
166 class WsgiProxy(object):
167 def __init__(self, wsgi):
167 def __init__(self, wsgi):
168 self.wsgi = wsgi
168 self.wsgi = wsgi
169
169
170 def __call__(self, environ, start_response):
170 def __call__(self, environ, start_response):
171 input_data = environ['wsgi.input'].read()
171 input_data = environ['wsgi.input'].read()
172 input_data = msgpack.unpackb(input_data)
172 input_data = msgpack.unpackb(input_data)
173
173
174 error = None
174 error = None
175 try:
175 try:
176 data, status, headers = self.wsgi.handle(
176 data, status, headers = self.wsgi.handle(
177 input_data['environment'], input_data['input_data'],
177 input_data['environment'], input_data['input_data'],
178 *input_data['args'], **input_data['kwargs'])
178 *input_data['args'], **input_data['kwargs'])
179 except Exception as e:
179 except Exception as e:
180 data, status, headers = [], None, None
180 data, status, headers = [], None, None
181 error = {
181 error = {
182 'message': str(e),
182 'message': str(e),
183 '_vcs_kind': getattr(e, '_vcs_kind', None)
183 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 }
184 }
185
185
186 start_response(200, {})
186 start_response(200, {})
187 return self._iterator(error, status, headers, data)
187 return self._iterator(error, status, headers, data)
188
188
189 def _iterator(self, error, status, headers, data):
189 def _iterator(self, error, status, headers, data):
190 initial_data = [
190 initial_data = [
191 error,
191 error,
192 status,
192 status,
193 headers,
193 headers,
194 ]
194 ]
195
195
196 for d in chain(initial_data, data):
196 for d in chain(initial_data, data):
197 yield msgpack.packb(d)
197 yield msgpack.packb(d)
198
198
199
199
200 class HTTPApplication(object):
200 class HTTPApplication(object):
201 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
201 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
202
202
203 remote_wsgi = remote_wsgi
203 remote_wsgi = remote_wsgi
204 _use_echo_app = False
204 _use_echo_app = False
205
205
206 def __init__(self, settings=None, global_config=None):
206 def __init__(self, settings=None, global_config=None):
207 self._sanitize_settings_and_apply_defaults(settings)
207 self._sanitize_settings_and_apply_defaults(settings)
208
208
209 self.config = Configurator(settings=settings)
209 self.config = Configurator(settings=settings)
210 self.global_config = global_config
210 self.global_config = global_config
211 self.config.include('vcsserver.lib.rc_cache')
211 self.config.include('vcsserver.lib.rc_cache')
212
212
213 locale = settings.get('locale', '') or 'en_US.UTF-8'
213 locale = settings.get('locale', '') or 'en_US.UTF-8'
214 vcs = VCS(locale=locale, cache_config=settings)
214 vcs = VCS(locale=locale, cache_config=settings)
215 self._remotes = {
215 self._remotes = {
216 'hg': vcs._hg_remote,
216 'hg': vcs._hg_remote,
217 'git': vcs._git_remote,
217 'git': vcs._git_remote,
218 'svn': vcs._svn_remote,
218 'svn': vcs._svn_remote,
219 'server': vcs._vcsserver,
219 'server': vcs._vcsserver,
220 }
220 }
221 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
221 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
222 self._use_echo_app = True
222 self._use_echo_app = True
223 log.warning("Using EchoApp for VCS operations.")
223 log.warning("Using EchoApp for VCS operations.")
224 self.remote_wsgi = remote_wsgi_stub
224 self.remote_wsgi = remote_wsgi_stub
225
225
226 self._configure_settings(global_config, settings)
226 self._configure_settings(global_config, settings)
227 self._configure()
227 self._configure()
228
228
229 def _configure_settings(self, global_config, app_settings):
229 def _configure_settings(self, global_config, app_settings):
230 """
230 """
231 Configure the settings module.
231 Configure the settings module.
232 """
232 """
233 settings_merged = global_config.copy()
233 settings_merged = global_config.copy()
234 settings_merged.update(app_settings)
234 settings_merged.update(app_settings)
235
235
236 git_path = app_settings.get('git_path', None)
236 git_path = app_settings.get('git_path', None)
237 if git_path:
237 if git_path:
238 settings.GIT_EXECUTABLE = git_path
238 settings.GIT_EXECUTABLE = git_path
239 binary_dir = app_settings.get('core.binary_dir', None)
239 binary_dir = app_settings.get('core.binary_dir', None)
240 if binary_dir:
240 if binary_dir:
241 settings.BINARY_DIR = binary_dir
241 settings.BINARY_DIR = binary_dir
242
242
243 # Store the settings to make them available to other modules.
243 # Store the settings to make them available to other modules.
244 vcsserver.PYRAMID_SETTINGS = settings_merged
244 vcsserver.PYRAMID_SETTINGS = settings_merged
245 vcsserver.CONFIG = settings_merged
245 vcsserver.CONFIG = settings_merged
246
246
247 def _sanitize_settings_and_apply_defaults(self, settings):
247 def _sanitize_settings_and_apply_defaults(self, settings):
248 temp_store = tempfile.gettempdir()
248 temp_store = tempfile.gettempdir()
249 default_cache_dir = os.path.join(temp_store, 'rc_cache')
249 default_cache_dir = os.path.join(temp_store, 'rc_cache')
250
250
251 # save the default cache dir and use it for all backends later.
251 # save the default cache dir and use it for all backends later.
252 default_cache_dir = _string_setting(
252 default_cache_dir = _string_setting(
253 settings,
253 settings,
254 'cache_dir',
254 'cache_dir',
255 default_cache_dir, lower=False, default_when_empty=True)
255 default_cache_dir, lower=False, default_when_empty=True)
256
256
257 # ensure we have our dir created
257 # ensure we have our dir created
258 if not os.path.isdir(default_cache_dir):
258 if not os.path.isdir(default_cache_dir):
259 os.makedirs(default_cache_dir, mode=0755)
259 os.makedirs(default_cache_dir, mode=0755)
260
260
261 # exception store cache
261 # exception store cache
262 _string_setting(
262 _string_setting(
263 settings,
263 settings,
264 'exception_tracker.store_path',
264 'exception_tracker.store_path',
265 temp_store, lower=False, default_when_empty=True)
265 temp_store, lower=False, default_when_empty=True)
266
266
267 # repo_object cache
267 # repo_object cache
268 _string_setting(
268 _string_setting(
269 settings,
269 settings,
270 'rc_cache.repo_object.backend',
270 'rc_cache.repo_object.backend',
271 'dogpile.cache.rc.memory_lru')
271 'dogpile.cache.rc.memory_lru')
272 _int_setting(
272 _int_setting(
273 settings,
273 settings,
274 'rc_cache.repo_object.expiration_time',
274 'rc_cache.repo_object.expiration_time',
275 300)
275 300)
276 _int_setting(
276 _int_setting(
277 settings,
277 settings,
278 'rc_cache.repo_object.max_size',
278 'rc_cache.repo_object.max_size',
279 1024)
279 1024)
280
280
281 def _configure(self):
281 def _configure(self):
282 self.config.add_renderer(
282 self.config.add_renderer(
283 name='msgpack',
283 name='msgpack',
284 factory=self._msgpack_renderer_factory)
284 factory=self._msgpack_renderer_factory)
285
285
286 self.config.add_route('service', '/_service')
286 self.config.add_route('service', '/_service')
287 self.config.add_route('status', '/status')
287 self.config.add_route('status', '/status')
288 self.config.add_route('hg_proxy', '/proxy/hg')
288 self.config.add_route('hg_proxy', '/proxy/hg')
289 self.config.add_route('git_proxy', '/proxy/git')
289 self.config.add_route('git_proxy', '/proxy/git')
290 self.config.add_route('vcs', '/{backend}')
290 self.config.add_route('vcs', '/{backend}')
291 self.config.add_route('stream_git', '/stream/git/*repo_name')
291 self.config.add_route('stream_git', '/stream/git/*repo_name')
292 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
292 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
293
293
294 self.config.add_view(
294 self.config.add_view(
295 self.status_view, route_name='status', renderer='json')
295 self.status_view, route_name='status', renderer='json')
296 self.config.add_view(
296 self.config.add_view(
297 self.service_view, route_name='service', renderer='msgpack')
297 self.service_view, route_name='service', renderer='msgpack')
298
298
299 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
299 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
300 self.config.add_view(self.git_proxy(), route_name='git_proxy')
300 self.config.add_view(self.git_proxy(), route_name='git_proxy')
301 self.config.add_view(
301 self.config.add_view(
302 self.vcs_view, route_name='vcs', renderer='msgpack',
302 self.vcs_view, route_name='vcs', renderer='msgpack',
303 custom_predicates=[self.is_vcs_view])
303 custom_predicates=[self.is_vcs_view])
304
304
305 self.config.add_view(self.hg_stream(), route_name='stream_hg')
305 self.config.add_view(self.hg_stream(), route_name='stream_hg')
306 self.config.add_view(self.git_stream(), route_name='stream_git')
306 self.config.add_view(self.git_stream(), route_name='stream_git')
307
307
308 def notfound(request):
308 def notfound(request):
309 return {'status': '404 NOT FOUND'}
309 return {'status': '404 NOT FOUND'}
310 self.config.add_notfound_view(notfound, renderer='json')
310 self.config.add_notfound_view(notfound, renderer='json')
311
311
312 self.config.add_view(self.handle_vcs_exception, context=Exception)
312 self.config.add_view(self.handle_vcs_exception, context=Exception)
313
313
314 self.config.add_tween(
314 self.config.add_tween(
315 'vcsserver.tweens.RequestWrapperTween',
315 'vcsserver.tweens.RequestWrapperTween',
316 )
316 )
317
317
318 def wsgi_app(self):
318 def wsgi_app(self):
319 return self.config.make_wsgi_app()
319 return self.config.make_wsgi_app()
320
320
321 def vcs_view(self, request):
321 def vcs_view(self, request):
322 remote = self._remotes[request.matchdict['backend']]
322 remote = self._remotes[request.matchdict['backend']]
323 payload = msgpack.unpackb(request.body, use_list=True)
323 payload = msgpack.unpackb(request.body, use_list=True)
324 method = payload.get('method')
324 method = payload.get('method')
325 params = payload.get('params')
325 params = payload.get('params')
326 wire = params.get('wire')
326 wire = params.get('wire')
327 args = params.get('args')
327 args = params.get('args')
328 kwargs = params.get('kwargs')
328 kwargs = params.get('kwargs')
329 context_uid = None
329 context_uid = None
330
330
331 if wire:
331 if wire:
332 try:
332 try:
333 wire['context'] = context_uid = uuid.UUID(wire['context'])
333 wire['context'] = context_uid = uuid.UUID(wire['context'])
334 except KeyError:
334 except KeyError:
335 pass
335 pass
336 args.insert(0, wire)
336 args.insert(0, wire)
337
337
338 log.debug('method called:%s with kwargs:%s context_uid: %s',
338 log.debug('method called:%s with kwargs:%s context_uid: %s',
339 method, kwargs, context_uid)
339 method, kwargs, context_uid)
340 try:
340 try:
341 resp = getattr(remote, method)(*args, **kwargs)
341 resp = getattr(remote, method)(*args, **kwargs)
342 except Exception as e:
342 except Exception as e:
343 exc_info = list(sys.exc_info())
343 exc_info = list(sys.exc_info())
344 exc_type, exc_value, exc_traceback = exc_info
344 exc_type, exc_value, exc_traceback = exc_info
345
345
346 org_exc = getattr(e, '_org_exc', None)
346 org_exc = getattr(e, '_org_exc', None)
347 org_exc_name = None
347 org_exc_name = None
348 if org_exc:
348 if org_exc:
349 org_exc_name = org_exc.__class__.__name__
349 org_exc_name = org_exc.__class__.__name__
350 # replace our "faked" exception with the original one
350 # replace our "faked" exception with the original one
351 exc_info[0] = org_exc.__class__
351 exc_info[0] = org_exc.__class__
352 exc_info[1] = org_exc
352 exc_info[1] = org_exc
353
353
354 store_exception(id(exc_info), exc_info)
354 store_exception(id(exc_info), exc_info)
355
355
356 tb_info = ''.join(
356 tb_info = ''.join(
357 traceback.format_exception(exc_type, exc_value, exc_traceback))
357 traceback.format_exception(exc_type, exc_value, exc_traceback))
358
358
359 type_ = e.__class__.__name__
359 type_ = e.__class__.__name__
360 if type_ not in self.ALLOWED_EXCEPTIONS:
360 if type_ not in self.ALLOWED_EXCEPTIONS:
361 type_ = None
361 type_ = None
362
362
363 resp = {
363 resp = {
364 'id': payload.get('id'),
364 'id': payload.get('id'),
365 'error': {
365 'error': {
366 'message': e.message,
366 'message': e.message,
367 'traceback': tb_info,
367 'traceback': tb_info,
368 'org_exc': org_exc_name,
368 'org_exc': org_exc_name,
369 'type': type_
369 'type': type_
370 }
370 }
371 }
371 }
372 try:
372 try:
373 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
373 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
374 except AttributeError:
374 except AttributeError:
375 pass
375 pass
376 else:
376 else:
377 resp = {
377 resp = {
378 'id': payload.get('id'),
378 'id': payload.get('id'),
379 'result': resp
379 'result': resp
380 }
380 }
381
381
382 return resp
382 return resp
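For reference, vcs_view expects a msgpack-encoded body shaped as sketched below; the method name is hypothetical and is simply resolved via getattr on the selected remote:

import uuid
import msgpack

payload = {
    'id': 'request-1',                # opaque id echoed back in the response
    'method': 'some_remote_method',   # hypothetical; looked up on the remote object
    'params': {
        'wire': {'context': str(uuid.uuid4())},  # prepended to args, context coerced to UUID
        'args': [],
        'kwargs': {},
    },
}
body = msgpack.packb(payload)  # POST this to /git, /hg, /svn or /server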
383
383
384 def status_view(self, request):
384 def status_view(self, request):
385 import vcsserver
385 import vcsserver
386 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
386 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
387 'pid': os.getpid()}
387 'pid': os.getpid()}
388
388
389 def service_view(self, request):
389 def service_view(self, request):
390 import vcsserver
390 import vcsserver
391
391
392 payload = msgpack.unpackb(request.body, use_list=True)
392 payload = msgpack.unpackb(request.body, use_list=True)
393
393
394 try:
394 try:
395 path = self.global_config['__file__']
395 path = self.global_config['__file__']
396 config = configparser.ConfigParser()
396 config = configparser.ConfigParser()
397 config.read(path)
397 config.read(path)
398 parsed_ini = config
398 parsed_ini = config
399 if parsed_ini.has_section('server:main'):
399 if parsed_ini.has_section('server:main'):
400 parsed_ini = dict(parsed_ini.items('server:main'))
400 parsed_ini = dict(parsed_ini.items('server:main'))
401 except Exception:
401 except Exception:
402 log.exception('Failed to read .ini file for display')
402 log.exception('Failed to read .ini file for display')
403 parsed_ini = {}
403 parsed_ini = {}
404
404
405 resp = {
405 resp = {
406 'id': payload.get('id'),
406 'id': payload.get('id'),
407 'result': dict(
407 'result': dict(
408 version=vcsserver.__version__,
408 version=vcsserver.__version__,
409 config=parsed_ini,
409 config=parsed_ini,
410 payload=payload,
410 payload=payload,
411 )
411 )
412 }
412 }
413 return resp
413 return resp
414
414
415 def _msgpack_renderer_factory(self, info):
415 def _msgpack_renderer_factory(self, info):
416 def _render(value, system):
416 def _render(value, system):
417 value = msgpack.packb(value)
417 value = msgpack.packb(value)
418 request = system.get('request')
418 request = system.get('request')
419 if request is not None:
419 if request is not None:
420 response = request.response
420 response = request.response
421 ct = response.content_type
421 ct = response.content_type
422 if ct == response.default_content_type:
422 if ct == response.default_content_type:
423 response.content_type = 'application/x-msgpack'
423 response.content_type = 'application/x-msgpack'
424 return value
424 return value
425 return _render
425 return _render
426
426
427 def set_env_from_config(self, environ, config):
427 def set_env_from_config(self, environ, config):
428 dict_conf = {}
428 dict_conf = {}
429 try:
429 try:
430 for elem in config:
430 for elem in config:
431 if elem[0] == 'rhodecode':
431 if elem[0] == 'rhodecode':
432 dict_conf = json.loads(elem[2])
432 dict_conf = json.loads(elem[2])
433 break
433 break
434 except Exception:
434 except Exception:
435 log.exception('Failed to fetch SCM CONFIG')
435 log.exception('Failed to fetch SCM CONFIG')
436 return
436 return
437
437
438 username = dict_conf.get('username')
438 username = dict_conf.get('username')
439 if username:
439 if username:
440 environ['REMOTE_USER'] = username
440 environ['REMOTE_USER'] = username
441 # mercurial specific, some extension APIs rely on this
441 # mercurial specific, some extension APIs rely on this
442 environ['HGUSER'] = username
442 environ['HGUSER'] = username
443
443
444 ip = dict_conf.get('ip')
444 ip = dict_conf.get('ip')
445 if ip:
445 if ip:
446 environ['REMOTE_HOST'] = ip
446 environ['REMOTE_HOST'] = ip
447
447
448 if _is_request_chunked(environ):
448 if _is_request_chunked(environ):
449 # set the compatibility flag for webob
449 # set the compatibility flag for webob
450 environ['wsgi.input_terminated'] = True
450 environ['wsgi.input_terminated'] = True
451
451
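set_env_from_config above scans the repo config (msgpack-decoded from the X-RC-Repo-Config header in the stream views below) for an entry whose first element is 'rhodecode' and whose third element is a JSON blob. A hedged sketch of such an entry; the middle element's name is an assumption:

import simplejson as json

config = [
    # (section, name, value) triples; only the 'rhodecode' entry is inspected
    ('rhodecode', 'config', json.dumps({'username': 'admin', 'ip': '10.0.0.1'})),
]
# set_env_from_config(environ, config) would then populate REMOTE_USER,
# HGUSER and REMOTE_HOST from that JSON payload.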
452 def hg_proxy(self):
452 def hg_proxy(self):
453 @wsgiapp
453 @wsgiapp
454 def _hg_proxy(environ, start_response):
454 def _hg_proxy(environ, start_response):
455 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
455 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
456 return app(environ, start_response)
456 return app(environ, start_response)
457 return _hg_proxy
457 return _hg_proxy
458
458
459 def git_proxy(self):
459 def git_proxy(self):
460 @wsgiapp
460 @wsgiapp
461 def _git_proxy(environ, start_response):
461 def _git_proxy(environ, start_response):
462 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
462 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
463 return app(environ, start_response)
463 return app(environ, start_response)
464 return _git_proxy
464 return _git_proxy
465
465
466 def hg_stream(self):
466 def hg_stream(self):
467 if self._use_echo_app:
467 if self._use_echo_app:
468 @wsgiapp
468 @wsgiapp
469 def _hg_stream(environ, start_response):
469 def _hg_stream(environ, start_response):
470 app = EchoApp('fake_path', 'fake_name', None)
470 app = EchoApp('fake_path', 'fake_name', None)
471 return app(environ, start_response)
471 return app(environ, start_response)
472 return _hg_stream
472 return _hg_stream
473 else:
473 else:
474 @wsgiapp
474 @wsgiapp
475 def _hg_stream(environ, start_response):
475 def _hg_stream(environ, start_response):
476 log.debug('http-app: handling hg stream')
476 log.debug('http-app: handling hg stream')
477 repo_path = environ['HTTP_X_RC_REPO_PATH']
477 repo_path = environ['HTTP_X_RC_REPO_PATH']
478 repo_name = environ['HTTP_X_RC_REPO_NAME']
478 repo_name = environ['HTTP_X_RC_REPO_NAME']
479 packed_config = base64.b64decode(
479 packed_config = base64.b64decode(
480 environ['HTTP_X_RC_REPO_CONFIG'])
480 environ['HTTP_X_RC_REPO_CONFIG'])
481 config = msgpack.unpackb(packed_config)
481 config = msgpack.unpackb(packed_config)
482 app = scm_app.create_hg_wsgi_app(
482 app = scm_app.create_hg_wsgi_app(
483 repo_path, repo_name, config)
483 repo_path, repo_name, config)
484
484
485 # Consistent path information for hgweb
485 # Consistent path information for hgweb
486 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
486 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
487 environ['REPO_NAME'] = repo_name
487 environ['REPO_NAME'] = repo_name
488 self.set_env_from_config(environ, config)
488 self.set_env_from_config(environ, config)
489
489
490 log.debug('http-app: starting app handler '
490 log.debug('http-app: starting app handler '
491 'with %s and process request', app)
491 'with %s and process request', app)
492 return app(environ, ResponseFilter(start_response))
492 return app(environ, ResponseFilter(start_response))
493 return _hg_stream
493 return _hg_stream
494
494
495 def git_stream(self):
495 def git_stream(self):
496 if self._use_echo_app:
496 if self._use_echo_app:
497 @wsgiapp
497 @wsgiapp
498 def _git_stream(environ, start_response):
498 def _git_stream(environ, start_response):
499 app = EchoApp('fake_path', 'fake_name', None)
499 app = EchoApp('fake_path', 'fake_name', None)
500 return app(environ, start_response)
500 return app(environ, start_response)
501 return _git_stream
501 return _git_stream
502 else:
502 else:
503 @wsgiapp
503 @wsgiapp
504 def _git_stream(environ, start_response):
504 def _git_stream(environ, start_response):
505 log.debug('http-app: handling git stream')
505 log.debug('http-app: handling git stream')
506 repo_path = environ['HTTP_X_RC_REPO_PATH']
506 repo_path = environ['HTTP_X_RC_REPO_PATH']
507 repo_name = environ['HTTP_X_RC_REPO_NAME']
507 repo_name = environ['HTTP_X_RC_REPO_NAME']
508 packed_config = base64.b64decode(
508 packed_config = base64.b64decode(
509 environ['HTTP_X_RC_REPO_CONFIG'])
509 environ['HTTP_X_RC_REPO_CONFIG'])
510 config = msgpack.unpackb(packed_config)
510 config = msgpack.unpackb(packed_config)
511
511
512 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
512 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
513 self.set_env_from_config(environ, config)
513 self.set_env_from_config(environ, config)
514
514
515 content_type = environ.get('CONTENT_TYPE', '')
515 content_type = environ.get('CONTENT_TYPE', '')
516
516
517 path = environ['PATH_INFO']
517 path = environ['PATH_INFO']
518 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
518 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
519 log.debug(
519 log.debug(
520 'LFS: Detecting if request `%s` is LFS server path based '
520 'LFS: Detecting if request `%s` is LFS server path based '
521 'on content type:`%s`, is_lfs:%s',
521 'on content type:`%s`, is_lfs:%s',
522 path, content_type, is_lfs_request)
522 path, content_type, is_lfs_request)
523
523
524 if not is_lfs_request:
524 if not is_lfs_request:
525 # fallback detection by path
525 # fallback detection by path
526 if GIT_LFS_PROTO_PAT.match(path):
526 if GIT_LFS_PROTO_PAT.match(path):
527 is_lfs_request = True
527 is_lfs_request = True
528 log.debug(
528 log.debug(
529 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
529 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
530 path, is_lfs_request)
530 path, is_lfs_request)
531
531
532 if is_lfs_request:
532 if is_lfs_request:
533 app = scm_app.create_git_lfs_wsgi_app(
533 app = scm_app.create_git_lfs_wsgi_app(
534 repo_path, repo_name, config)
534 repo_path, repo_name, config)
535 else:
535 else:
536 app = scm_app.create_git_wsgi_app(
536 app = scm_app.create_git_wsgi_app(
537 repo_path, repo_name, config)
537 repo_path, repo_name, config)
538
538
539 log.debug('http-app: starting app handler '
539 log.debug('http-app: starting app handler '
540 'with %s and process request', app)
540 'with %s and process request', app)
541
541
542 return app(environ, start_response)
542 return app(environ, start_response)
543
543
544 return _git_stream
544 return _git_stream
545
545
546 def is_vcs_view(self, context, request):
546 def is_vcs_view(self, context, request):
547 """
547 """
548 View predicate that returns true if the given backend is supported by
548 View predicate that returns true if the given backend is supported by
549 defined remotes.
549 defined remotes.
550 """
550 """
551 backend = request.matchdict.get('backend')
551 backend = request.matchdict.get('backend')
552 return backend in self._remotes
552 return backend in self._remotes
553
553
554 def handle_vcs_exception(self, exception, request):
554 def handle_vcs_exception(self, exception, request):
555 _vcs_kind = getattr(exception, '_vcs_kind', '')
555 _vcs_kind = getattr(exception, '_vcs_kind', '')
556 if _vcs_kind == 'repo_locked':
556 if _vcs_kind == 'repo_locked':
557 # Get custom repo-locked status code if present.
557 # Get custom repo-locked status code if present.
558 status_code = request.headers.get('X-RC-Locked-Status-Code')
558 status_code = request.headers.get('X-RC-Locked-Status-Code')
559 return HTTPRepoLocked(
559 return HTTPRepoLocked(
560 title=exception.message, status_code=status_code)
560 title=exception.message, status_code=status_code)
561
561
562 elif _vcs_kind == 'repo_branch_protected':
562 elif _vcs_kind == 'repo_branch_protected':
563 # Get custom repo-branch-protected status code if present.
563 # Get custom repo-branch-protected status code if present.
564 return HTTPRepoBranchProtected(title=exception.message)
564 return HTTPRepoBranchProtected(title=exception.message)
565
565
566 exc_info = request.exc_info
566 exc_info = request.exc_info
567 store_exception(id(exc_info), exc_info)
567 store_exception(id(exc_info), exc_info)
568
568
569 traceback_info = 'unavailable'
569 traceback_info = 'unavailable'
570 if request.exc_info:
570 if request.exc_info:
571 exc_type, exc_value, exc_tb = request.exc_info
571 exc_type, exc_value, exc_tb = request.exc_info
572 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
572 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
573
573
574 log.error(
574 log.error(
575 'error occurred handling this request for path: %s, \n tb: %s',
575 'error occurred handling this request for path: %s, \n tb: %s',
576 request.path, traceback_info)
576 request.path, traceback_info)
577 raise exception
577 raise exception
578
578
579
579
580 class ResponseFilter(object):
580 class ResponseFilter(object):
581
581
582 def __init__(self, start_response):
582 def __init__(self, start_response):
583 self._start_response = start_response
583 self._start_response = start_response
584
584
585 def __call__(self, status, response_headers, exc_info=None):
585 def __call__(self, status, response_headers, exc_info=None):
586 headers = tuple(
586 headers = tuple(
587 (h, v) for h, v in response_headers
587 (h, v) for h, v in response_headers
588 if not wsgiref.util.is_hop_by_hop(h))
588 if not wsgiref.util.is_hop_by_hop(h))
589 return self._start_response(status, headers, exc_info)
589 return self._start_response(status, headers, exc_info)
590
590
591
591
592 def main(global_config, **settings):
592 def main(global_config, **settings):
593 if MercurialFactory:
593 if MercurialFactory:
594 hgpatches.patch_largefiles_capabilities()
594 hgpatches.patch_largefiles_capabilities()
595 hgpatches.patch_subrepo_type_mapping()
595 hgpatches.patch_subrepo_type_mapping()
596
596
597 app = HTTPApplication(settings=settings, global_config=global_config)
597 app = HTTPApplication(settings=settings, global_config=global_config)
598 return app.wsgi_app()
598 return app.wsgi_app()
@@ -1,65 +1,65 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2018 RhodeCode GmbH
4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20
20
21 import logging
21 import logging
22
22
23 from repoze.lru import LRUCache
23 from repoze.lru import LRUCache
24
24
25 from vcsserver.utils import safe_str
25 from vcsserver.utils import safe_str
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
30 class LRUDict(LRUCache):
30 class LRUDict(LRUCache):
31 """
31 """
32 Wrapper to provide partial dict access
32 Wrapper to provide partial dict access
33 """
33 """
34
34
35 def __setitem__(self, key, value):
35 def __setitem__(self, key, value):
36 return self.put(key, value)
36 return self.put(key, value)
37
37
38 def __getitem__(self, key):
38 def __getitem__(self, key):
39 return self.get(key)
39 return self.get(key)
40
40
41 def __contains__(self, key):
41 def __contains__(self, key):
42 return bool(self.get(key))
42 return bool(self.get(key))
43
43
44 def __delitem__(self, key):
44 def __delitem__(self, key):
45 del self.data[key]
45 del self.data[key]
46
46
47 def keys(self):
47 def keys(self):
48 return self.data.keys()
48 return self.data.keys()
49
49
50
50
51 class LRUDictDebug(LRUDict):
51 class LRUDictDebug(LRUDict):
52 """
52 """
53 Wrapper to provide some debug options
53 Wrapper to provide some debug options
54 """
54 """
55 def _report_keys(self):
55 def _report_keys(self):
56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
57 # build a newline-separated listing so the keys print more nicely
57 # build a newline-separated listing so the keys print more nicely
58 fmt = '\n'
58 fmt = '\n'
59 for cnt, elem in enumerate(self.keys()):
59 for cnt, elem in enumerate(self.keys()):
60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
61 log.debug('current LRU keys (%s):%s' % (elems_cnt, fmt))
61 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
62
62
63 def __getitem__(self, key):
63 def __getitem__(self, key):
64 self._report_keys()
64 self._report_keys()
65 return self.get(key)
65 return self.get(key)
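A minimal usage sketch of the LRUDict wrapper above (the capacity is arbitrary):

cache = LRUDict(100)                  # repoze.lru LRUCache with room for 100 entries
cache['commit:abc'] = {'author': 'dev'}
if 'commit:abc' in cache:             # note: __contains__ is False for falsy values
    data = cache['commit:abc']
del cache['commit:abc']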
@@ -1,86 +1,86 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import os
18 import os
19 import shutil
19 import shutil
20 import tempfile
20 import tempfile
21
21
22 import configobj
22 import configobj
23
23
24
24
25 class ContextINI(object):
25 class ContextINI(object):
26 """
26 """
27 Allows creating a new test.ini file as a copy of an existing one with edited
27 Allows creating a new test.ini file as a copy of an existing one with edited
28 data. If the existing file is not present, a new one is created. Example usage::
28 data. If the existing file is not present, a new one is created. Example usage::
29
29
30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 print 'vcsserver --config=%s' % new_test_ini_path
31 print 'vcsserver --config=%s' % new_test_ini_path
32 """
32 """
33
33
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 destroy=True):
35 destroy=True):
36 self.ini_file_path = ini_file_path
36 self.ini_file_path = ini_file_path
37 self.ini_params = ini_params
37 self.ini_params = ini_params
38 self.new_path = None
38 self.new_path = None
39 self.new_path_prefix = new_file_prefix or 'test'
39 self.new_path_prefix = new_file_prefix or 'test'
40 self.destroy = destroy
40 self.destroy = destroy
41
41
42 def __enter__(self):
42 def __enter__(self):
43 _, pref = tempfile.mkstemp()
43 _, pref = tempfile.mkstemp()
44 loc = tempfile.gettempdir()
44 loc = tempfile.gettempdir()
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 pref, self.new_path_prefix, self.ini_file_path))
46 pref, self.new_path_prefix, self.ini_file_path))
47
47
48 # if we re-use an existing ini file, copy it and modify it according to the params
48 # if we re-use an existing ini file, copy it and modify it according to the params
49 if os.path.isfile(self.ini_file_path):
49 if os.path.isfile(self.ini_file_path):
50 shutil.copy(self.ini_file_path, self.new_path)
50 shutil.copy(self.ini_file_path, self.new_path)
51 else:
51 else:
52 # create a new empty file for ConfigObj to write to.
52 # create a new empty file for ConfigObj to write to.
53 with open(self.new_path, 'wb'):
53 with open(self.new_path, 'wb'):
54 pass
54 pass
55
55
56 config = configobj.ConfigObj(
56 config = configobj.ConfigObj(
57 self.new_path, file_error=True, write_empty_values=True)
57 self.new_path, file_error=True, write_empty_values=True)
58
58
59 for data in self.ini_params:
59 for data in self.ini_params:
60 section, ini_params = data.items()[0]
60 section, ini_params = data.items()[0]
61 key, val = ini_params.items()[0]
61 key, val = ini_params.items()[0]
62 if section not in config:
62 if section not in config:
63 config[section] = {}
63 config[section] = {}
64 config[section][key] = val
64 config[section][key] = val
65
65
66 config.write()
66 config.write()
67 return self.new_path
67 return self.new_path
68
68
69 def __exit__(self, exc_type, exc_val, exc_tb):
69 def __exit__(self, exc_type, exc_val, exc_tb):
70 if self.destroy:
70 if self.destroy:
71 os.remove(self.new_path)
71 os.remove(self.new_path)
72
72
73
73
74 def no_newline_id_generator(test_name):
74 def no_newline_id_generator(test_name):
75 """
75 """
76 Generates a test name without spaces or newline characters. Used for
76 Generates a test name without spaces or newline characters. Used for
77 nicer output of the test progress
77 nicer output of the test progress
78 """
78 """
79 org_name = test_name
79 org_name = test_name
80 test_name = test_name\
80 test_name = str(test_name)\
81 .replace('\n', '_N') \
81 .replace('\n', '_N') \
82 .replace('\r', '_N') \
82 .replace('\r', '_N') \
83 .replace('\t', '_T') \
83 .replace('\t', '_T') \
84 .replace(' ', '_S')
84 .replace(' ', '_S')
85
85
86 return test_name or 'test-with-empty-name'
86 return test_name or 'test-with-empty-name'
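For example, a parametrized test id containing whitespace and newlines is flattened like this:

assert no_newline_id_generator('push to\nbranch name') == 'push_Sto_Nbranch_Sname'
assert no_newline_id_generator('') == 'test-with-empty-name'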
@@ -1,165 +1,165 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import inspect
18 import inspect
19
19
20 import pytest
20 import pytest
21 import dulwich.errors
21 import dulwich.errors
22 from mock import Mock, patch
22 from mock import Mock, patch
23
23
24 from vcsserver import git
24 from vcsserver import git
25
25
26
26
27 SAMPLE_REFS = {
27 SAMPLE_REFS = {
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 }
33 }
34
34
35
35
36 @pytest.fixture
36 @pytest.fixture
37 def git_remote():
37 def git_remote():
38 """
38 """
39 A GitRemote instance with a mock factory.
39 A GitRemote instance with a mock factory.
40 """
40 """
41 factory = Mock()
41 factory = Mock()
42 remote = git.GitRemote(factory)
42 remote = git.GitRemote(factory)
43 return remote
43 return remote
44
44
45
45
46 def test_discover_git_version(git_remote):
46 def test_discover_git_version(git_remote):
47 version = git_remote.discover_git_version()
47 version = git_remote.discover_git_version()
48 assert version
48 assert version
49
49
50
50
51 class TestGitFetch(object):
51 class TestGitFetch(object):
52 def setup(self):
52 def setup(self):
53 self.mock_repo = Mock()
53 self.mock_repo = Mock()
54 factory = Mock()
54 factory = Mock()
55 factory.repo = Mock(return_value=self.mock_repo)
55 factory.repo = Mock(return_value=self.mock_repo)
56 self.remote_git = git.GitRemote(factory)
56 self.remote_git = git.GitRemote(factory)
57
57
58 def test_fetches_all_when_no_commit_ids_specified(self):
58 def test_fetches_all_when_no_commit_ids_specified(self):
59 def side_effect(determine_wants, *args, **kwargs):
59 def side_effect(determine_wants, *args, **kwargs):
60 determine_wants(SAMPLE_REFS)
60 determine_wants(SAMPLE_REFS)
61
61
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 mock_fetch.side_effect = side_effect
63 mock_fetch.side_effect = side_effect
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
65 determine_wants = self.mock_repo.object_store.determine_wants_all
65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67
67
68 def test_fetches_specified_commits(self):
68 def test_fetches_specified_commits(self):
69 selected_refs = {
69 selected_refs = {
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 }
72 }
73
73
74 def side_effect(determine_wants, *args, **kwargs):
74 def side_effect(determine_wants, *args, **kwargs):
75 result = determine_wants(SAMPLE_REFS)
75 result = determine_wants(SAMPLE_REFS)
76 assert sorted(result) == sorted(selected_refs.values())
76 assert sorted(result) == sorted(selected_refs.values())
77 return result
77 return result
78
78
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 mock_fetch.side_effect = side_effect
80 mock_fetch.side_effect = side_effect
81 self.remote_git.fetch(
81 self.remote_git.pull(
82 wire=None, url='/tmp/', apply_refs=False,
82 wire=None, url='/tmp/', apply_refs=False,
83 refs=selected_refs.keys())
83 refs=selected_refs.keys())
84 determine_wants = self.mock_repo.object_store.determine_wants_all
84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 assert determine_wants.call_count == 0
85 assert determine_wants.call_count == 0
86
86
87 def test_get_remote_refs(self):
87 def test_get_remote_refs(self):
88 factory = Mock()
88 factory = Mock()
89 remote_git = git.GitRemote(factory)
89 remote_git = git.GitRemote(factory)
90 url = 'http://example.com/test/test.git'
90 url = 'http://example.com/test/test.git'
91 sample_refs = {
91 sample_refs = {
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 }
94 }
95
95
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 mock_repo().get_refs.return_value = sample_refs
97 mock_repo().get_refs.return_value = sample_refs
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 mock_repo().get_refs.assert_called_once_with()
99 mock_repo().get_refs.assert_called_once_with()
100 assert remote_refs == sample_refs
100 assert remote_refs == sample_refs
101
101
102 def test_remove_ref(self):
102 def test_remove_ref(self):
103 ref_to_remove = 'refs/tags/v0.1.9'
103 ref_to_remove = 'refs/tags/v0.1.9'
104 self.mock_repo.refs = SAMPLE_REFS.copy()
104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 self.remote_git.remove_ref(None, ref_to_remove)
105 self.remote_git.remove_ref(None, ref_to_remove)
106 assert ref_to_remove not in self.mock_repo.refs
106 assert ref_to_remove not in self.mock_repo.refs
107
107
108
108
109 class TestReraiseSafeExceptions(object):
109 class TestReraiseSafeExceptions(object):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
110 def test_method_decorated_with_reraise_safe_exceptions(self):
111 factory = Mock()
111 factory = Mock()
112 git_remote = git.GitRemote(factory)
112 git_remote = git.GitRemote(factory)
113
113
114 def fake_function():
114 def fake_function():
115 return None
115 return None
116
116
117 decorator = git.reraise_safe_exceptions(fake_function)
117 decorator = git.reraise_safe_exceptions(fake_function)
118
118
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 for method_name, method in methods:
120 for method_name, method in methods:
121 if not method_name.startswith('_'):
121 if not method_name.startswith('_'):
122 assert method.im_func.__code__ == decorator.__code__
122 assert method.im_func.__code__ == decorator.__code__
123
123
124 @pytest.mark.parametrize('side_effect, expected_type', [
124 @pytest.mark.parametrize('side_effect, expected_type', [
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 (dulwich.errors.HangupException(), 'error'),
129 (dulwich.errors.HangupException(), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 ])
131 ])
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 @git.reraise_safe_exceptions
133 @git.reraise_safe_exceptions
134 def fake_method():
134 def fake_method():
135 raise side_effect
135 raise side_effect
136
136
137 with pytest.raises(Exception) as exc_info:
137 with pytest.raises(Exception) as exc_info:
138 fake_method()
138 fake_method()
139 assert type(exc_info.value) == Exception
139 assert type(exc_info.value) == Exception
140 assert exc_info.value._vcs_kind == expected_type
140 assert exc_info.value._vcs_kind == expected_type
141
141
142
142
143 class TestDulwichRepoWrapper(object):
143 class TestDulwichRepoWrapper(object):
144 def test_calls_close_on_delete(self):
144 def test_calls_close_on_delete(self):
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 with isdir_patcher:
146 with isdir_patcher:
147 repo = git.Repo('/tmp/abcde')
147 repo = git.Repo('/tmp/abcde')
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
148 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 del repo
149 del repo
150 close_mock.assert_called_once_with()
150 close_mock.assert_called_once_with()
151
151
152
152
153 class TestGitFactory(object):
153 class TestGitFactory(object):
154 def test_create_repo_returns_dulwich_wrapper(self):
154 def test_create_repo_returns_dulwich_wrapper(self):
155
155
156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 mock.side_effect = {'repo_objects': ''}
157 mock.side_effect = {'repo_objects': ''}
158 factory = git.GitFactory()
158 factory = git.GitFactory()
159 wire = {
159 wire = {
160 'path': '/tmp/abcde'
160 'path': '/tmp/abcde'
161 }
161 }
162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 with isdir_patcher:
163 with isdir_patcher:
164 result = factory._create_repo(wire, True)
164 result = factory._create_repo(wire, True)
165 assert isinstance(result, git.Repo)
165 assert isinstance(result, git.Repo)
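
The reraise_safe_exceptions behaviour exercised by TestReraiseSafeExceptions is easiest to read as a decorator that catches specific dulwich errors and re-raises them as plain Exception objects tagged with _vcs_kind. The sketch below is only an illustration consistent with what the tests assert; it is not the actual vcsserver.git implementation:

    import functools

    import dulwich.errors

    def reraise_safe_exceptions_sketch(func):
        # Illustrative only: map dulwich errors to plain, serializable exceptions
        # carrying a _vcs_kind marker, as the parametrized test above expects.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except (dulwich.errors.ChecksumMismatch,
                    dulwich.errors.NotCommitError,
                    dulwich.errors.MissingCommitError,
                    dulwich.errors.ObjectMissing) as e:
                exc = Exception(e)
                exc._vcs_kind = 'lookup'
                raise exc
            except (dulwich.errors.HangupException,
                    dulwich.errors.UnexpectedCommandError) as e:
                exc = Exception(e)
                exc._vcs_kind = 'error'
                raise exc
        return wrapper
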
@@ -1,241 +1,241 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import contextlib
18 import contextlib
19 import io
19 import io
20 import threading
20 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
21 from BaseHTTPServer import BaseHTTPRequestHandler
22 from SocketServer import TCPServer
22 from SocketServer import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27 import simplejson as json
27 import simplejson as json
28
28
29 from vcsserver import hooks
29 from vcsserver import hooks
30
30
31
31
32 def get_hg_ui(extras=None):
32 def get_hg_ui(extras=None):
33 """Create a Config object with a valid RC_SCM_DATA entry."""
33 """Create a Config object with a valid RC_SCM_DATA entry."""
34 extras = extras or {}
34 extras = extras or {}
35 required_extras = {
35 required_extras = {
36 'username': '',
36 'username': '',
37 'repository': '',
37 'repository': '',
38 'locked_by': '',
38 'locked_by': '',
39 'scm': '',
39 'scm': '',
40 'make_lock': '',
40 'make_lock': '',
41 'action': '',
41 'action': '',
42 'ip': '',
42 'ip': '',
43 'hooks_uri': 'fake_hooks_uri',
43 'hooks_uri': 'fake_hooks_uri',
44 }
44 }
45 required_extras.update(extras)
45 required_extras.update(extras)
46 hg_ui = mercurial.ui.ui()
46 hg_ui = mercurial.ui.ui()
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48
48
49 return hg_ui
49 return hg_ui
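
The extras primed by get_hg_ui end up as a JSON blob under rhodecode.RC_SCM_DATA and can be read back with ui.config. A short sketch, assuming it runs next to the helper above; the username value is illustrative:

    import json

    # read back the RC_SCM_DATA blob that get_hg_ui() stores on the ui object
    hg_ui = get_hg_ui({'username': 'illustrative-user'})
    extras = json.loads(hg_ui.config('rhodecode', 'RC_SCM_DATA'))
    assert extras['hooks_uri'] == 'fake_hooks_uri'
    assert extras['username'] == 'illustrative-user'
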
50
50
51
51
52 def test_git_pre_receive_is_disabled():
52 def test_git_pre_receive_is_disabled():
53 extras = {'hooks': ['pull']}
53 extras = {'hooks': ['pull']}
54 response = hooks.git_pre_receive(None, None,
54 response = hooks.git_pre_receive(None, None,
55 {'RC_SCM_DATA': json.dumps(extras)})
55 {'RC_SCM_DATA': json.dumps(extras)})
56
56
57 assert response == 0
57 assert response == 0
58
58
59
59
60 def test_git_post_receive_is_disabled():
60 def test_git_post_receive_is_disabled():
61 extras = {'hooks': ['pull']}
61 extras = {'hooks': ['pull']}
62 response = hooks.git_post_receive(None, '',
62 response = hooks.git_post_receive(None, '',
63 {'RC_SCM_DATA': json.dumps(extras)})
63 {'RC_SCM_DATA': json.dumps(extras)})
64
64
65 assert response == 0
65 assert response == 0
66
66
67
67
68 def test_git_post_receive_calls_repo_size():
68 def test_git_post_receive_calls_repo_size():
69 extras = {'hooks': ['push', 'repo_size']}
69 extras = {'hooks': ['push', 'repo_size']}
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 hooks.git_post_receive(
71 hooks.git_post_receive(
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
73 extras.update({'commit_ids': [],
73 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
75 expected_calls = [
75 expected_calls = [
76 mock.call('repo_size', extras, mock.ANY),
76 mock.call('repo_size', extras, mock.ANY),
77 mock.call('post_push', extras, mock.ANY),
77 mock.call('post_push', extras, mock.ANY),
78 ]
78 ]
79 assert call_hook_mock.call_args_list == expected_calls
79 assert call_hook_mock.call_args_list == expected_calls
80
80
81
81
82 def test_git_post_receive_does_not_call_disabled_repo_size():
82 def test_git_post_receive_does_not_call_disabled_repo_size():
83 extras = {'hooks': ['push']}
83 extras = {'hooks': ['push']}
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 hooks.git_post_receive(
85 hooks.git_post_receive(
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
87 extras.update({'commit_ids': [],
87 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
89 expected_calls = [
89 expected_calls = [
90 mock.call('post_push', extras, mock.ANY)
90 mock.call('post_push', extras, mock.ANY)
91 ]
91 ]
92 assert call_hook_mock.call_args_list == expected_calls
92 assert call_hook_mock.call_args_list == expected_calls
93
93
94
94
95 def test_repo_size_exception_does_not_affect_git_post_receive():
95 def test_repo_size_exception_does_not_affect_git_post_receive():
96 extras = {'hooks': ['push', 'repo_size']}
96 extras = {'hooks': ['push', 'repo_size']}
97 status = 0
97 status = 0
98
98
99 def side_effect(name, *args, **kwargs):
99 def side_effect(name, *args, **kwargs):
100 if name == 'repo_size':
100 if name == 'repo_size':
101 raise Exception('Fake exception')
101 raise Exception('Fake exception')
102 else:
102 else:
103 return status
103 return status
104
104
105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
106 call_hook_mock.side_effect = side_effect
106 call_hook_mock.side_effect = side_effect
107 result = hooks.git_post_receive(
107 result = hooks.git_post_receive(
108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
109 assert result == status
109 assert result == status
110
110
111
111
112 def test_git_pre_pull_is_disabled():
112 def test_git_pre_pull_is_disabled():
113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
114
114
115
115
116 def test_git_post_pull_is_disabled():
116 def test_git_post_pull_is_disabled():
117 assert (
117 assert (
118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
119
119
120
120
121 class TestGetHooksClient(object):
121 class TestGetHooksClient(object):
122
122
123 def test_returns_http_client_when_protocol_matches(self):
123 def test_returns_http_client_when_protocol_matches(self):
124 hooks_uri = 'localhost:8000'
124 hooks_uri = 'localhost:8000'
125 result = hooks._get_hooks_client({
125 result = hooks._get_hooks_client({
126 'hooks_uri': hooks_uri,
126 'hooks_uri': hooks_uri,
127 'hooks_protocol': 'http'
127 'hooks_protocol': 'http'
128 })
128 })
129 assert isinstance(result, hooks.HooksHttpClient)
129 assert isinstance(result, hooks.HooksHttpClient)
130 assert result.hooks_uri == hooks_uri
130 assert result.hooks_uri == hooks_uri
131
131
132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
133 fake_module = mock.Mock()
133 fake_module = mock.Mock()
134 import_patcher = mock.patch.object(
134 import_patcher = mock.patch.object(
135 hooks.importlib, 'import_module', return_value=fake_module)
135 hooks.importlib, 'import_module', return_value=fake_module)
136 fake_module_name = 'fake.module'
136 fake_module_name = 'fake.module'
137 with import_patcher as import_mock:
137 with import_patcher as import_mock:
138 result = hooks._get_hooks_client(
138 result = hooks._get_hooks_client(
139 {'hooks_module': fake_module_name})
139 {'hooks_module': fake_module_name})
140
140
141 import_mock.assert_called_once_with(fake_module_name)
141 import_mock.assert_called_once_with(fake_module_name)
142 assert isinstance(result, hooks.HooksDummyClient)
142 assert isinstance(result, hooks.HooksDummyClient)
143 assert result._hooks_module == fake_module
143 assert result._hooks_module == fake_module
144
144
145
145
146 class TestHooksHttpClient(object):
146 class TestHooksHttpClient(object):
147 def test_init_sets_hooks_uri(self):
147 def test_init_sets_hooks_uri(self):
148 uri = 'localhost:3000'
148 uri = 'localhost:3000'
149 client = hooks.HooksHttpClient(uri)
149 client = hooks.HooksHttpClient(uri)
150 assert client.hooks_uri == uri
150 assert client.hooks_uri == uri
151
151
152 def test_serialize_returns_json_string(self):
152 def test_serialize_returns_json_string(self):
153 client = hooks.HooksHttpClient('localhost:3000')
153 client = hooks.HooksHttpClient('localhost:3000')
154 hook_name = 'test'
154 hook_name = 'test'
155 extras = {
155 extras = {
156 'first': 1,
156 'first': 1,
157 'second': 'two'
157 'second': 'two'
158 }
158 }
159 result = client._serialize(hook_name, extras)
159 result = client._serialize(hook_name, extras)
160 expected_result = json.dumps({
160 expected_result = json.dumps({
161 'method': hook_name,
161 'method': hook_name,
162 'extras': extras
162 'extras': extras
163 })
163 })
164 assert result == expected_result
164 assert result == expected_result
165
165
166 def test_call_queries_http_server(self, http_mirror):
166 def test_call_queries_http_server(self, http_mirror):
167 client = hooks.HooksHttpClient(http_mirror.uri)
167 client = hooks.HooksHttpClient(http_mirror.uri)
168 hook_name = 'test'
168 hook_name = 'test'
169 extras = {
169 extras = {
170 'first': 1,
170 'first': 1,
171 'second': 'two'
171 'second': 'two'
172 }
172 }
173 result = client(hook_name, extras)
173 result = client(hook_name, extras)
174 expected_result = {
174 expected_result = {
175 'method': hook_name,
175 'method': hook_name,
176 'extras': extras
176 'extras': extras
177 }
177 }
178 assert result == expected_result
178 assert result == expected_result
179
179
180
180
181 class TestHooksDummyClient(object):
181 class TestHooksDummyClient(object):
182 def test_init_imports_hooks_module(self):
182 def test_init_imports_hooks_module(self):
183 hooks_module_name = 'rhodecode.fake.module'
183 hooks_module_name = 'rhodecode.fake.module'
184 hooks_module = mock.MagicMock()
184 hooks_module = mock.MagicMock()
185
185
186 import_patcher = mock.patch.object(
186 import_patcher = mock.patch.object(
187 hooks.importlib, 'import_module', return_value=hooks_module)
187 hooks.importlib, 'import_module', return_value=hooks_module)
188 with import_patcher as import_mock:
188 with import_patcher as import_mock:
189 client = hooks.HooksDummyClient(hooks_module_name)
189 client = hooks.HooksDummyClient(hooks_module_name)
190 import_mock.assert_called_once_with(hooks_module_name)
190 import_mock.assert_called_once_with(hooks_module_name)
191 assert client._hooks_module == hooks_module
191 assert client._hooks_module == hooks_module
192
192
193 def test_call_returns_hook_result(self):
193 def test_call_returns_hook_result(self):
194 hooks_module_name = 'rhodecode.fake.module'
194 hooks_module_name = 'rhodecode.fake.module'
195 hooks_module = mock.MagicMock()
195 hooks_module = mock.MagicMock()
196 import_patcher = mock.patch.object(
196 import_patcher = mock.patch.object(
197 hooks.importlib, 'import_module', return_value=hooks_module)
197 hooks.importlib, 'import_module', return_value=hooks_module)
198 with import_patcher:
198 with import_patcher:
199 client = hooks.HooksDummyClient(hooks_module_name)
199 client = hooks.HooksDummyClient(hooks_module_name)
200
200
201 result = client('post_push', {})
201 result = client('post_push', {})
202 hooks_module.Hooks.assert_called_once_with()
202 hooks_module.Hooks.assert_called_once_with()
203 assert result == hooks_module.Hooks().__enter__().post_push()
203 assert result == hooks_module.Hooks().__enter__().post_push()
204
204
205
205
206 @pytest.fixture
206 @pytest.fixture
207 def http_mirror(request):
207 def http_mirror(request):
208 server = MirrorHttpServer()
208 server = MirrorHttpServer()
209 request.addfinalizer(server.stop)
209 request.addfinalizer(server.stop)
210 return server
210 return server
211
211
212
212
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
214 def do_POST(self):
214 def do_POST(self):
215 length = int(self.headers['Content-Length'])
215 length = int(self.headers['Content-Length'])
216 body = self.rfile.read(length).decode('utf-8')
216 body = self.rfile.read(length).decode('utf-8')
217 self.send_response(200)
217 self.send_response(200)
218 self.end_headers()
218 self.end_headers()
219 self.wfile.write(body)
219 self.wfile.write(body)
220
220
221
221
222 class MirrorHttpServer(object):
222 class MirrorHttpServer(object):
223 ip_address = '127.0.0.1'
223 ip_address = '127.0.0.1'
224 port = 0
224 port = 0
225
225
226 def __init__(self):
226 def __init__(self):
227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
228 _, self.port = self._daemon.server_address
228 _, self.port = self._daemon.server_address
229 self._thread = threading.Thread(target=self._daemon.serve_forever)
229 self._thread = threading.Thread(target=self._daemon.serve_forever)
230 self._thread.daemon = True
230 self._thread.daemon = True
231 self._thread.start()
231 self._thread.start()
232
232
233 def stop(self):
233 def stop(self):
234 self._daemon.shutdown()
234 self._daemon.shutdown()
235 self._thread.join()
235 self._thread.join()
236 self._daemon = None
236 self._daemon = None
237 self._thread = None
237 self._thread = None
238
238
239 @property
239 @property
240 def uri(self):
240 def uri(self):
241 return '{}:{}'.format(self.ip_address, self.port)
241 return '{}:{}'.format(self.ip_address, self.port)
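
Since MirrorHttpHandler simply echoes each POST body back, the server can also be exercised directly, outside the http_mirror fixture. A standalone sketch, assuming it runs in the same (Python 2) module as MirrorHttpServer above:

    import httplib
    import json

    server = MirrorHttpServer()
    try:
        connection = httplib.HTTPConnection(server.ip_address, server.port)
        payload = json.dumps({'method': 'post_push', 'extras': {'first': 1}})
        # the mirror answers 200 and writes the request body straight back
        connection.request('POST', '/', payload)
        response = connection.getresponse()
        assert json.loads(response.read()) == json.loads(payload)
        connection.close()
    finally:
        server.stop()
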
@@ -1,60 +1,58 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2018 RhodeCode GmbH
2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19
19
20 import time
20 import time
21 import logging
21 import logging
22
22
23
23
24 from vcsserver.utils import safe_str
24 from vcsserver.utils import safe_str
25
25
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
30 def get_access_path(request):
30 def get_access_path(request):
31 environ = request.environ
31 environ = request.environ
32 return environ.get('PATH_INFO')
32 return environ.get('PATH_INFO')
33
33
34
34
35 class RequestWrapperTween(object):
35 class RequestWrapperTween(object):
36 def __init__(self, handler, registry):
36 def __init__(self, handler, registry):
37 self.handler = handler
37 self.handler = handler
38 self.registry = registry
38 self.registry = registry
39
39
40 # one-time configuration code goes here
40 # one-time configuration code goes here
41
41
42 def __call__(self, request):
42 def __call__(self, request):
43 start = time.time()
43 start = time.time()
44 try:
44 try:
45 response = self.handler(request)
45 response = self.handler(request)
46 finally:
46 finally:
47 end = time.time()
47 end = time.time()
48
48
49 log.info('IP: %s Request to path: `%s` time: %.3fs' % (
49 log.info('IP: %s Request to path: `%s` time: %.3fs',
50 '127.0.0.1',
50 '127.0.0.1', safe_str(get_access_path(request)), end - start)
51 safe_str(get_access_path(request)), end - start)
52 )
53
51
54 return response
52 return response
55
53
56
54
57 def includeme(config):
55 def includeme(config):
58 config.add_tween(
56 config.add_tween(
59 'vcsserver.tweens.RequestWrapperTween',
57 'vcsserver.tweens.RequestWrapperTween',
60 )
58 )
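
The includeme() hook lets the tween be pulled in with a single config.include call. A minimal Pyramid wiring sketch; the surrounding app setup is a placeholder, only the include line reflects this module:

    from pyramid.config import Configurator

    def make_app():
        config = Configurator()
        # calls vcsserver.tweens.includeme(), which registers RequestWrapperTween
        config.include('vcsserver.tweens')
        return config.make_wsgi_app()
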