release: Merge default into stable for release preparation
marcink
r227:ddd92633 merge stable
@@ -1,6 +1,6 b''
1 [bumpversion]
1 [bumpversion]
2 current_version = 4.7.2
2 current_version = 4.8.0
3 message = release: Bump version {current_version} to {new_version}
3 message = release: Bump version {current_version} to {new_version}
4
4
5 [bumpversion:file:vcsserver/VERSION]
5 [bumpversion:file:vcsserver/VERSION]
6
6
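The first hunk only moves current_version from 4.7.2 to 4.8.0; the commit-message template is unchanged. As a minimal illustration (plain Python, not bumpversion itself), the message option expands like this:

    # How the bumpversion message template above is rendered for this release.
    template = "release: Bump version {current_version} to {new_version}"
    print(template.format(current_version="4.7.2", new_version="4.8.0"))
    # -> release: Bump version 4.7.2 to 4.8.0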
@@ -1,16 +1,14 b''
1 [DEFAULT]
1 [DEFAULT]
2 done = false
2 done = false
3
3
4 [task:bump_version]
4 [task:bump_version]
5 done = true
5 done = true
6
6
7 [task:fixes_on_stable]
7 [task:fixes_on_stable]
8 done = true
9
8
10 [task:pip2nix_generated]
9 [task:pip2nix_generated]
11 done = true
12
10
13 [release]
11 [release]
14 state = prepared
12 state = in_progress
15 version = 4.7.2
13 version = 4.8.0
16
14
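The release-tasks file is plain INI; with configparser-style defaulting, done = false in [DEFAULT] applies to every task section that does not set its own value, and the [release] section is reset to in_progress for 4.8.0. A minimal Python 3 sketch of reading it (the release.ini filename is an assumption; the real release tooling is not shown in this diff):

    import configparser

    cfg = configparser.ConfigParser()
    cfg.read("release.ini")

    # [DEFAULT] done = false is inherited by any task section without its own value.
    for section in cfg.sections():
        if section.startswith("task:"):
            print(section, cfg.getboolean(section, "done"))

    print("release:", cfg.get("release", "state"), cfg.get("release", "version"))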
@@ -1,85 +1,79 b''
1 ################################################################################
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
3 # #
4 ################################################################################
4 ################################################################################
5
5
6 [app:main]
6 [app:main]
7 use = egg:rhodecode-vcsserver
7 use = egg:rhodecode-vcsserver
8
8
9 pyramid.default_locale_name = en
9 pyramid.default_locale_name = en
10 pyramid.includes =
10 pyramid.includes =
11
11
12 # default locale used by VCS systems
12 # default locale used by VCS systems
13 locale = en_US.UTF-8
13 locale = en_US.UTF-8
14
14
15 # cache regions, please don't change
15 # cache regions, please don't change
16 beaker.cache.regions = repo_object
16 beaker.cache.regions = repo_object
17 beaker.cache.repo_object.type = memorylru
17 beaker.cache.repo_object.type = memorylru
18 beaker.cache.repo_object.max_items = 100
18 beaker.cache.repo_object.max_items = 100
19 # cache auto-expires after N seconds
19 # cache auto-expires after N seconds
20 beaker.cache.repo_object.expire = 300
20 beaker.cache.repo_object.expire = 300
21 beaker.cache.repo_object.enabled = true
21 beaker.cache.repo_object.enabled = true
22
22
23 [server:main]
23 [server:main]
24 ## COMMON ##
24 ## COMMON ##
25 host = 0.0.0.0
25 host = 0.0.0.0
26 port = 9900
26 port = 9900
27
27
28 use = egg:waitress#main
28 use = egg:waitress#main
29
29
30
30
31 ################################
31 ################################
32 ### LOGGING CONFIGURATION ####
32 ### LOGGING CONFIGURATION ####
33 ################################
33 ################################
34 [loggers]
34 [loggers]
35 keys = root, vcsserver, pyro4, beaker
35 keys = root, vcsserver, beaker
36
36
37 [handlers]
37 [handlers]
38 keys = console
38 keys = console
39
39
40 [formatters]
40 [formatters]
41 keys = generic
41 keys = generic
42
42
43 #############
43 #############
44 ## LOGGERS ##
44 ## LOGGERS ##
45 #############
45 #############
46 [logger_root]
46 [logger_root]
47 level = NOTSET
47 level = NOTSET
48 handlers = console
48 handlers = console
49
49
50 [logger_vcsserver]
50 [logger_vcsserver]
51 level = DEBUG
51 level = DEBUG
52 handlers =
52 handlers =
53 qualname = vcsserver
53 qualname = vcsserver
54 propagate = 1
54 propagate = 1
55
55
56 [logger_beaker]
56 [logger_beaker]
57 level = DEBUG
57 level = DEBUG
58 handlers =
58 handlers =
59 qualname = beaker
59 qualname = beaker
60 propagate = 1
60 propagate = 1
61
61
62 [logger_pyro4]
63 level = DEBUG
64 handlers =
65 qualname = Pyro4
66 propagate = 1
67
68
62
69 ##############
63 ##############
70 ## HANDLERS ##
64 ## HANDLERS ##
71 ##############
65 ##############
72
66
73 [handler_console]
67 [handler_console]
74 class = StreamHandler
68 class = StreamHandler
75 args = (sys.stderr,)
69 args = (sys.stderr,)
76 level = DEBUG
70 level = DEBUG
77 formatter = generic
71 formatter = generic
78
72
79 ################
73 ################
80 ## FORMATTERS ##
74 ## FORMATTERS ##
81 ################
75 ################
82
76
83 [formatter_generic]
77 [formatter_generic]
84 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
78 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
85 datefmt = %Y-%m-%d %H:%M:%S
79 datefmt = %Y-%m-%d %H:%M:%S
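In both config files the [loggers]/[handlers]/[formatters] sections follow the stdlib logging fileConfig format, so dropping pyro4 from the keys list and deleting [logger_pyro4] is all that is needed to retire the Pyro4 logger. A minimal sketch of how such an ini is loaded (assuming it is saved as vcsserver.ini; paste-based servers do the equivalent at startup):

    import logging
    import logging.config

    # Reads the [loggers]/[handlers]/[formatters] sections; other sections are ignored.
    logging.config.fileConfig("vcsserver.ini", disable_existing_loggers=False)
    logging.getLogger("vcsserver").debug("logging configured from the ini file")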
@@ -1,109 +1,102 b''
1 ################################################################################
1 ################################################################################
2 # RhodeCode VCSServer with HTTP Backend - configuration #
2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
3 # #
4 ################################################################################
4 ################################################################################
5
5
6
6
7 [server:main]
7 [server:main]
8 ## COMMON ##
8 ## COMMON ##
9 host = 127.0.0.1
9 host = 127.0.0.1
10 port = 9900
10 port = 9900
11
11
12
12
13 ##########################
13 ##########################
14 ## GUNICORN WSGI SERVER ##
14 ## GUNICORN WSGI SERVER ##
15 ##########################
15 ##########################
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
16 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 use = egg:gunicorn#main
17 use = egg:gunicorn#main
18 ## Sets the number of process workers. Recommended
18 ## Sets the number of process workers. Recommended
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
19 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 workers = 2
20 workers = 2
21 ## process name
21 ## process name
22 proc_name = rhodecode_vcsserver
22 proc_name = rhodecode_vcsserver
23 ## type of worker class, one of sync, gevent
23 ## type of worker class, currently `sync` is the only option allowed.
24 ## recommended for bigger setup is using of of other than sync one
25 worker_class = sync
24 worker_class = sync
26 ## The maximum number of simultaneous clients. Valid only for Gevent
25 ## The maximum number of simultaneous clients. Valid only for Gevent
27 #worker_connections = 10
26 #worker_connections = 10
28 ## max number of requests that worker will handle before being gracefully
27 ## max number of requests that worker will handle before being gracefully
29 ## restarted, could prevent memory leaks
28 ## restarted, could prevent memory leaks
30 max_requests = 1000
29 max_requests = 1000
31 max_requests_jitter = 30
30 max_requests_jitter = 30
32 ## amount of time a worker can spend with handling a request before it
31 ## amount of time a worker can spend with handling a request before it
33 ## gets killed and restarted. Set to 6hrs
32 ## gets killed and restarted. Set to 6hrs
34 timeout = 21600
33 timeout = 21600
35
34
36
35
37 [app:main]
36 [app:main]
38 use = egg:rhodecode-vcsserver
37 use = egg:rhodecode-vcsserver
39
38
40 pyramid.default_locale_name = en
39 pyramid.default_locale_name = en
41 pyramid.includes =
40 pyramid.includes =
42
41
43 ## default locale used by VCS systems
42 ## default locale used by VCS systems
44 locale = en_US.UTF-8
43 locale = en_US.UTF-8
45
44
46 # cache regions, please don't change
45 # cache regions, please don't change
47 beaker.cache.regions = repo_object
46 beaker.cache.regions = repo_object
48 beaker.cache.repo_object.type = memorylru
47 beaker.cache.repo_object.type = memorylru
49 beaker.cache.repo_object.max_items = 100
48 beaker.cache.repo_object.max_items = 100
50 # cache auto-expires after N seconds
49 # cache auto-expires after N seconds
51 beaker.cache.repo_object.expire = 300
50 beaker.cache.repo_object.expire = 300
52 beaker.cache.repo_object.enabled = true
51 beaker.cache.repo_object.enabled = true
53
52
54
53
55 ################################
54 ################################
56 ### LOGGING CONFIGURATION ####
55 ### LOGGING CONFIGURATION ####
57 ################################
56 ################################
58 [loggers]
57 [loggers]
59 keys = root, vcsserver, pyro4, beaker
58 keys = root, vcsserver, beaker
60
59
61 [handlers]
60 [handlers]
62 keys = console
61 keys = console
63
62
64 [formatters]
63 [formatters]
65 keys = generic
64 keys = generic
66
65
67 #############
66 #############
68 ## LOGGERS ##
67 ## LOGGERS ##
69 #############
68 #############
70 [logger_root]
69 [logger_root]
71 level = NOTSET
70 level = NOTSET
72 handlers = console
71 handlers = console
73
72
74 [logger_vcsserver]
73 [logger_vcsserver]
75 level = DEBUG
74 level = DEBUG
76 handlers =
75 handlers =
77 qualname = vcsserver
76 qualname = vcsserver
78 propagate = 1
77 propagate = 1
79
78
80 [logger_beaker]
79 [logger_beaker]
81 level = DEBUG
80 level = DEBUG
82 handlers =
81 handlers =
83 qualname = beaker
82 qualname = beaker
84 propagate = 1
83 propagate = 1
85
84
86 [logger_pyro4]
87 level = DEBUG
88 handlers =
89 qualname = Pyro4
90 propagate = 1
91
92
85
93 ##############
86 ##############
94 ## HANDLERS ##
87 ## HANDLERS ##
95 ##############
88 ##############
96
89
97 [handler_console]
90 [handler_console]
98 class = StreamHandler
91 class = StreamHandler
99 args = (sys.stderr,)
92 args = (sys.stderr,)
100 level = DEBUG
93 level = DEBUG
101 formatter = generic
94 formatter = generic
102
95
103 ################
96 ################
104 ## FORMATTERS ##
97 ## FORMATTERS ##
105 ################
98 ################
106
99
107 [formatter_generic]
100 [formatter_generic]
108 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
101 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
109 datefmt = %Y-%m-%d %H:%M:%S
\ No newline at end of file
102 datefmt = %Y-%m-%d %H:%M:%S
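The gunicorn comments above quote the usual worker-count rule of thumb; a quick check of the arithmetic (the helper below is only an illustration, not part of the config):

    import multiprocessing

    def recommended_workers(cpus=None):
        # Rule quoted in the config: (2 * NUMBER_OF_CPUS + 1), e.g. 2 CPUs -> 5 workers.
        cpus = cpus or multiprocessing.cpu_count()
        return 2 * cpus + 1

    print(recommended_workers(2))  # -> 5
    print(recommended_workers())   # suggested value for the current host

The shipped workers = 2 is a conservative default compared with that rule.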
@@ -1,154 +1,157 b''
1 # Nix environment for the community edition
1 # Nix environment for the community edition
2 #
2 #
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 # derivation. For advanced tweaks to pimp up the development environment we use
4 # derivation. For advanced tweaks to pimp up the development environment we use
5 # "shell.nix" so that it does not have to clutter this file.
5 # "shell.nix" so that it does not have to clutter this file.
6
6
7 { pkgs ? (import <nixpkgs> {})
7 { pkgs ? (import <nixpkgs> {})
8 , pythonPackages ? "python27Packages"
8 , pythonPackages ? "python27Packages"
9 , pythonExternalOverrides ? self: super: {}
9 , pythonExternalOverrides ? self: super: {}
10 , doCheck ? true
10 , doCheck ? true
11 }:
11 }:
12
12
13 let pkgs_ = pkgs; in
13 let pkgs_ = pkgs; in
14
14
15 let
15 let
16 pkgs = pkgs_.overridePackages (self: super: {
16 pkgs = pkgs_.overridePackages (self: super: {
17 # bump GIT version
17 # bump GIT version
18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 name = "git-2.9.3";
19 name = "git-2.9.3";
20 src = pkgs.fetchurl {
20 src = pkgs.fetchurl {
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.9.3.tar.xz";
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.9.3.tar.xz";
22 sha256 = "0qzs681a64k3shh5p0rg41l1z16fbk5sj0xga45k34hp1hsp654z";
22 sha256 = "0qzs681a64k3shh5p0rg41l1z16fbk5sj0xga45k34hp1hsp654z";
23 };
23 };
24
24
25 });
25 });
26
26
27 # Override subversion derivation to
27 # Override subversion derivation to
28 # - activate python bindings
28 # - activate python bindings
29 subversion = let
29 subversion = let
30 subversionWithPython = super.subversion.override {
30 subversionWithPython = super.subversion.override {
31 httpSupport = true;
31 httpSupport = true;
32 pythonBindings = true;
32 pythonBindings = true;
33 python = self.python27Packages.python;
33 python = self.python27Packages.python;
34 };
34 };
35
35
36 in
36 in
37
37
38 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
38 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
39 patches = (oldAttrs.patches or []) ++
39 patches = (oldAttrs.patches or []) ++
40 pkgs.lib.optionals pkgs.stdenv.isDarwin [
40 pkgs.lib.optionals pkgs.stdenv.isDarwin [
41 # johbo: "import svn.client" fails on darwin currently.
41 # johbo: "import svn.client" fails on darwin currently.
42 ./pkgs/subversion-1.9.4-darwin.patch
42 ./pkgs/subversion-1.9.4-darwin.patch
43 ];
43 ];
44 });
44 });
45
45
46 });
46 });
47
47
48 inherit (pkgs.lib) fix extends;
48 inherit (pkgs.lib) fix extends;
49 basePythonPackages = with builtins; if isAttrs pythonPackages
49 basePythonPackages = with builtins; if isAttrs pythonPackages
50 then pythonPackages
50 then pythonPackages
51 else getAttr pythonPackages pkgs;
51 else getAttr pythonPackages pkgs;
52
52
53 elem = builtins.elem;
53 elem = builtins.elem;
54 basename = path: with pkgs.lib; last (splitString "/" path);
54 basename = path: with pkgs.lib; last (splitString "/" path);
55 startsWith = prefix: full: let
55 startsWith = prefix: full: let
56 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
56 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
57 in actualPrefix == prefix;
57 in actualPrefix == prefix;
58
58
59 src-filter = path: type: with pkgs.lib;
59 src-filter = path: type: with pkgs.lib;
60 let
60 let
61 ext = last (splitString "." path);
61 ext = last (splitString "." path);
62 in
62 in
63 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
63 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
64 "node_modules" "build" "data" "tmp"] &&
64 "node_modules" "build" "data" "tmp"] &&
65 !elem ext ["egg-info" "pyc"] &&
65 !elem ext ["egg-info" "pyc"] &&
66 !startsWith "result" path;
66 !startsWith "result" path;
67
67
68 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
68 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
69
69
70 pythonGeneratedPackages = self: basePythonPackages.override (a: {
70 pythonGeneratedPackages = self: basePythonPackages.override (a: {
71 inherit self;
71 inherit self;
72 }) // (scopedImport {
72 }) // (scopedImport {
73 self = self;
73 self = self;
74 super = basePythonPackages;
74 super = basePythonPackages;
75 inherit pkgs;
75 inherit pkgs;
76 inherit (pkgs) fetchurl fetchgit;
76 inherit (pkgs) fetchurl fetchgit;
77 } ./pkgs/python-packages.nix);
77 } ./pkgs/python-packages.nix);
78
78
79 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
79 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
80 inherit basePythonPackages pkgs;
80 inherit basePythonPackages pkgs;
81 };
81 };
82
82
83 version = builtins.readFile ./vcsserver/VERSION;
83 version = builtins.readFile ./vcsserver/VERSION;
84
84
85 pythonLocalOverrides = self: super: {
85 pythonLocalOverrides = self: super: {
86 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
86 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
87 inherit doCheck version;
87 inherit doCheck version;
88
88
89 name = "rhodecode-vcsserver-${version}";
89 name = "rhodecode-vcsserver-${version}";
90 releaseName = "RhodeCodeVCSServer-${version}";
90 releaseName = "RhodeCodeVCSServer-${version}";
91 src = rhodecode-vcsserver-src;
91 src = rhodecode-vcsserver-src;
92 dontStrip = true; # prevent strip, we don't need it.
92 dontStrip = true; # prevent strip, we don't need it.
93
93
94 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
94 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
95 pkgs.git
95 pkgs.git
96 pkgs.subversion
96 pkgs.subversion
97 ]);
97 ]);
98
98
99 # TODO: johbo: Make a nicer way to expose the parts. Maybe
99 # TODO: johbo: Make a nicer way to expose the parts. Maybe
100 # pkgs/default.nix?
100 # pkgs/default.nix?
101 passthru = {
101 passthru = {
102 pythonPackages = self;
102 pythonPackages = self;
103 };
103 };
104
104
105 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
105 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
106 preCheck = ''
106 preCheck = ''
107 export PATH="$out/bin:$PATH"
107 export PATH="$out/bin:$PATH"
108 '';
108 '';
109
109
110 # put custom attrs here
110 # put custom attrs here
111 checkPhase = ''
111 checkPhase = ''
112 runHook preCheck
112 runHook preCheck
113 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
113 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
114 runHook postCheck
114 runHook postCheck
115 '';
115 '';
116
116
117 postInstall = ''
117 postInstall = ''
118 echo "Writing meta information for rccontrol to nix-support/rccontrol"
118 echo "Writing meta information for rccontrol to nix-support/rccontrol"
119 mkdir -p $out/nix-support/rccontrol
119 mkdir -p $out/nix-support/rccontrol
120 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
120 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
121 echo "DONE: Meta information for rccontrol written"
121 echo "DONE: Meta information for rccontrol written"
122
122
123 # python based programs need to be wrapped
123 ln -s ${self.pyramid}/bin/* $out/bin/
124 ln -s ${self.pyramid}/bin/* $out/bin/
124 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
125 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
125
126
126 # Symlink version control utilities
127 # Symlink version control utilities
127 #
128 #
128 # We ensure that always the correct version is available as a symlink.
129 # We ensure that always the correct version is available as a symlink.
129 # So that users calling them via the profile path will always use the
130 # So that users calling them via the profile path will always use the
130 # correct version.
131 # correct version.
131 ln -s ${pkgs.git}/bin/git $out/bin
132 ln -s ${pkgs.git}/bin/git $out/bin
132 ln -s ${self.mercurial}/bin/hg $out/bin
133 ln -s ${self.mercurial}/bin/hg $out/bin
133 ln -s ${pkgs.subversion}/bin/svn* $out/bin
134 ln -s ${pkgs.subversion}/bin/svn* $out/bin
134
135
135 for file in $out/bin/*; do
136 for file in $out/bin/*;
137 do
136 wrapProgram $file \
138 wrapProgram $file \
137 --set PATH $PATH \
139 --set PATH $PATH \
138 --set PYTHONPATH $PYTHONPATH \
140 --set PYTHONPATH $PYTHONPATH \
139 --set PYTHONHASHSEED random
141 --set PYTHONHASHSEED random
140 done
142 done
143
141 '';
144 '';
142
145
143 });
146 });
144 };
147 };
145
148
146 # Apply all overrides and fix the final package set
149 # Apply all overrides and fix the final package set
147 myPythonPackages =
150 myPythonPackages =
148 (fix
151 (fix
149 (extends pythonExternalOverrides
152 (extends pythonExternalOverrides
150 (extends pythonLocalOverrides
153 (extends pythonLocalOverrides
151 (extends pythonOverrides
154 (extends pythonOverrides
152 pythonGeneratedPackages))));
155 pythonGeneratedPackages))));
153
156
154 in myPythonPackages.rhodecode-vcsserver
157 in myPythonPackages.rhodecode-vcsserver
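The src-filter in the Nix expression keeps the derivation source lean by excluding VCS metadata, build output and caches. For illustration only (a Python paraphrase of the Nix predicate, not something the build uses):

    import os

    EXCLUDED_NAMES = {".hg", ".git", "__pycache__", ".eggs",
                      "node_modules", "build", "data", "tmp"}
    EXCLUDED_EXTS = {"egg-info", "pyc"}

    def keep(path):
        # Mirrors src-filter: drop excluded basenames/extensions and "result*" outputs.
        name = os.path.basename(path)
        ext = path.rsplit(".", 1)[-1]
        return (name not in EXCLUDED_NAMES
                and ext not in EXCLUDED_EXTS
                and not path.startswith("result"))

    print(keep("vcsserver/main.py"))   # True
    print(keep("vcsserver/main.pyc"))  # False
    print(keep("result"))              # False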
@@ -1,812 +1,799 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.4.0
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 {
5 Beaker = super.buildPythonPackage {
5 Beaker = super.buildPythonPackage {
6 name = "Beaker-1.7.0";
6 name = "Beaker-1.7.0";
7 buildInputs = with self; [];
7 buildInputs = with self; [];
8 doCheck = false;
8 doCheck = false;
9 propagatedBuildInputs = with self; [];
9 propagatedBuildInputs = with self; [];
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
12 md5 = "386be3f7fe427358881eee4622b428b3";
12 md5 = "386be3f7fe427358881eee4622b428b3";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
16 };
17 };
17 };
18 Jinja2 = super.buildPythonPackage {
18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.8";
19 name = "Jinja2-2.8";
20 buildInputs = with self; [];
20 buildInputs = with self; [];
21 doCheck = false;
21 doCheck = false;
22 propagatedBuildInputs = with self; [MarkupSafe];
22 propagatedBuildInputs = with self; [MarkupSafe];
23 src = fetchurl {
23 src = fetchurl {
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
26 };
26 };
27 meta = {
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
29 };
30 };
30 };
31 Mako = super.buildPythonPackage {
31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.6";
32 name = "Mako-1.0.6";
33 buildInputs = with self; [];
33 buildInputs = with self; [];
34 doCheck = false;
34 doCheck = false;
35 propagatedBuildInputs = with self; [MarkupSafe];
35 propagatedBuildInputs = with self; [MarkupSafe];
36 src = fetchurl {
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
39 };
39 };
40 meta = {
40 meta = {
41 license = [ pkgs.lib.licenses.mit ];
41 license = [ pkgs.lib.licenses.mit ];
42 };
42 };
43 };
43 };
44 MarkupSafe = super.buildPythonPackage {
44 MarkupSafe = super.buildPythonPackage {
45 name = "MarkupSafe-0.23";
45 name = "MarkupSafe-0.23";
46 buildInputs = with self; [];
46 buildInputs = with self; [];
47 doCheck = false;
47 doCheck = false;
48 propagatedBuildInputs = with self; [];
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
49 src = fetchurl {
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
52 };
52 };
53 meta = {
53 meta = {
54 license = [ pkgs.lib.licenses.bsdOriginal ];
54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 };
55 };
56 };
56 };
57 PasteDeploy = super.buildPythonPackage {
57 PasteDeploy = super.buildPythonPackage {
58 name = "PasteDeploy-1.5.2";
58 name = "PasteDeploy-1.5.2";
59 buildInputs = with self; [];
59 buildInputs = with self; [];
60 doCheck = false;
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 md5 = "352b7205c78c8de4987578d19431af3b";
64 md5 = "352b7205c78c8de4987578d19431af3b";
65 };
65 };
66 meta = {
66 meta = {
67 license = [ pkgs.lib.licenses.mit ];
67 license = [ pkgs.lib.licenses.mit ];
68 };
68 };
69 };
69 };
70 Pyro4 = super.buildPythonPackage {
71 name = "Pyro4-4.41";
72 buildInputs = with self; [];
73 doCheck = false;
74 propagatedBuildInputs = with self; [serpent];
75 src = fetchurl {
76 url = "https://pypi.python.org/packages/56/2b/89b566b4bf3e7f8ba790db2d1223852f8cb454c52cab7693dd41f608ca2a/Pyro4-4.41.tar.gz";
77 md5 = "ed69e9bfafa9c06c049a87cb0c4c2b6c";
78 };
79 meta = {
80 license = [ pkgs.lib.licenses.mit ];
81 };
82 };
83 WebOb = super.buildPythonPackage {
70 WebOb = super.buildPythonPackage {
84 name = "WebOb-1.3.1";
71 name = "WebOb-1.3.1";
85 buildInputs = with self; [];
72 buildInputs = with self; [];
86 doCheck = false;
73 doCheck = false;
87 propagatedBuildInputs = with self; [];
74 propagatedBuildInputs = with self; [];
88 src = fetchurl {
75 src = fetchurl {
89 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
76 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
90 md5 = "20918251c5726956ba8fef22d1556177";
77 md5 = "20918251c5726956ba8fef22d1556177";
91 };
78 };
92 meta = {
79 meta = {
93 license = [ pkgs.lib.licenses.mit ];
80 license = [ pkgs.lib.licenses.mit ];
94 };
81 };
95 };
82 };
96 WebTest = super.buildPythonPackage {
83 WebTest = super.buildPythonPackage {
97 name = "WebTest-1.4.3";
84 name = "WebTest-1.4.3";
98 buildInputs = with self; [];
85 buildInputs = with self; [];
99 doCheck = false;
86 doCheck = false;
100 propagatedBuildInputs = with self; [WebOb];
87 propagatedBuildInputs = with self; [WebOb];
101 src = fetchurl {
88 src = fetchurl {
102 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
89 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
103 md5 = "631ce728bed92c681a4020a36adbc353";
90 md5 = "631ce728bed92c681a4020a36adbc353";
104 };
91 };
105 meta = {
92 meta = {
106 license = [ pkgs.lib.licenses.mit ];
93 license = [ pkgs.lib.licenses.mit ];
107 };
94 };
108 };
95 };
109 backports.shutil-get-terminal-size = super.buildPythonPackage {
96 backports.shutil-get-terminal-size = super.buildPythonPackage {
110 name = "backports.shutil-get-terminal-size-1.0.0";
97 name = "backports.shutil-get-terminal-size-1.0.0";
111 buildInputs = with self; [];
98 buildInputs = with self; [];
112 doCheck = false;
99 doCheck = false;
113 propagatedBuildInputs = with self; [];
100 propagatedBuildInputs = with self; [];
114 src = fetchurl {
101 src = fetchurl {
115 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
116 md5 = "03267762480bd86b50580dc19dff3c66";
103 md5 = "03267762480bd86b50580dc19dff3c66";
117 };
104 };
118 meta = {
105 meta = {
119 license = [ pkgs.lib.licenses.mit ];
106 license = [ pkgs.lib.licenses.mit ];
120 };
107 };
121 };
108 };
122 configobj = super.buildPythonPackage {
109 configobj = super.buildPythonPackage {
123 name = "configobj-5.0.6";
110 name = "configobj-5.0.6";
124 buildInputs = with self; [];
111 buildInputs = with self; [];
125 doCheck = false;
112 doCheck = false;
126 propagatedBuildInputs = with self; [six];
113 propagatedBuildInputs = with self; [six];
127 src = fetchurl {
114 src = fetchurl {
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
115 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
116 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
130 };
117 };
131 meta = {
118 meta = {
132 license = [ pkgs.lib.licenses.bsdOriginal ];
119 license = [ pkgs.lib.licenses.bsdOriginal ];
133 };
120 };
134 };
121 };
135 cov-core = super.buildPythonPackage {
122 cov-core = super.buildPythonPackage {
136 name = "cov-core-1.15.0";
123 name = "cov-core-1.15.0";
137 buildInputs = with self; [];
124 buildInputs = with self; [];
138 doCheck = false;
125 doCheck = false;
139 propagatedBuildInputs = with self; [coverage];
126 propagatedBuildInputs = with self; [coverage];
140 src = fetchurl {
127 src = fetchurl {
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
128 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
129 md5 = "f519d4cb4c4e52856afb14af52919fe6";
143 };
130 };
144 meta = {
131 meta = {
145 license = [ pkgs.lib.licenses.mit ];
132 license = [ pkgs.lib.licenses.mit ];
146 };
133 };
147 };
134 };
148 coverage = super.buildPythonPackage {
135 coverage = super.buildPythonPackage {
149 name = "coverage-3.7.1";
136 name = "coverage-3.7.1";
150 buildInputs = with self; [];
137 buildInputs = with self; [];
151 doCheck = false;
138 doCheck = false;
152 propagatedBuildInputs = with self; [];
139 propagatedBuildInputs = with self; [];
153 src = fetchurl {
140 src = fetchurl {
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
141 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
142 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
156 };
143 };
157 meta = {
144 meta = {
158 license = [ pkgs.lib.licenses.bsdOriginal ];
145 license = [ pkgs.lib.licenses.bsdOriginal ];
159 };
146 };
160 };
147 };
161 decorator = super.buildPythonPackage {
148 decorator = super.buildPythonPackage {
162 name = "decorator-4.0.11";
149 name = "decorator-4.0.11";
163 buildInputs = with self; [];
150 buildInputs = with self; [];
164 doCheck = false;
151 doCheck = false;
165 propagatedBuildInputs = with self; [];
152 propagatedBuildInputs = with self; [];
166 src = fetchurl {
153 src = fetchurl {
167 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
154 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
168 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
155 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
169 };
156 };
170 meta = {
157 meta = {
171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
158 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
172 };
159 };
173 };
160 };
174 dulwich = super.buildPythonPackage {
161 dulwich = super.buildPythonPackage {
175 name = "dulwich-0.13.0";
162 name = "dulwich-0.13.0";
176 buildInputs = with self; [];
163 buildInputs = with self; [];
177 doCheck = false;
164 doCheck = false;
178 propagatedBuildInputs = with self; [];
165 propagatedBuildInputs = with self; [];
179 src = fetchurl {
166 src = fetchurl {
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
167 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
168 md5 = "6dede0626657c2bd08f48ca1221eea91";
182 };
169 };
183 meta = {
170 meta = {
184 license = [ pkgs.lib.licenses.gpl2Plus ];
171 license = [ pkgs.lib.licenses.gpl2Plus ];
185 };
172 };
186 };
173 };
187 enum34 = super.buildPythonPackage {
174 enum34 = super.buildPythonPackage {
188 name = "enum34-1.1.6";
175 name = "enum34-1.1.6";
189 buildInputs = with self; [];
176 buildInputs = with self; [];
190 doCheck = false;
177 doCheck = false;
191 propagatedBuildInputs = with self; [];
178 propagatedBuildInputs = with self; [];
192 src = fetchurl {
179 src = fetchurl {
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
180 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
181 md5 = "5f13a0841a61f7fc295c514490d120d0";
195 };
182 };
196 meta = {
183 meta = {
197 license = [ pkgs.lib.licenses.bsdOriginal ];
184 license = [ pkgs.lib.licenses.bsdOriginal ];
198 };
185 };
199 };
186 };
200 gevent = super.buildPythonPackage {
187 gevent = super.buildPythonPackage {
201 name = "gevent-1.1.2";
188 name = "gevent-1.1.2";
202 buildInputs = with self; [];
189 buildInputs = with self; [];
203 doCheck = false;
190 doCheck = false;
204 propagatedBuildInputs = with self; [greenlet];
191 propagatedBuildInputs = with self; [greenlet];
205 src = fetchurl {
192 src = fetchurl {
206 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
193 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
207 md5 = "bb32a2f852a4997138014d5007215c6e";
194 md5 = "bb32a2f852a4997138014d5007215c6e";
208 };
195 };
209 meta = {
196 meta = {
210 license = [ pkgs.lib.licenses.mit ];
197 license = [ pkgs.lib.licenses.mit ];
211 };
198 };
212 };
199 };
213 gprof2dot = super.buildPythonPackage {
200 gprof2dot = super.buildPythonPackage {
214 name = "gprof2dot-2016.10.13";
201 name = "gprof2dot-2016.10.13";
215 buildInputs = with self; [];
202 buildInputs = with self; [];
216 doCheck = false;
203 doCheck = false;
217 propagatedBuildInputs = with self; [];
204 propagatedBuildInputs = with self; [];
218 src = fetchurl {
205 src = fetchurl {
219 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
206 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
220 md5 = "0125401f15fd2afe1df686a76c64a4fd";
207 md5 = "0125401f15fd2afe1df686a76c64a4fd";
221 };
208 };
222 meta = {
209 meta = {
223 license = [ { fullName = "LGPL"; } ];
210 license = [ { fullName = "LGPL"; } ];
224 };
211 };
225 };
212 };
226 greenlet = super.buildPythonPackage {
213 greenlet = super.buildPythonPackage {
227 name = "greenlet-0.4.10";
214 name = "greenlet-0.4.10";
228 buildInputs = with self; [];
215 buildInputs = with self; [];
229 doCheck = false;
216 doCheck = false;
230 propagatedBuildInputs = with self; [];
217 propagatedBuildInputs = with self; [];
231 src = fetchurl {
218 src = fetchurl {
232 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
219 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
233 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
220 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
234 };
221 };
235 meta = {
222 meta = {
236 license = [ pkgs.lib.licenses.mit ];
223 license = [ pkgs.lib.licenses.mit ];
237 };
224 };
238 };
225 };
239 gunicorn = super.buildPythonPackage {
226 gunicorn = super.buildPythonPackage {
240 name = "gunicorn-19.6.0";
227 name = "gunicorn-19.6.0";
241 buildInputs = with self; [];
228 buildInputs = with self; [];
242 doCheck = false;
229 doCheck = false;
243 propagatedBuildInputs = with self; [];
230 propagatedBuildInputs = with self; [];
244 src = fetchurl {
231 src = fetchurl {
245 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
232 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
246 md5 = "338e5e8a83ea0f0625f768dba4597530";
233 md5 = "338e5e8a83ea0f0625f768dba4597530";
247 };
234 };
248 meta = {
235 meta = {
249 license = [ pkgs.lib.licenses.mit ];
236 license = [ pkgs.lib.licenses.mit ];
250 };
237 };
251 };
238 };
239 hg-evolve = super.buildPythonPackage {
240 name = "hg-evolve-6.0.1";
241 buildInputs = with self; [];
242 doCheck = false;
243 propagatedBuildInputs = with self; [];
244 src = fetchurl {
245 url = "https://pypi.python.org/packages/c4/31/0673a5657c201ebb46e63c4bba8668f96cf5d7a8a0f8a91892d022ccc32b/hg-evolve-6.0.1.tar.gz";
246 md5 = "9c1ce7ac24792abc0eedee09a3344d06";
247 };
248 meta = {
249 license = [ { fullName = "GPLv2+"; } ];
250 };
251 };
252 hgsubversion = super.buildPythonPackage {
252 hgsubversion = super.buildPythonPackage {
253 name = "hgsubversion-1.8.6";
253 name = "hgsubversion-1.8.6";
254 buildInputs = with self; [];
254 buildInputs = with self; [];
255 doCheck = false;
255 doCheck = false;
256 propagatedBuildInputs = with self; [mercurial subvertpy];
256 propagatedBuildInputs = with self; [mercurial subvertpy];
257 src = fetchurl {
257 src = fetchurl {
258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
259 md5 = "9310cb266031cf8d0779885782a84a5b";
259 md5 = "9310cb266031cf8d0779885782a84a5b";
260 };
260 };
261 meta = {
261 meta = {
262 license = [ pkgs.lib.licenses.gpl1 ];
262 license = [ pkgs.lib.licenses.gpl1 ];
263 };
263 };
264 };
264 };
265 infrae.cache = super.buildPythonPackage {
265 infrae.cache = super.buildPythonPackage {
266 name = "infrae.cache-1.0.1";
266 name = "infrae.cache-1.0.1";
267 buildInputs = with self; [];
267 buildInputs = with self; [];
268 doCheck = false;
268 doCheck = false;
269 propagatedBuildInputs = with self; [Beaker repoze.lru];
269 propagatedBuildInputs = with self; [Beaker repoze.lru];
270 src = fetchurl {
270 src = fetchurl {
271 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
271 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
272 md5 = "b09076a766747e6ed2a755cc62088e32";
272 md5 = "b09076a766747e6ed2a755cc62088e32";
273 };
273 };
274 meta = {
274 meta = {
275 license = [ pkgs.lib.licenses.zpt21 ];
275 license = [ pkgs.lib.licenses.zpt21 ];
276 };
276 };
277 };
277 };
278 ipdb = super.buildPythonPackage {
278 ipdb = super.buildPythonPackage {
279 name = "ipdb-0.10.1";
279 name = "ipdb-0.10.1";
280 buildInputs = with self; [];
280 buildInputs = with self; [];
281 doCheck = false;
281 doCheck = false;
282 propagatedBuildInputs = with self; [ipython setuptools];
282 propagatedBuildInputs = with self; [ipython setuptools];
283 src = fetchurl {
283 src = fetchurl {
284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
286 };
286 };
287 meta = {
287 meta = {
288 license = [ pkgs.lib.licenses.bsdOriginal ];
288 license = [ pkgs.lib.licenses.bsdOriginal ];
289 };
289 };
290 };
290 };
291 ipython = super.buildPythonPackage {
291 ipython = super.buildPythonPackage {
292 name = "ipython-5.1.0";
292 name = "ipython-5.1.0";
293 buildInputs = with self; [];
293 buildInputs = with self; [];
294 doCheck = false;
294 doCheck = false;
295 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
295 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
296 src = fetchurl {
296 src = fetchurl {
297 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
297 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
298 md5 = "47c8122420f65b58784cb4b9b4af35e3";
298 md5 = "47c8122420f65b58784cb4b9b4af35e3";
299 };
299 };
300 meta = {
300 meta = {
301 license = [ pkgs.lib.licenses.bsdOriginal ];
301 license = [ pkgs.lib.licenses.bsdOriginal ];
302 };
302 };
303 };
303 };
304 ipython-genutils = super.buildPythonPackage {
304 ipython-genutils = super.buildPythonPackage {
305 name = "ipython-genutils-0.1.0";
305 name = "ipython-genutils-0.2.0";
306 buildInputs = with self; [];
306 buildInputs = with self; [];
307 doCheck = false;
307 doCheck = false;
308 propagatedBuildInputs = with self; [];
308 propagatedBuildInputs = with self; [];
309 src = fetchurl {
309 src = fetchurl {
310 url = "https://pypi.python.org/packages/71/b7/a64c71578521606edbbce15151358598f3dfb72a3431763edc2baf19e71f/ipython_genutils-0.1.0.tar.gz";
310 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
311 md5 = "9a8afbe0978adbcbfcb3b35b2d015a56";
311 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
312 };
312 };
313 meta = {
313 meta = {
314 license = [ pkgs.lib.licenses.bsdOriginal ];
314 license = [ pkgs.lib.licenses.bsdOriginal ];
315 };
315 };
316 };
316 };
317 mercurial = super.buildPythonPackage {
317 mercurial = super.buildPythonPackage {
318 name = "mercurial-4.1.2";
318 name = "mercurial-4.1.2";
319 buildInputs = with self; [];
319 buildInputs = with self; [];
320 doCheck = false;
320 doCheck = false;
321 propagatedBuildInputs = with self; [];
321 propagatedBuildInputs = with self; [];
322 src = fetchurl {
322 src = fetchurl {
323 url = "https://pypi.python.org/packages/88/c1/f0501fd67f5e69346da41ee0bd7b2619ce4bbc9854bb645074c418b9941f/mercurial-4.1.2.tar.gz";
323 url = "https://pypi.python.org/packages/88/c1/f0501fd67f5e69346da41ee0bd7b2619ce4bbc9854bb645074c418b9941f/mercurial-4.1.2.tar.gz";
324 md5 = "934c99808bdc8385e074b902d59b0d93";
324 md5 = "934c99808bdc8385e074b902d59b0d93";
325 };
325 };
326 meta = {
326 meta = {
327 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
327 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
328 };
328 };
329 };
329 };
330 mock = super.buildPythonPackage {
330 mock = super.buildPythonPackage {
331 name = "mock-1.0.1";
331 name = "mock-1.0.1";
332 buildInputs = with self; [];
332 buildInputs = with self; [];
333 doCheck = false;
333 doCheck = false;
334 propagatedBuildInputs = with self; [];
334 propagatedBuildInputs = with self; [];
335 src = fetchurl {
335 src = fetchurl {
336 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
336 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
337 md5 = "869f08d003c289a97c1a6610faf5e913";
337 md5 = "869f08d003c289a97c1a6610faf5e913";
338 };
338 };
339 meta = {
339 meta = {
340 license = [ pkgs.lib.licenses.bsdOriginal ];
340 license = [ pkgs.lib.licenses.bsdOriginal ];
341 };
341 };
342 };
342 };
343 msgpack-python = super.buildPythonPackage {
343 msgpack-python = super.buildPythonPackage {
344 name = "msgpack-python-0.4.8";
344 name = "msgpack-python-0.4.8";
345 buildInputs = with self; [];
345 buildInputs = with self; [];
346 doCheck = false;
346 doCheck = false;
347 propagatedBuildInputs = with self; [];
347 propagatedBuildInputs = with self; [];
348 src = fetchurl {
348 src = fetchurl {
349 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
349 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
350 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
350 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
351 };
351 };
352 meta = {
352 meta = {
353 license = [ pkgs.lib.licenses.asl20 ];
353 license = [ pkgs.lib.licenses.asl20 ];
354 };
354 };
355 };
355 };
356 pathlib2 = super.buildPythonPackage {
356 pathlib2 = super.buildPythonPackage {
357 name = "pathlib2-2.1.0";
357 name = "pathlib2-2.1.0";
358 buildInputs = with self; [];
358 buildInputs = with self; [];
359 doCheck = false;
359 doCheck = false;
360 propagatedBuildInputs = with self; [six];
360 propagatedBuildInputs = with self; [six];
361 src = fetchurl {
361 src = fetchurl {
362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
364 };
364 };
365 meta = {
365 meta = {
366 license = [ pkgs.lib.licenses.mit ];
366 license = [ pkgs.lib.licenses.mit ];
367 };
367 };
368 };
368 };
369 pexpect = super.buildPythonPackage {
369 pexpect = super.buildPythonPackage {
370 name = "pexpect-4.2.1";
370 name = "pexpect-4.2.1";
371 buildInputs = with self; [];
371 buildInputs = with self; [];
372 doCheck = false;
372 doCheck = false;
373 propagatedBuildInputs = with self; [ptyprocess];
373 propagatedBuildInputs = with self; [ptyprocess];
374 src = fetchurl {
374 src = fetchurl {
375 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
375 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
376 md5 = "3694410001a99dff83f0b500a1ca1c95";
376 md5 = "3694410001a99dff83f0b500a1ca1c95";
377 };
377 };
378 meta = {
378 meta = {
379 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
379 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
380 };
380 };
381 };
381 };
382 pickleshare = super.buildPythonPackage {
382 pickleshare = super.buildPythonPackage {
383 name = "pickleshare-0.7.4";
383 name = "pickleshare-0.7.4";
384 buildInputs = with self; [];
384 buildInputs = with self; [];
385 doCheck = false;
385 doCheck = false;
386 propagatedBuildInputs = with self; [pathlib2];
386 propagatedBuildInputs = with self; [pathlib2];
387 src = fetchurl {
387 src = fetchurl {
388 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
388 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
389 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
389 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
390 };
390 };
391 meta = {
391 meta = {
392 license = [ pkgs.lib.licenses.mit ];
392 license = [ pkgs.lib.licenses.mit ];
393 };
393 };
394 };
394 };
395 prompt-toolkit = super.buildPythonPackage {
395 prompt-toolkit = super.buildPythonPackage {
396 name = "prompt-toolkit-1.0.9";
396 name = "prompt-toolkit-1.0.14";
397 buildInputs = with self; [];
397 buildInputs = with self; [];
398 doCheck = false;
398 doCheck = false;
399 propagatedBuildInputs = with self; [six wcwidth];
399 propagatedBuildInputs = with self; [six wcwidth];
400 src = fetchurl {
400 src = fetchurl {
401 url = "https://pypi.python.org/packages/83/14/5ac258da6c530eca02852ee25c7a9ff3ca78287bb4c198d0d0055845d856/prompt_toolkit-1.0.9.tar.gz";
401 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
402 md5 = "a39f91a54308fb7446b1a421c11f227c";
402 md5 = "f24061ae133ed32c6b764e92bd48c496";
403 };
403 };
404 meta = {
404 meta = {
405 license = [ pkgs.lib.licenses.bsdOriginal ];
405 license = [ pkgs.lib.licenses.bsdOriginal ];
406 };
406 };
407 };
407 };
408 ptyprocess = super.buildPythonPackage {
408 ptyprocess = super.buildPythonPackage {
409 name = "ptyprocess-0.5.1";
409 name = "ptyprocess-0.5.1";
410 buildInputs = with self; [];
410 buildInputs = with self; [];
411 doCheck = false;
411 doCheck = false;
412 propagatedBuildInputs = with self; [];
412 propagatedBuildInputs = with self; [];
413 src = fetchurl {
413 src = fetchurl {
414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
416 };
416 };
417 meta = {
417 meta = {
418 license = [ ];
418 license = [ ];
419 };
419 };
420 };
420 };
421 py = super.buildPythonPackage {
421 py = super.buildPythonPackage {
422 name = "py-1.4.31";
422 name = "py-1.4.31";
423 buildInputs = with self; [];
423 buildInputs = with self; [];
424 doCheck = false;
424 doCheck = false;
425 propagatedBuildInputs = with self; [];
425 propagatedBuildInputs = with self; [];
426 src = fetchurl {
426 src = fetchurl {
427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
429 };
429 };
430 meta = {
430 meta = {
431 license = [ pkgs.lib.licenses.mit ];
431 license = [ pkgs.lib.licenses.mit ];
432 };
432 };
433 };
433 };
434 pygments = super.buildPythonPackage {
434 pygments = super.buildPythonPackage {
435 name = "pygments-2.2.0";
435 name = "pygments-2.2.0";
436 buildInputs = with self; [];
436 buildInputs = with self; [];
437 doCheck = false;
437 doCheck = false;
438 propagatedBuildInputs = with self; [];
438 propagatedBuildInputs = with self; [];
439 src = fetchurl {
439 src = fetchurl {
440 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
440 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
441 md5 = "13037baca42f16917cbd5ad2fab50844";
441 md5 = "13037baca42f16917cbd5ad2fab50844";
442 };
442 };
443 meta = {
443 meta = {
444 license = [ pkgs.lib.licenses.bsdOriginal ];
444 license = [ pkgs.lib.licenses.bsdOriginal ];
445 };
445 };
446 };
446 };
447 pyramid = super.buildPythonPackage {
447 pyramid = super.buildPythonPackage {
448 name = "pyramid-1.7.4";
448 name = "pyramid-1.7.4";
449 buildInputs = with self; [];
449 buildInputs = with self; [];
450 doCheck = false;
450 doCheck = false;
451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
452 src = fetchurl {
452 src = fetchurl {
453 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
453 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
454 md5 = "6ef1dfdcff9136d04490410757c4c446";
454 md5 = "6ef1dfdcff9136d04490410757c4c446";
455 };
455 };
456 meta = {
456 meta = {
457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
458 };
458 };
459 };
459 };
460 pyramid-jinja2 = super.buildPythonPackage {
460 pyramid-jinja2 = super.buildPythonPackage {
461 name = "pyramid-jinja2-2.5";
461 name = "pyramid-jinja2-2.5";
462 buildInputs = with self; [];
462 buildInputs = with self; [];
463 doCheck = false;
463 doCheck = false;
464 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
464 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
465 src = fetchurl {
465 src = fetchurl {
466 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
466 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
467 md5 = "07cb6547204ac5e6f0b22a954ccee928";
467 md5 = "07cb6547204ac5e6f0b22a954ccee928";
468 };
468 };
469 meta = {
469 meta = {
470 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
470 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
471 };
471 };
472 };
472 };
473 pyramid-mako = super.buildPythonPackage {
473 pyramid-mako = super.buildPythonPackage {
474 name = "pyramid-mako-1.0.2";
474 name = "pyramid-mako-1.0.2";
475 buildInputs = with self; [];
475 buildInputs = with self; [];
476 doCheck = false;
476 doCheck = false;
477 propagatedBuildInputs = with self; [pyramid Mako];
477 propagatedBuildInputs = with self; [pyramid Mako];
478 src = fetchurl {
478 src = fetchurl {
479 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
479 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
480 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
480 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
481 };
481 };
482 meta = {
482 meta = {
483 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
483 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
484 };
484 };
485 };
485 };
486 pytest = super.buildPythonPackage {
486 pytest = super.buildPythonPackage {
487 name = "pytest-3.0.5";
487 name = "pytest-3.0.5";
488 buildInputs = with self; [];
488 buildInputs = with self; [];
489 doCheck = false;
489 doCheck = false;
490 propagatedBuildInputs = with self; [py];
490 propagatedBuildInputs = with self; [py];
491 src = fetchurl {
491 src = fetchurl {
492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
494 };
494 };
495 meta = {
495 meta = {
496 license = [ pkgs.lib.licenses.mit ];
496 license = [ pkgs.lib.licenses.mit ];
497 };
497 };
498 };
498 };
499 pytest-catchlog = super.buildPythonPackage {
499 pytest-catchlog = super.buildPythonPackage {
500 name = "pytest-catchlog-1.2.2";
500 name = "pytest-catchlog-1.2.2";
501 buildInputs = with self; [];
501 buildInputs = with self; [];
502 doCheck = false;
502 doCheck = false;
503 propagatedBuildInputs = with self; [py pytest];
503 propagatedBuildInputs = with self; [py pytest];
504 src = fetchurl {
504 src = fetchurl {
505 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
505 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
506 md5 = "09d890c54c7456c818102b7ff8c182c8";
506 md5 = "09d890c54c7456c818102b7ff8c182c8";
507 };
507 };
508 meta = {
508 meta = {
509 license = [ pkgs.lib.licenses.mit ];
509 license = [ pkgs.lib.licenses.mit ];
510 };
510 };
511 };
511 };
512 pytest-cov = super.buildPythonPackage {
512 pytest-cov = super.buildPythonPackage {
513 name = "pytest-cov-2.4.0";
513 name = "pytest-cov-2.4.0";
514 buildInputs = with self; [];
514 buildInputs = with self; [];
515 doCheck = false;
515 doCheck = false;
516 propagatedBuildInputs = with self; [pytest coverage];
516 propagatedBuildInputs = with self; [pytest coverage];
517 src = fetchurl {
517 src = fetchurl {
518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
520 };
520 };
521 meta = {
521 meta = {
522 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
522 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
523 };
523 };
524 };
524 };
525 pytest-profiling = super.buildPythonPackage {
525 pytest-profiling = super.buildPythonPackage {
526 name = "pytest-profiling-1.2.2";
526 name = "pytest-profiling-1.2.2";
527 buildInputs = with self; [];
527 buildInputs = with self; [];
528 doCheck = false;
528 doCheck = false;
529 propagatedBuildInputs = with self; [six pytest gprof2dot];
529 propagatedBuildInputs = with self; [six pytest gprof2dot];
530 src = fetchurl {
530 src = fetchurl {
531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
533 };
533 };
534 meta = {
534 meta = {
535 license = [ pkgs.lib.licenses.mit ];
535 license = [ pkgs.lib.licenses.mit ];
536 };
536 };
537 };
537 };
538 pytest-runner = super.buildPythonPackage {
538 pytest-runner = super.buildPythonPackage {
539 name = "pytest-runner-2.9";
539 name = "pytest-runner-2.9";
540 buildInputs = with self; [];
540 buildInputs = with self; [];
541 doCheck = false;
541 doCheck = false;
542 propagatedBuildInputs = with self; [];
542 propagatedBuildInputs = with self; [];
543 src = fetchurl {
543 src = fetchurl {
544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
546 };
546 };
547 meta = {
547 meta = {
548 license = [ pkgs.lib.licenses.mit ];
548 license = [ pkgs.lib.licenses.mit ];
549 };
549 };
550 };
550 };
551 pytest-sugar = super.buildPythonPackage {
551 pytest-sugar = super.buildPythonPackage {
552 name = "pytest-sugar-0.7.1";
552 name = "pytest-sugar-0.7.1";
553 buildInputs = with self; [];
553 buildInputs = with self; [];
554 doCheck = false;
554 doCheck = false;
555 propagatedBuildInputs = with self; [pytest termcolor];
555 propagatedBuildInputs = with self; [pytest termcolor];
556 src = fetchurl {
556 src = fetchurl {
557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
559 };
559 };
560 meta = {
560 meta = {
561 license = [ pkgs.lib.licenses.bsdOriginal ];
561 license = [ pkgs.lib.licenses.bsdOriginal ];
562 };
562 };
563 };
563 };
564 pytest-timeout = super.buildPythonPackage {
564 pytest-timeout = super.buildPythonPackage {
565 name = "pytest-timeout-1.2.0";
565 name = "pytest-timeout-1.2.0";
566 buildInputs = with self; [];
566 buildInputs = with self; [];
567 doCheck = false;
567 doCheck = false;
568 propagatedBuildInputs = with self; [pytest];
568 propagatedBuildInputs = with self; [pytest];
569 src = fetchurl {
569 src = fetchurl {
570 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
570 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
571 md5 = "83607d91aa163562c7ee835da57d061d";
571 md5 = "83607d91aa163562c7ee835da57d061d";
572 };
572 };
573 meta = {
573 meta = {
574 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
574 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
575 };
575 };
576 };
576 };
577 repoze.lru = super.buildPythonPackage {
577 repoze.lru = super.buildPythonPackage {
578 name = "repoze.lru-0.6";
578 name = "repoze.lru-0.6";
579 buildInputs = with self; [];
579 buildInputs = with self; [];
580 doCheck = false;
580 doCheck = false;
581 propagatedBuildInputs = with self; [];
581 propagatedBuildInputs = with self; [];
582 src = fetchurl {
582 src = fetchurl {
583 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
583 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
584 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
584 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
585 };
585 };
586 meta = {
586 meta = {
587 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
587 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
588 };
588 };
589 };
589 };
590 rhodecode-vcsserver = super.buildPythonPackage {
590 rhodecode-vcsserver = super.buildPythonPackage {
591 name = "rhodecode-vcsserver-4.7.2";
591 name = "rhodecode-vcsserver-4.8.0";
592 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
592 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
593 doCheck = true;
593 doCheck = true;
594 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress Pyro4 serpent pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
594 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
595 src = ./.;
595 src = ./.;
596 meta = {
596 meta = {
597 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
597 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
598 };
598 };
599 };
599 };
600 serpent = super.buildPythonPackage {
601 name = "serpent-1.15";
602 buildInputs = with self; [];
603 doCheck = false;
604 propagatedBuildInputs = with self; [];
605 src = fetchurl {
606 url = "https://pypi.python.org/packages/7b/38/b2b27673a882ff2ea5871bb3e3e6b496ebbaafd1612e51990ffb158b9254/serpent-1.15.tar.gz";
607 md5 = "e27b1aad5c218e16442f52abb7c7053a";
608 };
609 meta = {
610 license = [ pkgs.lib.licenses.mit ];
611 };
612 };
613 setuptools = super.buildPythonPackage {
600 setuptools = super.buildPythonPackage {
614 name = "setuptools-30.1.0";
601 name = "setuptools-30.1.0";
615 buildInputs = with self; [];
602 buildInputs = with self; [];
616 doCheck = false;
603 doCheck = false;
617 propagatedBuildInputs = with self; [];
604 propagatedBuildInputs = with self; [];
618 src = fetchurl {
605 src = fetchurl {
619 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
606 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
620 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
607 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
621 };
608 };
622 meta = {
609 meta = {
623 license = [ pkgs.lib.licenses.mit ];
610 license = [ pkgs.lib.licenses.mit ];
624 };
611 };
625 };
612 };
626 simplegeneric = super.buildPythonPackage {
613 simplegeneric = super.buildPythonPackage {
627 name = "simplegeneric-0.8.1";
614 name = "simplegeneric-0.8.1";
628 buildInputs = with self; [];
615 buildInputs = with self; [];
629 doCheck = false;
616 doCheck = false;
630 propagatedBuildInputs = with self; [];
617 propagatedBuildInputs = with self; [];
631 src = fetchurl {
618 src = fetchurl {
632 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
619 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
633 md5 = "f9c1fab00fd981be588fc32759f474e3";
620 md5 = "f9c1fab00fd981be588fc32759f474e3";
634 };
621 };
635 meta = {
622 meta = {
636 license = [ pkgs.lib.licenses.zpt21 ];
623 license = [ pkgs.lib.licenses.zpt21 ];
637 };
624 };
638 };
625 };
639 simplejson = super.buildPythonPackage {
626 simplejson = super.buildPythonPackage {
640 name = "simplejson-3.7.2";
627 name = "simplejson-3.7.2";
641 buildInputs = with self; [];
628 buildInputs = with self; [];
642 doCheck = false;
629 doCheck = false;
643 propagatedBuildInputs = with self; [];
630 propagatedBuildInputs = with self; [];
644 src = fetchurl {
631 src = fetchurl {
645 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
632 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
646 md5 = "a5fc7d05d4cb38492285553def5d4b46";
633 md5 = "a5fc7d05d4cb38492285553def5d4b46";
647 };
634 };
648 meta = {
635 meta = {
649 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
636 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
650 };
637 };
651 };
638 };
652 six = super.buildPythonPackage {
639 six = super.buildPythonPackage {
653 name = "six-1.9.0";
640 name = "six-1.9.0";
654 buildInputs = with self; [];
641 buildInputs = with self; [];
655 doCheck = false;
642 doCheck = false;
656 propagatedBuildInputs = with self; [];
643 propagatedBuildInputs = with self; [];
657 src = fetchurl {
644 src = fetchurl {
658 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
645 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
659 md5 = "476881ef4012262dfc8adc645ee786c4";
646 md5 = "476881ef4012262dfc8adc645ee786c4";
660 };
647 };
661 meta = {
648 meta = {
662 license = [ pkgs.lib.licenses.mit ];
649 license = [ pkgs.lib.licenses.mit ];
663 };
650 };
664 };
651 };
665 subprocess32 = super.buildPythonPackage {
652 subprocess32 = super.buildPythonPackage {
666 name = "subprocess32-3.2.6";
653 name = "subprocess32-3.2.6";
667 buildInputs = with self; [];
654 buildInputs = with self; [];
668 doCheck = false;
655 doCheck = false;
669 propagatedBuildInputs = with self; [];
656 propagatedBuildInputs = with self; [];
670 src = fetchurl {
657 src = fetchurl {
671 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
658 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
672 md5 = "754c5ab9f533e764f931136974b618f1";
659 md5 = "754c5ab9f533e764f931136974b618f1";
673 };
660 };
674 meta = {
661 meta = {
675 license = [ pkgs.lib.licenses.psfl ];
662 license = [ pkgs.lib.licenses.psfl ];
676 };
663 };
677 };
664 };
678 subvertpy = super.buildPythonPackage {
665 subvertpy = super.buildPythonPackage {
679 name = "subvertpy-0.9.3";
666 name = "subvertpy-0.9.3";
680 buildInputs = with self; [];
667 buildInputs = with self; [];
681 doCheck = false;
668 doCheck = false;
682 propagatedBuildInputs = with self; [];
669 propagatedBuildInputs = with self; [];
683 src = fetchurl {
670 src = fetchurl {
684 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
671 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
685 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
672 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
686 };
673 };
687 meta = {
674 meta = {
688 license = [ pkgs.lib.licenses.lgpl21Plus ];
675 license = [ pkgs.lib.licenses.lgpl21Plus ];
689 };
676 };
690 };
677 };
691 termcolor = super.buildPythonPackage {
678 termcolor = super.buildPythonPackage {
692 name = "termcolor-1.1.0";
679 name = "termcolor-1.1.0";
693 buildInputs = with self; [];
680 buildInputs = with self; [];
694 doCheck = false;
681 doCheck = false;
695 propagatedBuildInputs = with self; [];
682 propagatedBuildInputs = with self; [];
696 src = fetchurl {
683 src = fetchurl {
697 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
684 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
698 md5 = "043e89644f8909d462fbbfa511c768df";
685 md5 = "043e89644f8909d462fbbfa511c768df";
699 };
686 };
700 meta = {
687 meta = {
701 license = [ pkgs.lib.licenses.mit ];
688 license = [ pkgs.lib.licenses.mit ];
702 };
689 };
703 };
690 };
704 traitlets = super.buildPythonPackage {
691 traitlets = super.buildPythonPackage {
705 name = "traitlets-4.3.1";
692 name = "traitlets-4.3.2";
706 buildInputs = with self; [];
693 buildInputs = with self; [];
707 doCheck = false;
694 doCheck = false;
708 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
695 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
709 src = fetchurl {
696 src = fetchurl {
710 url = "https://pypi.python.org/packages/b1/d6/5b5aa6d5c474691909b91493da1e8972e309c9f01ecfe4aeafd272eb3234/traitlets-4.3.1.tar.gz";
697 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
711 md5 = "dd0b1b6e5d31ce446d55a4b5e5083c98";
698 md5 = "3068663f2f38fd939a9eb3a500ccc154";
712 };
699 };
713 meta = {
700 meta = {
714 license = [ pkgs.lib.licenses.bsdOriginal ];
701 license = [ pkgs.lib.licenses.bsdOriginal ];
715 };
702 };
716 };
703 };
717 translationstring = super.buildPythonPackage {
704 translationstring = super.buildPythonPackage {
718 name = "translationstring-1.3";
705 name = "translationstring-1.3";
719 buildInputs = with self; [];
706 buildInputs = with self; [];
720 doCheck = false;
707 doCheck = false;
721 propagatedBuildInputs = with self; [];
708 propagatedBuildInputs = with self; [];
722 src = fetchurl {
709 src = fetchurl {
723 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
710 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
724 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
711 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
725 };
712 };
726 meta = {
713 meta = {
727 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
714 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
728 };
715 };
729 };
716 };
730 venusian = super.buildPythonPackage {
717 venusian = super.buildPythonPackage {
731 name = "venusian-1.0";
718 name = "venusian-1.0";
732 buildInputs = with self; [];
719 buildInputs = with self; [];
733 doCheck = false;
720 doCheck = false;
734 propagatedBuildInputs = with self; [];
721 propagatedBuildInputs = with self; [];
735 src = fetchurl {
722 src = fetchurl {
736 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
723 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
737 md5 = "dccf2eafb7113759d60c86faf5538756";
724 md5 = "dccf2eafb7113759d60c86faf5538756";
738 };
725 };
739 meta = {
726 meta = {
740 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
727 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
741 };
728 };
742 };
729 };
743 waitress = super.buildPythonPackage {
730 waitress = super.buildPythonPackage {
744 name = "waitress-1.0.1";
731 name = "waitress-1.0.1";
745 buildInputs = with self; [];
732 buildInputs = with self; [];
746 doCheck = false;
733 doCheck = false;
747 propagatedBuildInputs = with self; [];
734 propagatedBuildInputs = with self; [];
748 src = fetchurl {
735 src = fetchurl {
749 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
736 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
750 md5 = "dda92358a7569669086155923a46e57c";
737 md5 = "dda92358a7569669086155923a46e57c";
751 };
738 };
752 meta = {
739 meta = {
753 license = [ pkgs.lib.licenses.zpt21 ];
740 license = [ pkgs.lib.licenses.zpt21 ];
754 };
741 };
755 };
742 };
756 wcwidth = super.buildPythonPackage {
743 wcwidth = super.buildPythonPackage {
757 name = "wcwidth-0.1.7";
744 name = "wcwidth-0.1.7";
758 buildInputs = with self; [];
745 buildInputs = with self; [];
759 doCheck = false;
746 doCheck = false;
760 propagatedBuildInputs = with self; [];
747 propagatedBuildInputs = with self; [];
761 src = fetchurl {
748 src = fetchurl {
762 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
749 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
763 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
750 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
764 };
751 };
765 meta = {
752 meta = {
766 license = [ pkgs.lib.licenses.mit ];
753 license = [ pkgs.lib.licenses.mit ];
767 };
754 };
768 };
755 };
769 wheel = super.buildPythonPackage {
756 wheel = super.buildPythonPackage {
770 name = "wheel-0.29.0";
757 name = "wheel-0.29.0";
771 buildInputs = with self; [];
758 buildInputs = with self; [];
772 doCheck = false;
759 doCheck = false;
773 propagatedBuildInputs = with self; [];
760 propagatedBuildInputs = with self; [];
774 src = fetchurl {
761 src = fetchurl {
775 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
762 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
776 md5 = "555a67e4507cedee23a0deb9651e452f";
763 md5 = "555a67e4507cedee23a0deb9651e452f";
777 };
764 };
778 meta = {
765 meta = {
779 license = [ pkgs.lib.licenses.mit ];
766 license = [ pkgs.lib.licenses.mit ];
780 };
767 };
781 };
768 };
782 zope.deprecation = super.buildPythonPackage {
769 zope.deprecation = super.buildPythonPackage {
783 name = "zope.deprecation-4.1.2";
770 name = "zope.deprecation-4.1.2";
784 buildInputs = with self; [];
771 buildInputs = with self; [];
785 doCheck = false;
772 doCheck = false;
786 propagatedBuildInputs = with self; [setuptools];
773 propagatedBuildInputs = with self; [setuptools];
787 src = fetchurl {
774 src = fetchurl {
788 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
775 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
789 md5 = "e9a663ded58f4f9f7881beb56cae2782";
776 md5 = "e9a663ded58f4f9f7881beb56cae2782";
790 };
777 };
791 meta = {
778 meta = {
792 license = [ pkgs.lib.licenses.zpt21 ];
779 license = [ pkgs.lib.licenses.zpt21 ];
793 };
780 };
794 };
781 };
795 zope.interface = super.buildPythonPackage {
782 zope.interface = super.buildPythonPackage {
796 name = "zope.interface-4.1.3";
783 name = "zope.interface-4.1.3";
797 buildInputs = with self; [];
784 buildInputs = with self; [];
798 doCheck = false;
785 doCheck = false;
799 propagatedBuildInputs = with self; [setuptools];
786 propagatedBuildInputs = with self; [setuptools];
800 src = fetchurl {
787 src = fetchurl {
801 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
788 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
802 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
789 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
803 };
790 };
804 meta = {
791 meta = {
805 license = [ pkgs.lib.licenses.zpt21 ];
792 license = [ pkgs.lib.licenses.zpt21 ];
806 };
793 };
807 };
794 };
808
795
809 ### Test requirements
796 ### Test requirements
810
797
811
798
812 }
799 }
@@ -1,43 +1,40 b''
1 ## core
1 ## core
2 setuptools==30.1.0
2 setuptools==30.1.0
3
3
4 Beaker==1.7.0
4 Beaker==1.7.0
5 configobj==5.0.6
5 configobj==5.0.6
6 decorator==4.0.11
6 decorator==4.0.11
7 dulwich==0.13.0
7 dulwich==0.13.0
8 hgsubversion==1.8.6
8 hgsubversion==1.8.6
9 hg-evolve==6.0.1
9 infrae.cache==1.0.1
10 infrae.cache==1.0.1
10 mercurial==4.1.2
11 mercurial==4.1.2
11 msgpack-python==0.4.8
12 msgpack-python==0.4.8
12 pyramid-jinja2==2.5
13 pyramid-jinja2==2.5
13 pyramid==1.7.4
14 pyramid==1.7.4
14 pyramid-mako==1.0.2
15 pyramid-mako==1.0.2
15 repoze.lru==0.6
16 repoze.lru==0.6
16 simplejson==3.7.2
17 simplejson==3.7.2
17 subprocess32==3.2.6
18 subprocess32==3.2.6
18
19
19 # Custom subvertpy that is not available on pypi.
20 # Custom subvertpy that is not available on pypi.
20 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
21 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
21
22
22 six==1.9.0
23 six==1.9.0
23 translationstring==1.3
24 translationstring==1.3
24 WebOb==1.3.1
25 WebOb==1.3.1
25 wheel==0.29.0
26 wheel==0.29.0
26 zope.deprecation==4.1.2
27 zope.deprecation==4.1.2
27 zope.interface==4.1.3
28 zope.interface==4.1.3
28
29
29 ## debug
30 ## debug
30 ipdb==0.10.1
31 ipdb==0.10.1
31 ipython==5.1.0
32 ipython==5.1.0
32 # http servers
33 # http servers
33 gevent==1.1.2
34 gevent==1.1.2
34 greenlet==0.4.10
35 greenlet==0.4.10
35 gunicorn==19.6.0
36 gunicorn==19.6.0
36 waitress==1.0.1
37 waitress==1.0.1
37
38
38 # Pyro/Deprecated TODO(Marcink): remove in 4.7 release.
39 Pyro4==4.41
40 serpent==1.15
41
42 ## test related requirements
39 ## test related requirements
43 -r requirements_test.txt
40 -r requirements_test.txt
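The dependency pins above are mirrored by the generated Nix package set earlier in this changeset (the pip2nix_generated release task), so the two files move in lockstep: hg-evolve is added, Pyro4 and serpent are dropped, and traitlets is bumped in both. A minimal consistency-check sketch follows; the local file names requirements.txt and pkgs.nix are assumptions for illustration, not taken from this diff.

# Sketch only: report pins in requirements.txt that have no matching
# name = "pkg-version"; entry in the generated Nix package set.
# The paths below are assumptions for illustration.
import re

def read_pins(requirements_path):
    """Return {package: version} for lines of the form pkg==1.2.3."""
    pins = {}
    with open(requirements_path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith(('#', '-r', 'http')):
                continue
            if '==' in line:
                name, version = line.split('==', 1)
                pins[name.strip()] = version.strip()
    return pins

def read_nix_versions(nix_path):
    """Return {package: version} parsed from name = "pkg-1.2.3"; lines."""
    pattern = re.compile(r'name = "(.+)-([^-"]+)";')
    with open(nix_path) as f:
        return {m.group(1): m.group(2) for m in pattern.finditer(f.read())}

if __name__ == '__main__':
    pins = read_pins('requirements.txt')
    nix = read_nix_versions('pkgs.nix')
    for pkg, version in sorted(pins.items()):
        if nix.get(pkg) != version:
            print('out of sync: %s==%s (nix has %s)' % (pkg, version, nix.get(pkg)))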
@@ -1,93 +1,87 b''
1 ################################################################################
1 ################################################################################
2 # RhodeCode VCSServer - configuration #
2 # RhodeCode VCSServer - configuration #
3 # #
3 # #
4 ################################################################################
4 ################################################################################
5
5
6 [DEFAULT]
6 [DEFAULT]
7 host = 127.0.0.1
7 host = 127.0.0.1
8 port = 9901
8 port = 9901
9 locale = en_US.UTF-8
9 locale = en_US.UTF-8
10 # number of worker threads; this should be set based on the formula threadpool = N * 6,
10 # number of worker threads; this should be set based on the formula threadpool = N * 6,
11 # where N is the number of RhodeCode Enterprise workers, e.g. running 2 instances with
11 # where N is the number of RhodeCode Enterprise workers, e.g. running 2 instances with
12 # 8 gunicorn workers each would be 2 * 8 * 6 = 96, so threadpool_size = 96
12 # 8 gunicorn workers each would be 2 * 8 * 6 = 96, so threadpool_size = 96
13 threadpool_size = 96
13 threadpool_size = 96
14 timeout = 0
14 timeout = 0
15
15
16 # cache regions, please don't change
16 # cache regions, please don't change
17 beaker.cache.regions = repo_object
17 beaker.cache.regions = repo_object
18 beaker.cache.repo_object.type = memorylru
18 beaker.cache.repo_object.type = memorylru
19 beaker.cache.repo_object.max_items = 100
19 beaker.cache.repo_object.max_items = 100
20 # cache auto-expires after N seconds
20 # cache auto-expires after N seconds
21 beaker.cache.repo_object.expire = 300
21 beaker.cache.repo_object.expire = 300
22 beaker.cache.repo_object.enabled = true
22 beaker.cache.repo_object.enabled = true
23
23
24
24
25 ################################
25 ################################
26 ### LOGGING CONFIGURATION ####
26 ### LOGGING CONFIGURATION ####
27 ################################
27 ################################
28 [loggers]
28 [loggers]
29 keys = root, vcsserver, pyro4, beaker
29 keys = root, vcsserver, beaker
30
30
31 [handlers]
31 [handlers]
32 keys = console
32 keys = console
33
33
34 [formatters]
34 [formatters]
35 keys = generic
35 keys = generic
36
36
37 #############
37 #############
38 ## LOGGERS ##
38 ## LOGGERS ##
39 #############
39 #############
40 [logger_root]
40 [logger_root]
41 level = NOTSET
41 level = NOTSET
42 handlers = console
42 handlers = console
43
43
44 [logger_vcsserver]
44 [logger_vcsserver]
45 level = DEBUG
45 level = DEBUG
46 handlers =
46 handlers =
47 qualname = vcsserver
47 qualname = vcsserver
48 propagate = 1
48 propagate = 1
49
49
50 [logger_beaker]
50 [logger_beaker]
51 level = DEBUG
51 level = DEBUG
52 handlers =
52 handlers =
53 qualname = beaker
53 qualname = beaker
54 propagate = 1
54 propagate = 1
55
55
56 [logger_pyro4]
57 level = DEBUG
58 handlers =
59 qualname = Pyro4
60 propagate = 1
61
62
56
63 ##############
57 ##############
64 ## HANDLERS ##
58 ## HANDLERS ##
65 ##############
59 ##############
66
60
67 [handler_console]
61 [handler_console]
68 class = StreamHandler
62 class = StreamHandler
69 args = (sys.stderr,)
63 args = (sys.stderr,)
70 level = INFO
64 level = INFO
71 formatter = generic
65 formatter = generic
72
66
73 [handler_file]
67 [handler_file]
74 class = FileHandler
68 class = FileHandler
75 args = ('vcsserver.log', 'a',)
69 args = ('vcsserver.log', 'a',)
76 level = DEBUG
70 level = DEBUG
77 formatter = generic
71 formatter = generic
78
72
79 [handler_file_rotating]
73 [handler_file_rotating]
80 class = logging.handlers.TimedRotatingFileHandler
74 class = logging.handlers.TimedRotatingFileHandler
81 # 'D', 5 - rotate every 5 days
75 # 'D', 5 - rotate every 5 days
82 # you can also set 'h' or 'midnight'
76 # you can also set 'h' or 'midnight'
83 args = ('vcsserver.log', 'D', 5, 10,)
77 args = ('vcsserver.log', 'D', 5, 10,)
84 level = DEBUG
78 level = DEBUG
85 formatter = generic
79 formatter = generic
86
80
87 ################
81 ################
88 ## FORMATTERS ##
82 ## FORMATTERS ##
89 ################
83 ################
90
84
91 [formatter_generic]
85 [formatter_generic]
92 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
86 format = %(asctime)s.%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s
93 datefmt = %Y-%m-%d %H:%M:%S
87 datefmt = %Y-%m-%d %H:%M:%S
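The [loggers], [handlers], and [formatters] sections above follow the standard fileConfig layout that Python's logging module understands, and the threadpool comment near the top of the same file encodes a simple sizing formula. A minimal illustrative sketch follows, assuming the file is saved locally as development.ini; it is not part of the VCSServer startup code touched by this changeset.

# Illustrative sketch: apply the threadpool sizing formula from the
# comments above and load the ini's logging sections with fileConfig.
# The file name 'development.ini' is an assumption, not taken from this diff.
import logging
import logging.config

def threadpool_size(enterprise_instances, gunicorn_workers, factor=6):
    """threadpool = N * 6, where N is the total number of Enterprise workers."""
    return enterprise_instances * gunicorn_workers * factor

if __name__ == '__main__':
    # 2 instances * 8 gunicorn workers * 6 = 96, matching threadpool_size = 96
    print(threadpool_size(2, 8))

    # fileConfig reads only the [loggers]/[handlers]/[formatters] sections,
    # so it can be pointed at the full configuration file.
    logging.config.fileConfig('development.ini', disable_existing_loggers=False)
    logging.getLogger('vcsserver').debug('logging configured')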
@@ -1,1 +1,1 b''
1 4.7.2 No newline at end of file
1 4.8.0 No newline at end of file
@@ -1,727 +1,746 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 InterventionRequired, RequirementError)
37 InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 # force mercurial to only use 1 thread, otherwise it may try to set a
56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
57 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
58 baseui.setconfig('worker', 'numcpus', 1)
59
59
60 # If there is no config for the largefiles extension, we explicitly disable
60 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from the repository's hgrc file. Recent
61 # it here. This overrides settings from the repository's hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from largefile
62 # mercurial versions enable largefiles in hgrc on clone from largefile
63 # repo.
63 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
64 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
65 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
66 baseui.setconfig('extensions', 'largefiles', '!')
67
67
68 return baseui
68 return baseui
69
69
70
70
71 def reraise_safe_exceptions(func):
71 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
72 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
73 def wrapper(*args, **kwargs):
74 try:
74 try:
75 return func(*args, **kwargs)
75 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
76 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
77 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
78 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
79 raise_from_original(exceptions.LookupException)
80 except RequirementError:
80 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
81 raise_from_original(exceptions.RequirementException)
82 except RepoError:
82 except RepoError:
83 raise_from_original(exceptions.VcsException)
83 raise_from_original(exceptions.VcsException)
84 except LookupError:
84 except LookupError:
85 raise_from_original(exceptions.LookupException)
85 raise_from_original(exceptions.LookupException)
86 except Exception as e:
86 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
87 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
88 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
89 raise_from_original(exceptions.UnhandledException)
90 raise
90 raise
91 return wrapper
91 return wrapper
92
92
93
93
94 class MercurialFactory(RepoFactory):
94 class MercurialFactory(RepoFactory):
95
95
96 def _create_config(self, config, hooks=True):
96 def _create_config(self, config, hooks=True):
97 if not hooks:
97 if not hooks:
98 hooks_to_clean = frozenset((
98 hooks_to_clean = frozenset((
99 'changegroup.repo_size', 'preoutgoing.pre_pull',
99 'changegroup.repo_size', 'preoutgoing.pre_pull',
100 'outgoing.pull_logger', 'prechangegroup.pre_push'))
100 'outgoing.pull_logger', 'prechangegroup.pre_push'))
101 new_config = []
101 new_config = []
102 for section, option, value in config:
102 for section, option, value in config:
103 if section == 'hooks' and option in hooks_to_clean:
103 if section == 'hooks' and option in hooks_to_clean:
104 continue
104 continue
105 new_config.append((section, option, value))
105 new_config.append((section, option, value))
106 config = new_config
106 config = new_config
107
107
108 baseui = make_ui_from_config(config)
108 baseui = make_ui_from_config(config)
109 return baseui
109 return baseui
110
110
111 def _create_repo(self, wire, create):
111 def _create_repo(self, wire, create):
112 baseui = self._create_config(wire["config"])
112 baseui = self._create_config(wire["config"])
113 return localrepository(baseui, wire["path"], create)
113 return localrepository(baseui, wire["path"], create)
114
114
115
115
116 class HgRemote(object):
116 class HgRemote(object):
117
117
118 def __init__(self, factory):
118 def __init__(self, factory):
119 self._factory = factory
119 self._factory = factory
120
120
121 self._bulk_methods = {
121 self._bulk_methods = {
122 "affected_files": self.ctx_files,
122 "affected_files": self.ctx_files,
123 "author": self.ctx_user,
123 "author": self.ctx_user,
124 "branch": self.ctx_branch,
124 "branch": self.ctx_branch,
125 "children": self.ctx_children,
125 "children": self.ctx_children,
126 "date": self.ctx_date,
126 "date": self.ctx_date,
127 "message": self.ctx_description,
127 "message": self.ctx_description,
128 "parents": self.ctx_parents,
128 "parents": self.ctx_parents,
129 "status": self.ctx_status,
129 "status": self.ctx_status,
130 "_file_paths": self.ctx_list,
130 "_file_paths": self.ctx_list,
131 }
131 }
132
132
133 @reraise_safe_exceptions
133 @reraise_safe_exceptions
134 def discover_hg_version(self):
134 def discover_hg_version(self):
135 from mercurial import util
135 from mercurial import util
136 return util.version()
136 return util.version()
137
137
138 @reraise_safe_exceptions
138 @reraise_safe_exceptions
139 def archive_repo(self, archive_path, mtime, file_info, kind):
139 def archive_repo(self, archive_path, mtime, file_info, kind):
140 if kind == "tgz":
140 if kind == "tgz":
141 archiver = archival.tarit(archive_path, mtime, "gz")
141 archiver = archival.tarit(archive_path, mtime, "gz")
142 elif kind == "tbz2":
142 elif kind == "tbz2":
143 archiver = archival.tarit(archive_path, mtime, "bz2")
143 archiver = archival.tarit(archive_path, mtime, "bz2")
144 elif kind == 'zip':
144 elif kind == 'zip':
145 archiver = archival.zipit(archive_path, mtime)
145 archiver = archival.zipit(archive_path, mtime)
146 else:
146 else:
147 raise exceptions.ArchiveException(
147 raise exceptions.ArchiveException(
148 'Remote does not support: "%s".' % kind)
148 'Remote does not support: "%s".' % kind)
149
149
150 for f_path, f_mode, f_is_link, f_content in file_info:
150 for f_path, f_mode, f_is_link, f_content in file_info:
151 archiver.addfile(f_path, f_mode, f_is_link, f_content)
151 archiver.addfile(f_path, f_mode, f_is_link, f_content)
152 archiver.done()
152 archiver.done()
153
153
154 @reraise_safe_exceptions
154 @reraise_safe_exceptions
155 def bookmarks(self, wire):
155 def bookmarks(self, wire):
156 repo = self._factory.repo(wire)
156 repo = self._factory.repo(wire)
157 return dict(repo._bookmarks)
157 return dict(repo._bookmarks)
158
158
159 @reraise_safe_exceptions
159 @reraise_safe_exceptions
160 def branches(self, wire, normal, closed):
160 def branches(self, wire, normal, closed):
161 repo = self._factory.repo(wire)
161 repo = self._factory.repo(wire)
162 iter_branches = repo.branchmap().iterbranches()
162 iter_branches = repo.branchmap().iterbranches()
163 bt = {}
163 bt = {}
164 for branch_name, _heads, tip, is_closed in iter_branches:
164 for branch_name, _heads, tip, is_closed in iter_branches:
165 if normal and not is_closed:
165 if normal and not is_closed:
166 bt[branch_name] = tip
166 bt[branch_name] = tip
167 if closed and is_closed:
167 if closed and is_closed:
168 bt[branch_name] = tip
168 bt[branch_name] = tip
169
169
170 return bt
170 return bt
171
171
172 @reraise_safe_exceptions
172 @reraise_safe_exceptions
173 def bulk_request(self, wire, rev, pre_load):
173 def bulk_request(self, wire, rev, pre_load):
174 result = {}
174 result = {}
175 for attr in pre_load:
175 for attr in pre_load:
176 try:
176 try:
177 method = self._bulk_methods[attr]
177 method = self._bulk_methods[attr]
178 result[attr] = method(wire, rev)
178 result[attr] = method(wire, rev)
179 except KeyError:
179 except KeyError:
180 raise exceptions.VcsException(
180 raise exceptions.VcsException(
181 'Unknown bulk attribute: "%s"' % attr)
181 'Unknown bulk attribute: "%s"' % attr)
182 return result
182 return result
183
183
184 @reraise_safe_exceptions
184 @reraise_safe_exceptions
185 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
185 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
186 baseui = self._factory._create_config(wire["config"], hooks=hooks)
186 baseui = self._factory._create_config(wire["config"], hooks=hooks)
187 clone(baseui, source, dest, noupdate=not update_after_clone)
187 clone(baseui, source, dest, noupdate=not update_after_clone)
188
188
189 @reraise_safe_exceptions
189 @reraise_safe_exceptions
190 def commitctx(
190 def commitctx(
191 self, wire, message, parents, commit_time, commit_timezone,
191 self, wire, message, parents, commit_time, commit_timezone,
192 user, files, extra, removed, updated):
192 user, files, extra, removed, updated):
193
193
194 def _filectxfn(_repo, memctx, path):
194 def _filectxfn(_repo, memctx, path):
195 """
195 """
196 Marks the given path as added/changed/removed in the given _repo. This is
196 Marks the given path as added/changed/removed in the given _repo. This is
197 for the internal mercurial commit function.
197 for the internal mercurial commit function.
198 """
198 """
199
199
200 # check if this path is removed
200 # check if this path is removed
201 if path in removed:
201 if path in removed:
202 # returning None is a way to mark node for removal
202 # returning None is a way to mark node for removal
203 return None
203 return None
204
204
205 # check if this path is added
205 # check if this path is added
206 for node in updated:
206 for node in updated:
207 if node['path'] == path:
207 if node['path'] == path:
208 return memfilectx(
208 return memfilectx(
209 _repo,
209 _repo,
210 path=node['path'],
210 path=node['path'],
211 data=node['content'],
211 data=node['content'],
212 islink=False,
212 islink=False,
213 isexec=bool(node['mode'] & stat.S_IXUSR),
213 isexec=bool(node['mode'] & stat.S_IXUSR),
214 copied=False,
214 copied=False,
215 memctx=memctx)
215 memctx=memctx)
216
216
217 raise exceptions.AbortException(
217 raise exceptions.AbortException(
218 "Given path haven't been marked as added, "
218 "Given path haven't been marked as added, "
219 "changed or removed (%s)" % path)
219 "changed or removed (%s)" % path)
220
220
221 repo = self._factory.repo(wire)
221 repo = self._factory.repo(wire)
222
222
223 commit_ctx = memctx(
223 commit_ctx = memctx(
224 repo=repo,
224 repo=repo,
225 parents=parents,
225 parents=parents,
226 text=message,
226 text=message,
227 files=files,
227 files=files,
228 filectxfn=_filectxfn,
228 filectxfn=_filectxfn,
229 user=user,
229 user=user,
230 date=(commit_time, commit_timezone),
230 date=(commit_time, commit_timezone),
231 extra=extra)
231 extra=extra)
232
232
233 n = repo.commitctx(commit_ctx)
233 n = repo.commitctx(commit_ctx)
234 new_id = hex(n)
234 new_id = hex(n)
235
235
236 return new_id
236 return new_id
237
237
238 @reraise_safe_exceptions
238 @reraise_safe_exceptions
239 def ctx_branch(self, wire, revision):
239 def ctx_branch(self, wire, revision):
240 repo = self._factory.repo(wire)
240 repo = self._factory.repo(wire)
241 ctx = repo[revision]
241 ctx = repo[revision]
242 return ctx.branch()
242 return ctx.branch()
243
243
244 @reraise_safe_exceptions
244 @reraise_safe_exceptions
245 def ctx_children(self, wire, revision):
245 def ctx_children(self, wire, revision):
246 repo = self._factory.repo(wire)
246 repo = self._factory.repo(wire)
247 ctx = repo[revision]
247 ctx = repo[revision]
248 return [child.rev() for child in ctx.children()]
248 return [child.rev() for child in ctx.children()]
249
249
250 @reraise_safe_exceptions
250 @reraise_safe_exceptions
251 def ctx_date(self, wire, revision):
251 def ctx_date(self, wire, revision):
252 repo = self._factory.repo(wire)
252 repo = self._factory.repo(wire)
253 ctx = repo[revision]
253 ctx = repo[revision]
254 return ctx.date()
254 return ctx.date()
255
255
256 @reraise_safe_exceptions
256 @reraise_safe_exceptions
257 def ctx_description(self, wire, revision):
257 def ctx_description(self, wire, revision):
258 repo = self._factory.repo(wire)
258 repo = self._factory.repo(wire)
259 ctx = repo[revision]
259 ctx = repo[revision]
260 return ctx.description()
260 return ctx.description()
261
261
262 @reraise_safe_exceptions
262 @reraise_safe_exceptions
263 def ctx_diff(
263 def ctx_diff(
264 self, wire, revision, git=True, ignore_whitespace=True, context=3):
264 self, wire, revision, git=True, ignore_whitespace=True, context=3):
265 repo = self._factory.repo(wire)
265 repo = self._factory.repo(wire)
266 ctx = repo[revision]
266 ctx = repo[revision]
267 result = ctx.diff(
267 result = ctx.diff(
268 git=git, ignore_whitespace=ignore_whitespace, context=context)
268 git=git, ignore_whitespace=ignore_whitespace, context=context)
269 return list(result)
269 return list(result)
270
270
271 @reraise_safe_exceptions
271 @reraise_safe_exceptions
272 def ctx_files(self, wire, revision):
272 def ctx_files(self, wire, revision):
273 repo = self._factory.repo(wire)
273 repo = self._factory.repo(wire)
274 ctx = repo[revision]
274 ctx = repo[revision]
275 return ctx.files()
275 return ctx.files()
276
276
277 @reraise_safe_exceptions
277 @reraise_safe_exceptions
278 def ctx_list(self, path, revision):
278 def ctx_list(self, path, revision):
279 repo = self._factory.repo(path)
279 repo = self._factory.repo(path)
280 ctx = repo[revision]
280 ctx = repo[revision]
281 return list(ctx)
281 return list(ctx)
282
282
283 @reraise_safe_exceptions
283 @reraise_safe_exceptions
284 def ctx_parents(self, wire, revision):
284 def ctx_parents(self, wire, revision):
285 repo = self._factory.repo(wire)
285 repo = self._factory.repo(wire)
286 ctx = repo[revision]
286 ctx = repo[revision]
287 return [parent.rev() for parent in ctx.parents()]
287 return [parent.rev() for parent in ctx.parents()]
288
288
289 @reraise_safe_exceptions
289 @reraise_safe_exceptions
290 def ctx_phase(self, wire, revision):
291 repo = self._factory.repo(wire)
292 ctx = repo[revision]
293 # public=0, draft=1, secret=3
294 return ctx.phase()
295
296 @reraise_safe_exceptions
297 def ctx_obsolete(self, wire, revision):
298 repo = self._factory.repo(wire)
299 ctx = repo[revision]
300 return ctx.obsolete()
301
302 @reraise_safe_exceptions
303 def ctx_hidden(self, wire, revision):
304 repo = self._factory.repo(wire)
305 ctx = repo[revision]
306 return ctx.hidden()
307
308 @reraise_safe_exceptions
290 def ctx_substate(self, wire, revision):
309 def ctx_substate(self, wire, revision):
291 repo = self._factory.repo(wire)
310 repo = self._factory.repo(wire)
292 ctx = repo[revision]
311 ctx = repo[revision]
293 return ctx.substate
312 return ctx.substate
294
313
295 @reraise_safe_exceptions
314 @reraise_safe_exceptions
296 def ctx_status(self, wire, revision):
315 def ctx_status(self, wire, revision):
297 repo = self._factory.repo(wire)
316 repo = self._factory.repo(wire)
298 ctx = repo[revision]
317 ctx = repo[revision]
299 status = repo[ctx.p1().node()].status(other=ctx.node())
318 status = repo[ctx.p1().node()].status(other=ctx.node())
300 # object of status (odd, custom named tuple in mercurial) is not
319 # object of status (odd, custom named tuple in mercurial) is not
301 # correctly serializable via Pyro, we make it a list, as the underlying
320 # correctly serializable, we make it a list, as the underlying
302 # API expects this to be a list
321 # API expects this to be a list
303 return list(status)
322 return list(status)
304
323
305 @reraise_safe_exceptions
324 @reraise_safe_exceptions
306 def ctx_user(self, wire, revision):
325 def ctx_user(self, wire, revision):
307 repo = self._factory.repo(wire)
326 repo = self._factory.repo(wire)
308 ctx = repo[revision]
327 ctx = repo[revision]
309 return ctx.user()
328 return ctx.user()
310
329
311 @reraise_safe_exceptions
330 @reraise_safe_exceptions
312 def check_url(self, url, config):
331 def check_url(self, url, config):
313 _proto = None
332 _proto = None
314 if '+' in url[:url.find('://')]:
333 if '+' in url[:url.find('://')]:
315 _proto = url[0:url.find('+')]
334 _proto = url[0:url.find('+')]
316 url = url[url.find('+') + 1:]
335 url = url[url.find('+') + 1:]
317 handlers = []
336 handlers = []
318 url_obj = url_parser(url)
337 url_obj = url_parser(url)
319 test_uri, authinfo = url_obj.authinfo()
338 test_uri, authinfo = url_obj.authinfo()
320 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
339 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
321 url_obj.query = obfuscate_qs(url_obj.query)
340 url_obj.query = obfuscate_qs(url_obj.query)
322
341
323 cleaned_uri = str(url_obj)
342 cleaned_uri = str(url_obj)
324 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
343 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
325
344
326 if authinfo:
345 if authinfo:
327 # create a password manager
346 # create a password manager
328 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
347 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
329 passmgr.add_password(*authinfo)
348 passmgr.add_password(*authinfo)
330
349
331 handlers.extend((httpbasicauthhandler(passmgr),
350 handlers.extend((httpbasicauthhandler(passmgr),
332 httpdigestauthhandler(passmgr)))
351 httpdigestauthhandler(passmgr)))
333
352
334 o = urllib2.build_opener(*handlers)
353 o = urllib2.build_opener(*handlers)
335 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
354 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
336 ('Accept', 'application/mercurial-0.1')]
355 ('Accept', 'application/mercurial-0.1')]
337
356
338 q = {"cmd": 'between'}
357 q = {"cmd": 'between'}
339 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
358 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
340 qs = '?%s' % urllib.urlencode(q)
359 qs = '?%s' % urllib.urlencode(q)
341 cu = "%s%s" % (test_uri, qs)
360 cu = "%s%s" % (test_uri, qs)
342 req = urllib2.Request(cu, None, {})
361 req = urllib2.Request(cu, None, {})
343
362
344 try:
363 try:
345 log.debug("Trying to open URL %s", cleaned_uri)
364 log.debug("Trying to open URL %s", cleaned_uri)
346 resp = o.open(req)
365 resp = o.open(req)
347 if resp.code != 200:
366 if resp.code != 200:
348 raise exceptions.URLError('Return Code is not 200')
367 raise exceptions.URLError('Return Code is not 200')
349 except Exception as e:
368 except Exception as e:
350 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
369 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
351 # means it cannot be cloned
370 # means it cannot be cloned
352 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
371 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
353
372
354 # now check if it's a proper hg repo, but don't do it for svn
373 # now check if it's a proper hg repo, but don't do it for svn
355 try:
374 try:
356 if _proto == 'svn':
375 if _proto == 'svn':
357 pass
376 pass
358 else:
377 else:
359 # check for pure hg repos
378 # check for pure hg repos
360 log.debug(
379 log.debug(
361 "Verifying if URL is a Mercurial repository: %s",
380 "Verifying if URL is a Mercurial repository: %s",
362 cleaned_uri)
381 cleaned_uri)
363 httppeer(make_ui_from_config(config), url).lookup('tip')
382 httppeer(make_ui_from_config(config), url).lookup('tip')
364 except Exception as e:
383 except Exception as e:
365 log.warning("URL is not a valid Mercurial repository: %s",
384 log.warning("URL is not a valid Mercurial repository: %s",
366 cleaned_uri)
385 cleaned_uri)
367 raise exceptions.URLError(
386 raise exceptions.URLError(
368 "url [%s] does not look like an hg repo org_exc: %s"
387 "url [%s] does not look like an hg repo org_exc: %s"
369 % (cleaned_uri, e))
388 % (cleaned_uri, e))
370
389
371 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
390 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
372 return True
391 return True
373
392
374 @reraise_safe_exceptions
393 @reraise_safe_exceptions
375 def diff(
394 def diff(
376 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
395 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
377 context):
396 context):
378 repo = self._factory.repo(wire)
397 repo = self._factory.repo(wire)
379
398
380 if file_filter:
399 if file_filter:
381 match_filter = match(file_filter[0], '', [file_filter[1]])
400 match_filter = match(file_filter[0], '', [file_filter[1]])
382 else:
401 else:
383 match_filter = file_filter
402 match_filter = file_filter
384 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
403 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
385
404
386 try:
405 try:
387 return "".join(patch.diff(
406 return "".join(patch.diff(
388 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
407 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
389 except RepoLookupError:
408 except RepoLookupError:
390 raise exceptions.LookupException()
409 raise exceptions.LookupException()
391
410
392 @reraise_safe_exceptions
411 @reraise_safe_exceptions
393 def file_history(self, wire, revision, path, limit):
412 def file_history(self, wire, revision, path, limit):
394 repo = self._factory.repo(wire)
413 repo = self._factory.repo(wire)
395
414
396 ctx = repo[revision]
415 ctx = repo[revision]
397 fctx = ctx.filectx(path)
416 fctx = ctx.filectx(path)
398
417
399 def history_iter():
418 def history_iter():
400 limit_rev = fctx.rev()
419 limit_rev = fctx.rev()
401 for obj in reversed(list(fctx.filelog())):
420 for obj in reversed(list(fctx.filelog())):
402 obj = fctx.filectx(obj)
421 obj = fctx.filectx(obj)
403 if limit_rev >= obj.rev():
422 if limit_rev >= obj.rev():
404 yield obj
423 yield obj
405
424
406 history = []
425 history = []
407 for cnt, obj in enumerate(history_iter()):
426 for cnt, obj in enumerate(history_iter()):
408 if limit and cnt >= limit:
427 if limit and cnt >= limit:
409 break
428 break
410 history.append(hex(obj.node()))
429 history.append(hex(obj.node()))
411
430
412 return [x for x in history]
431 return [x for x in history]
413
432
414 @reraise_safe_exceptions
433 @reraise_safe_exceptions
415 def file_history_untill(self, wire, revision, path, limit):
434 def file_history_untill(self, wire, revision, path, limit):
416 repo = self._factory.repo(wire)
435 repo = self._factory.repo(wire)
417 ctx = repo[revision]
436 ctx = repo[revision]
418 fctx = ctx.filectx(path)
437 fctx = ctx.filectx(path)
419
438
420 file_log = list(fctx.filelog())
439 file_log = list(fctx.filelog())
421 if limit:
440 if limit:
422 # Limit to the last n items
441 # Limit to the last n items
423 file_log = file_log[-limit:]
442 file_log = file_log[-limit:]
424
443
425 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
444 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
426
445
427 @reraise_safe_exceptions
446 @reraise_safe_exceptions
428 def fctx_annotate(self, wire, revision, path):
447 def fctx_annotate(self, wire, revision, path):
429 repo = self._factory.repo(wire)
448 repo = self._factory.repo(wire)
430 ctx = repo[revision]
449 ctx = repo[revision]
431 fctx = ctx.filectx(path)
450 fctx = ctx.filectx(path)
432
451
433 result = []
452 result = []
434 for i, annotate_data in enumerate(fctx.annotate()):
453 for i, annotate_data in enumerate(fctx.annotate()):
435 ln_no = i + 1
454 ln_no = i + 1
436 node_info, content = annotate_data
455 node_info, content = annotate_data
437 sha = hex(node_info[0].node())
456 sha = hex(node_info[0].node())
438 result.append((ln_no, sha, content))
457 result.append((ln_no, sha, content))
439 return result
458 return result
440
459
441 @reraise_safe_exceptions
460 @reraise_safe_exceptions
442 def fctx_data(self, wire, revision, path):
461 def fctx_data(self, wire, revision, path):
443 repo = self._factory.repo(wire)
462 repo = self._factory.repo(wire)
444 ctx = repo[revision]
463 ctx = repo[revision]
445 fctx = ctx.filectx(path)
464 fctx = ctx.filectx(path)
446 return fctx.data()
465 return fctx.data()
447
466
448 @reraise_safe_exceptions
467 @reraise_safe_exceptions
449 def fctx_flags(self, wire, revision, path):
468 def fctx_flags(self, wire, revision, path):
450 repo = self._factory.repo(wire)
469 repo = self._factory.repo(wire)
451 ctx = repo[revision]
470 ctx = repo[revision]
452 fctx = ctx.filectx(path)
471 fctx = ctx.filectx(path)
453 return fctx.flags()
472 return fctx.flags()
454
473
455 @reraise_safe_exceptions
474 @reraise_safe_exceptions
456 def fctx_size(self, wire, revision, path):
475 def fctx_size(self, wire, revision, path):
457 repo = self._factory.repo(wire)
476 repo = self._factory.repo(wire)
458 ctx = repo[revision]
477 ctx = repo[revision]
459 fctx = ctx.filectx(path)
478 fctx = ctx.filectx(path)
460 return fctx.size()
479 return fctx.size()
461
480
462 @reraise_safe_exceptions
481 @reraise_safe_exceptions
463 def get_all_commit_ids(self, wire, name):
482 def get_all_commit_ids(self, wire, name):
464 repo = self._factory.repo(wire)
483 repo = self._factory.repo(wire)
465 revs = repo.filtered(name).changelog.index
484 revs = repo.filtered(name).changelog.index
466 return map(lambda x: hex(x[7]), revs)[:-1]
485 return map(lambda x: hex(x[7]), revs)[:-1]
467
486
468 @reraise_safe_exceptions
487 @reraise_safe_exceptions
469 def get_config_value(self, wire, section, name, untrusted=False):
488 def get_config_value(self, wire, section, name, untrusted=False):
470 repo = self._factory.repo(wire)
489 repo = self._factory.repo(wire)
471 return repo.ui.config(section, name, untrusted=untrusted)
490 return repo.ui.config(section, name, untrusted=untrusted)
472
491
473 @reraise_safe_exceptions
492 @reraise_safe_exceptions
474 def get_config_bool(self, wire, section, name, untrusted=False):
493 def get_config_bool(self, wire, section, name, untrusted=False):
475 repo = self._factory.repo(wire)
494 repo = self._factory.repo(wire)
476 return repo.ui.configbool(section, name, untrusted=untrusted)
495 return repo.ui.configbool(section, name, untrusted=untrusted)
477
496
478 @reraise_safe_exceptions
497 @reraise_safe_exceptions
479 def get_config_list(self, wire, section, name, untrusted=False):
498 def get_config_list(self, wire, section, name, untrusted=False):
480 repo = self._factory.repo(wire)
499 repo = self._factory.repo(wire)
481 return repo.ui.configlist(section, name, untrusted=untrusted)
500 return repo.ui.configlist(section, name, untrusted=untrusted)
482
501
483 @reraise_safe_exceptions
502 @reraise_safe_exceptions
484 def is_large_file(self, wire, path):
503 def is_large_file(self, wire, path):
485 return largefiles.lfutil.isstandin(path)
504 return largefiles.lfutil.isstandin(path)
486
505
487 @reraise_safe_exceptions
506 @reraise_safe_exceptions
488 def in_largefiles_store(self, wire, sha):
507 def in_largefiles_store(self, wire, sha):
489 repo = self._factory.repo(wire)
508 repo = self._factory.repo(wire)
490 return largefiles.lfutil.instore(repo, sha)
509 return largefiles.lfutil.instore(repo, sha)
491
510
492 @reraise_safe_exceptions
511 @reraise_safe_exceptions
493 def in_user_cache(self, wire, sha):
512 def in_user_cache(self, wire, sha):
494 repo = self._factory.repo(wire)
513 repo = self._factory.repo(wire)
495 return largefiles.lfutil.inusercache(repo.ui, sha)
514 return largefiles.lfutil.inusercache(repo.ui, sha)
496
515
497 @reraise_safe_exceptions
516 @reraise_safe_exceptions
498 def store_path(self, wire, sha):
517 def store_path(self, wire, sha):
499 repo = self._factory.repo(wire)
518 repo = self._factory.repo(wire)
500 return largefiles.lfutil.storepath(repo, sha)
519 return largefiles.lfutil.storepath(repo, sha)
501
520
502 @reraise_safe_exceptions
521 @reraise_safe_exceptions
503 def link(self, wire, sha, path):
522 def link(self, wire, sha, path):
504 repo = self._factory.repo(wire)
523 repo = self._factory.repo(wire)
505 largefiles.lfutil.link(
524 largefiles.lfutil.link(
506 largefiles.lfutil.usercachepath(repo.ui, sha), path)
525 largefiles.lfutil.usercachepath(repo.ui, sha), path)
507
526
508 @reraise_safe_exceptions
527 @reraise_safe_exceptions
509 def localrepository(self, wire, create=False):
528 def localrepository(self, wire, create=False):
510 self._factory.repo(wire, create=create)
529 self._factory.repo(wire, create=create)
511
530
512 @reraise_safe_exceptions
531 @reraise_safe_exceptions
513 def lookup(self, wire, revision, both):
532 def lookup(self, wire, revision, both):
514 # TODO Paris: Ugly hack to "deserialize" long for msgpack
533 # TODO Paris: Ugly hack to "deserialize" long for msgpack
515 if isinstance(revision, float):
534 if isinstance(revision, float):
516 revision = long(revision)
535 revision = long(revision)
517 repo = self._factory.repo(wire)
536 repo = self._factory.repo(wire)
518 try:
537 try:
519 ctx = repo[revision]
538 ctx = repo[revision]
520 except RepoLookupError:
539 except RepoLookupError:
521 raise exceptions.LookupException(revision)
540 raise exceptions.LookupException(revision)
522 except LookupError as e:
541 except LookupError as e:
523 raise exceptions.LookupException(e.name)
542 raise exceptions.LookupException(e.name)
524
543
525 if not both:
544 if not both:
526 return ctx.hex()
545 return ctx.hex()
527
546
528 ctx = repo[ctx.hex()]
547 ctx = repo[ctx.hex()]
529 return ctx.hex(), ctx.rev()
548 return ctx.hex(), ctx.rev()
530
549
531 @reraise_safe_exceptions
550 @reraise_safe_exceptions
532 def pull(self, wire, url, commit_ids=None):
551 def pull(self, wire, url, commit_ids=None):
533 repo = self._factory.repo(wire)
552 repo = self._factory.repo(wire)
534 remote = peer(repo, {}, url)
553 remote = peer(repo, {}, url)
535 if commit_ids:
554 if commit_ids:
536 commit_ids = [bin(commit_id) for commit_id in commit_ids]
555 commit_ids = [bin(commit_id) for commit_id in commit_ids]
537
556
538 return exchange.pull(
557 return exchange.pull(
539 repo, remote, heads=commit_ids, force=None).cgresult
558 repo, remote, heads=commit_ids, force=None).cgresult
540
559
541 @reraise_safe_exceptions
560 @reraise_safe_exceptions
542 def revision(self, wire, rev):
561 def revision(self, wire, rev):
543 repo = self._factory.repo(wire)
562 repo = self._factory.repo(wire)
544 ctx = repo[rev]
563 ctx = repo[rev]
545 return ctx.rev()
564 return ctx.rev()
546
565
547 @reraise_safe_exceptions
566 @reraise_safe_exceptions
548 def rev_range(self, wire, filter):
567 def rev_range(self, wire, filter):
549 repo = self._factory.repo(wire)
568 repo = self._factory.repo(wire)
550 revisions = [rev for rev in revrange(repo, filter)]
569 revisions = [rev for rev in revrange(repo, filter)]
551 return revisions
570 return revisions
552
571
553 @reraise_safe_exceptions
572 @reraise_safe_exceptions
554 def rev_range_hash(self, wire, node):
573 def rev_range_hash(self, wire, node):
555 repo = self._factory.repo(wire)
574 repo = self._factory.repo(wire)
556
575
557 def get_revs(repo, rev_opt):
576 def get_revs(repo, rev_opt):
558 if rev_opt:
577 if rev_opt:
559 revs = revrange(repo, rev_opt)
578 revs = revrange(repo, rev_opt)
560 if len(revs) == 0:
579 if len(revs) == 0:
561 return (nullrev, nullrev)
580 return (nullrev, nullrev)
562 return max(revs), min(revs)
581 return max(revs), min(revs)
563 else:
582 else:
564 return len(repo) - 1, 0
583 return len(repo) - 1, 0
565
584
566 stop, start = get_revs(repo, [node + ':'])
585 stop, start = get_revs(repo, [node + ':'])
567 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
586 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
568 return revs
587 return revs
569
588
570 @reraise_safe_exceptions
589 @reraise_safe_exceptions
571 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
590 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
572 other_path = kwargs.pop('other_path', None)
591 other_path = kwargs.pop('other_path', None)
573
592
574 # case when we want to compare two independent repositories
593 # case when we want to compare two independent repositories
575 if other_path and other_path != wire["path"]:
594 if other_path and other_path != wire["path"]:
576 baseui = self._factory._create_config(wire["config"])
595 baseui = self._factory._create_config(wire["config"])
577 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
596 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
578 else:
597 else:
579 repo = self._factory.repo(wire)
598 repo = self._factory.repo(wire)
580 return list(repo.revs(rev_spec, *args))
599 return list(repo.revs(rev_spec, *args))
581
600
582 @reraise_safe_exceptions
601 @reraise_safe_exceptions
583 def strip(self, wire, revision, update, backup):
602 def strip(self, wire, revision, update, backup):
584 repo = self._factory.repo(wire)
603 repo = self._factory.repo(wire)
585 ctx = repo[revision]
604 ctx = repo[revision]
586 hgext_strip(
605 hgext_strip(
587 repo.baseui, repo, ctx.node(), update=update, backup=backup)
606 repo.baseui, repo, ctx.node(), update=update, backup=backup)
588
607
589 @reraise_safe_exceptions
608 @reraise_safe_exceptions
590 def verify(self, wire,):
609 def verify(self, wire,):
591 repo = self._factory.repo(wire)
610 repo = self._factory.repo(wire)
592 baseui = self._factory._create_config(wire['config'])
611 baseui = self._factory._create_config(wire['config'])
593 baseui.setconfig('ui', 'quiet', 'false')
612 baseui.setconfig('ui', 'quiet', 'false')
594 output = io.BytesIO()
613 output = io.BytesIO()
595
614
596 def write(data, **unused_kwargs):
615 def write(data, **unused_kwargs):
597 output.write(data)
616 output.write(data)
598 baseui.write = write
617 baseui.write = write
599
618
600 repo.ui = baseui
619 repo.ui = baseui
601 verify.verify(repo)
620 verify.verify(repo)
602 return output.getvalue()
621 return output.getvalue()
603
622
604 @reraise_safe_exceptions
623 @reraise_safe_exceptions
605 def tag(self, wire, name, revision, message, local, user,
624 def tag(self, wire, name, revision, message, local, user,
606 tag_time, tag_timezone):
625 tag_time, tag_timezone):
607 repo = self._factory.repo(wire)
626 repo = self._factory.repo(wire)
608 ctx = repo[revision]
627 ctx = repo[revision]
609 node = ctx.node()
628 node = ctx.node()
610
629
611 date = (tag_time, tag_timezone)
630 date = (tag_time, tag_timezone)
612 try:
631 try:
613 repo.tag(name, node, message, local, user, date)
632 repo.tag(name, node, message, local, user, date)
614 except Abort as e:
633 except Abort as e:
615 log.exception("Tag operation aborted")
634 log.exception("Tag operation aborted")
616 # Exception can contain unicode which we convert
635 # Exception can contain unicode which we convert
617 raise exceptions.AbortException(repr(e))
636 raise exceptions.AbortException(repr(e))
618
637
619 @reraise_safe_exceptions
638 @reraise_safe_exceptions
620 def tags(self, wire):
639 def tags(self, wire):
621 repo = self._factory.repo(wire)
640 repo = self._factory.repo(wire)
622 return repo.tags()
641 return repo.tags()
623
642
624 @reraise_safe_exceptions
643 @reraise_safe_exceptions
625 def update(self, wire, node=None, clean=False):
644 def update(self, wire, node=None, clean=False):
626 repo = self._factory.repo(wire)
645 repo = self._factory.repo(wire)
627 baseui = self._factory._create_config(wire['config'])
646 baseui = self._factory._create_config(wire['config'])
628 commands.update(baseui, repo, node=node, clean=clean)
647 commands.update(baseui, repo, node=node, clean=clean)
629
648
630 @reraise_safe_exceptions
649 @reraise_safe_exceptions
631 def identify(self, wire):
650 def identify(self, wire):
632 repo = self._factory.repo(wire)
651 repo = self._factory.repo(wire)
633 baseui = self._factory._create_config(wire['config'])
652 baseui = self._factory._create_config(wire['config'])
634 output = io.BytesIO()
653 output = io.BytesIO()
635 baseui.write = output.write
654 baseui.write = output.write
636 # This is required to get a full node id
655 # This is required to get a full node id
637 baseui.debugflag = True
656 baseui.debugflag = True
638 commands.identify(baseui, repo, id=True)
657 commands.identify(baseui, repo, id=True)
639
658
640 return output.getvalue()
659 return output.getvalue()
641
660
642 @reraise_safe_exceptions
661 @reraise_safe_exceptions
643 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
662 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
644 hooks=True):
663 hooks=True):
645 repo = self._factory.repo(wire)
664 repo = self._factory.repo(wire)
646 baseui = self._factory._create_config(wire['config'], hooks=hooks)
665 baseui = self._factory._create_config(wire['config'], hooks=hooks)
647
666
648 # Mercurial internally has a lot of logic that checks ONLY if
667 # Mercurial internally has a lot of logic that checks ONLY if
649 # option is defined, so we only pass the options that are actually set
668 # option is defined, so we only pass the options that are actually set
650 opts = {}
669 opts = {}
651 if bookmark:
670 if bookmark:
652 opts['bookmark'] = bookmark
671 opts['bookmark'] = bookmark
653 if branch:
672 if branch:
654 opts['branch'] = branch
673 opts['branch'] = branch
655 if revision:
674 if revision:
656 opts['rev'] = revision
675 opts['rev'] = revision
657
676
658 commands.pull(baseui, repo, source, **opts)
677 commands.pull(baseui, repo, source, **opts)
659
678
660 @reraise_safe_exceptions
679 @reraise_safe_exceptions
661 def heads(self, wire, branch=None):
680 def heads(self, wire, branch=None):
662 repo = self._factory.repo(wire)
681 repo = self._factory.repo(wire)
663 baseui = self._factory._create_config(wire['config'])
682 baseui = self._factory._create_config(wire['config'])
664 output = io.BytesIO()
683 output = io.BytesIO()
665
684
666 def write(data, **unused_kwargs):
685 def write(data, **unused_kwargs):
667 output.write(data)
686 output.write(data)
668
687
669 baseui.write = write
688 baseui.write = write
670 if branch:
689 if branch:
671 args = [branch]
690 args = [branch]
672 else:
691 else:
673 args = []
692 args = []
674 commands.heads(baseui, repo, template='{node} ', *args)
693 commands.heads(baseui, repo, template='{node} ', *args)
675
694
676 return output.getvalue()
695 return output.getvalue()
677
696
678 @reraise_safe_exceptions
697 @reraise_safe_exceptions
679 def ancestor(self, wire, revision1, revision2):
698 def ancestor(self, wire, revision1, revision2):
680 repo = self._factory.repo(wire)
699 repo = self._factory.repo(wire)
681 changelog = repo.changelog
700 changelog = repo.changelog
682 lookup = repo.lookup
701 lookup = repo.lookup
683 a = changelog.ancestor(lookup(revision1), lookup(revision2))
702 a = changelog.ancestor(lookup(revision1), lookup(revision2))
684 return hex(a)
703 return hex(a)
685
704
686 @reraise_safe_exceptions
705 @reraise_safe_exceptions
687 def push(self, wire, revisions, dest_path, hooks=True,
706 def push(self, wire, revisions, dest_path, hooks=True,
688 push_branches=False):
707 push_branches=False):
689 repo = self._factory.repo(wire)
708 repo = self._factory.repo(wire)
690 baseui = self._factory._create_config(wire['config'], hooks=hooks)
709 baseui = self._factory._create_config(wire['config'], hooks=hooks)
691 commands.push(baseui, repo, dest=dest_path, rev=revisions,
710 commands.push(baseui, repo, dest=dest_path, rev=revisions,
692 new_branch=push_branches)
711 new_branch=push_branches)
693
712
694 @reraise_safe_exceptions
713 @reraise_safe_exceptions
695 def merge(self, wire, revision):
714 def merge(self, wire, revision):
696 repo = self._factory.repo(wire)
715 repo = self._factory.repo(wire)
697 baseui = self._factory._create_config(wire['config'])
716 baseui = self._factory._create_config(wire['config'])
698 repo.ui.setconfig('ui', 'merge', 'internal:dump')
717 repo.ui.setconfig('ui', 'merge', 'internal:dump')
699
718
700 # When sub repositories are used, mercurial prompts the user in
719 # When sub repositories are used, mercurial prompts the user in
701 # case of merge conflicts or different sub repository sources. By
720 # case of merge conflicts or different sub repository sources. By
702 # setting the interactive flag to `False` mercurial doesn't prompt the
721 # setting the interactive flag to `False` mercurial doesn't prompt the
703 # user but instead uses a default value.
722 # user but instead uses a default value.
704 repo.ui.setconfig('ui', 'interactive', False)
723 repo.ui.setconfig('ui', 'interactive', False)
705
724
706 commands.merge(baseui, repo, rev=revision)
725 commands.merge(baseui, repo, rev=revision)
707
726
708 @reraise_safe_exceptions
727 @reraise_safe_exceptions
709 def commit(self, wire, message, username):
728 def commit(self, wire, message, username):
710 repo = self._factory.repo(wire)
729 repo = self._factory.repo(wire)
711 baseui = self._factory._create_config(wire['config'])
730 baseui = self._factory._create_config(wire['config'])
712 repo.ui.setconfig('ui', 'username', username)
731 repo.ui.setconfig('ui', 'username', username)
713 commands.commit(baseui, repo, message=message)
732 commands.commit(baseui, repo, message=message)
714
733
715 @reraise_safe_exceptions
734 @reraise_safe_exceptions
716 def rebase(self, wire, source=None, dest=None, abort=False):
735 def rebase(self, wire, source=None, dest=None, abort=False):
717 repo = self._factory.repo(wire)
736 repo = self._factory.repo(wire)
718 baseui = self._factory._create_config(wire['config'])
737 baseui = self._factory._create_config(wire['config'])
719 repo.ui.setconfig('ui', 'merge', 'internal:dump')
738 repo.ui.setconfig('ui', 'merge', 'internal:dump')
720 rebase.rebase(
739 rebase.rebase(
721 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
740 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
722
741
723 @reraise_safe_exceptions
742 @reraise_safe_exceptions
724 def bookmark(self, wire, bookmark, revision=None):
743 def bookmark(self, wire, bookmark, revision=None):
725 repo = self._factory.repo(wire)
744 repo = self._factory.repo(wire)
726 baseui = self._factory._create_config(wire['config'])
745 baseui = self._factory._create_config(wire['config'])
727 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
746 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,404 +1,426 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2017 RodeCode GmbH
4 # Copyright (C) 2014-2017 RodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import sys
21 import sys
22 import json
22 import json
23 import logging
23 import logging
24 import collections
24 import collections
25 import importlib
25 import importlib
26 import subprocess
26 import subprocess
27
27
28 from httplib import HTTPConnection
28 from httplib import HTTPConnection
29
29
30
30
31 import mercurial.scmutil
31 import mercurial.scmutil
32 import mercurial.node
32 import mercurial.node
33 import Pyro4
34 import simplejson as json
33 import simplejson as json
35
34
36 from vcsserver import exceptions
35 from vcsserver import exceptions
37
36
38 log = logging.getLogger(__name__)
37 log = logging.getLogger(__name__)
39
38
40
39
41 class HooksHttpClient(object):
40 class HooksHttpClient(object):
42 connection = None
41 connection = None
43
42
44 def __init__(self, hooks_uri):
43 def __init__(self, hooks_uri):
45 self.hooks_uri = hooks_uri
44 self.hooks_uri = hooks_uri
46
45
47 def __call__(self, method, extras):
46 def __call__(self, method, extras):
48 connection = HTTPConnection(self.hooks_uri)
47 connection = HTTPConnection(self.hooks_uri)
49 body = self._serialize(method, extras)
48 body = self._serialize(method, extras)
50 connection.request('POST', '/', body)
49 connection.request('POST', '/', body)
51 response = connection.getresponse()
50 response = connection.getresponse()
52 return json.loads(response.read())
51 return json.loads(response.read())
53
52
54 def _serialize(self, hook_name, extras):
53 def _serialize(self, hook_name, extras):
55 data = {
54 data = {
56 'method': hook_name,
55 'method': hook_name,
57 'extras': extras
56 'extras': extras
58 }
57 }
59 return json.dumps(data)
58 return json.dumps(data)
60
59
61
60
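As a side note on the HTTP hooks transport above: the client POSTs a small JSON document of the form {'method': ..., 'extras': ...} and expects a JSON reply carrying at least 'status' and 'output' (plus optional exception fields handled further down in _handle_exception). The sketch below shows what a compatible handler might look like; the function name and the echoed values are illustrative only and are not part of this codebase.

import json

def handle_hook_request(body):
    # Decode the payload produced by HooksHttpClient._serialize.
    data = json.loads(body)
    hook_name = data['method']
    extras = data['extras']
    # ... dispatch hook_name/extras to the real hook implementation here ...
    # Reply with the structure _call_hook and _handle_exception expect.
    return json.dumps({'status': 0, 'output': '', 'exception': None})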
62 class HooksDummyClient(object):
61 class HooksDummyClient(object):
63 def __init__(self, hooks_module):
62 def __init__(self, hooks_module):
64 self._hooks_module = importlib.import_module(hooks_module)
63 self._hooks_module = importlib.import_module(hooks_module)
65
64
66 def __call__(self, hook_name, extras):
65 def __call__(self, hook_name, extras):
67 with self._hooks_module.Hooks() as hooks:
66 with self._hooks_module.Hooks() as hooks:
68 return getattr(hooks, hook_name)(extras)
67 return getattr(hooks, hook_name)(extras)
69
68
70
69
71 class HooksPyro4Client(object):
72 def __init__(self, hooks_uri):
73 self.hooks_uri = hooks_uri
74
75 def __call__(self, hook_name, extras):
76 with Pyro4.Proxy(self.hooks_uri) as hooks:
77 return getattr(hooks, hook_name)(extras)
78
79
80 class RemoteMessageWriter(object):
70 class RemoteMessageWriter(object):
81 """Writer base class."""
71 """Writer base class."""
82 def write(message):
72 def write(self, message):
83 raise NotImplementedError()
73 raise NotImplementedError()
84
74
85
75
86 class HgMessageWriter(RemoteMessageWriter):
76 class HgMessageWriter(RemoteMessageWriter):
87 """Writer that knows how to send messages to mercurial clients."""
77 """Writer that knows how to send messages to mercurial clients."""
88
78
89 def __init__(self, ui):
79 def __init__(self, ui):
90 self.ui = ui
80 self.ui = ui
91
81
92 def write(self, message):
82 def write(self, message):
93 # TODO: Check why the quiet flag is set by default.
83 # TODO: Check why the quiet flag is set by default.
94 old = self.ui.quiet
84 old = self.ui.quiet
95 self.ui.quiet = False
85 self.ui.quiet = False
96 self.ui.status(message.encode('utf-8'))
86 self.ui.status(message.encode('utf-8'))
97 self.ui.quiet = old
87 self.ui.quiet = old
98
88
99
89
100 class GitMessageWriter(RemoteMessageWriter):
90 class GitMessageWriter(RemoteMessageWriter):
101 """Writer that knows how to send messages to git clients."""
91 """Writer that knows how to send messages to git clients."""
102
92
103 def __init__(self, stdout=None):
93 def __init__(self, stdout=None):
104 self.stdout = stdout or sys.stdout
94 self.stdout = stdout or sys.stdout
105
95
106 def write(self, message):
96 def write(self, message):
107 self.stdout.write(message.encode('utf-8'))
97 self.stdout.write(message.encode('utf-8'))
108
98
109
99
110 def _handle_exception(result):
100 def _handle_exception(result):
111 exception_class = result.get('exception')
101 exception_class = result.get('exception')
112 exception_traceback = result.get('exception_traceback')
102 exception_traceback = result.get('exception_traceback')
113
103
114 if exception_traceback:
104 if exception_traceback:
115 log.error('Got traceback from remote call:%s', exception_traceback)
105 log.error('Got traceback from remote call:%s', exception_traceback)
116
106
117 if exception_class == 'HTTPLockedRC':
107 if exception_class == 'HTTPLockedRC':
118 raise exceptions.RepositoryLockedException(*result['exception_args'])
108 raise exceptions.RepositoryLockedException(*result['exception_args'])
119 elif exception_class == 'RepositoryError':
109 elif exception_class == 'RepositoryError':
120 raise exceptions.VcsException(*result['exception_args'])
110 raise exceptions.VcsException(*result['exception_args'])
121 elif exception_class:
111 elif exception_class:
122 raise Exception('Got remote exception "%s" with args "%s"' %
112 raise Exception('Got remote exception "%s" with args "%s"' %
123 (exception_class, result['exception_args']))
113 (exception_class, result['exception_args']))
124
114
125
115
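For illustration, here is how _handle_exception treats two result payloads (the values are made up; the snippet relies on this module's `exceptions` import):

# A clean result: no exception keys present, so nothing is raised.
_handle_exception({'status': 0, 'output': ''})

# A remote RepositoryError is re-raised locally as a VcsException.
try:
    _handle_exception({'exception': 'RepositoryError',
                       'exception_args': ['repository has been removed'],
                       'exception_traceback': None})
except exceptions.VcsException:
    pass  # the remote error surfaces as a local VcsException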
126 def _get_hooks_client(extras):
116 def _get_hooks_client(extras):
127 if 'hooks_uri' in extras:
117 if 'hooks_uri' in extras:
128 protocol = extras.get('hooks_protocol')
118 protocol = extras.get('hooks_protocol')
129 return (
119 return HooksHttpClient(extras['hooks_uri'])
130 HooksHttpClient(extras['hooks_uri'])
131 if protocol == 'http'
132 else HooksPyro4Client(extras['hooks_uri'])
133 )
134 else:
120 else:
135 return HooksDummyClient(extras['hooks_module'])
121 return HooksDummyClient(extras['hooks_module'])
136
122
137
123
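To make the selection above concrete, two hypothetical extras dictionaries and the client each one resolves to (the URI and module name are placeholders):

# An 'hooks_uri' entry selects the HTTP transport.
http_client = _get_hooks_client(
    {'hooks_uri': '127.0.0.1:10000', 'hooks_protocol': 'http'})
assert isinstance(http_client, HooksHttpClient)

# Without 'hooks_uri', the in-process dummy client is used instead.
dummy_client = _get_hooks_client({'hooks_module': 'vcsserver.hooks'})
assert isinstance(dummy_client, HooksDummyClient)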
138 def _call_hook(hook_name, extras, writer):
124 def _call_hook(hook_name, extras, writer):
139 hooks = _get_hooks_client(extras)
125 hooks = _get_hooks_client(extras)
140 result = hooks(hook_name, extras)
126 result = hooks(hook_name, extras)
141 writer.write(result['output'])
127 writer.write(result['output'])
142 _handle_exception(result)
128 _handle_exception(result)
143
129
144 return result['status']
130 return result['status']
145
131
146
132
147 def _extras_from_ui(ui):
133 def _extras_from_ui(ui):
148 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
134 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
149 return extras
135 return extras
150
136
151
137
152 def repo_size(ui, repo, **kwargs):
138 def repo_size(ui, repo, **kwargs):
153 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
139 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
154
140
155
141
156 def pre_pull(ui, repo, **kwargs):
142 def pre_pull(ui, repo, **kwargs):
157 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
143 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
158
144
159
145
160 def post_pull(ui, repo, **kwargs):
146 def post_pull(ui, repo, **kwargs):
161 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
147 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
162
148
163
149
150 def _rev_range_hash(repo, node):
151
152 commits = []
153 for rev in xrange(repo[node], len(repo)):
154 ctx = repo[rev]
155 commit_id = mercurial.node.hex(ctx.node())
156 branch = ctx.branch()
157 commits.append((commit_id, branch))
158
159 return commits
160
161
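The helper above yields (commit_id, branch) pairs from the first incoming node up to the repository tip. A small self-contained sketch of how such pairs are grouped per branch, mirroring the loop in pre_push below (the hashes are placeholders, not real commits):

import collections

pairs = [('f3a1', 'default'), ('9bc2', 'default'), ('77de', 'stable')]
branches = collections.defaultdict(list)
for commit_id, branch in pairs:
    branches[branch].append(commit_id)
# branches == {'default': ['f3a1', '9bc2'], 'stable': ['77de']}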
164 def pre_push(ui, repo, node=None, **kwargs):
162 def pre_push(ui, repo, node=None, **kwargs):
165 extras = _extras_from_ui(ui)
163 extras = _extras_from_ui(ui)
166
164
167 rev_data = []
165 rev_data = []
168 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
166 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
169 branches = collections.defaultdict(list)
167 branches = collections.defaultdict(list)
170 for commit_id, branch in _rev_range_hash(repo, node, with_branch=True):
168 for commit_id, branch in _rev_range_hash(repo, node):
171 branches[branch].append(commit_id)
169 branches[branch].append(commit_id)
172
170
173 for branch, commits in branches.iteritems():
171 for branch, commits in branches.iteritems():
174 old_rev = kwargs.get('node_last') or commits[0]
172 old_rev = kwargs.get('node_last') or commits[0]
175 rev_data.append({
173 rev_data.append({
176 'old_rev': old_rev,
174 'old_rev': old_rev,
177 'new_rev': commits[-1],
175 'new_rev': commits[-1],
178 'ref': '',
176 'ref': '',
179 'type': 'branch',
177 'type': 'branch',
180 'name': branch,
178 'name': branch,
181 })
179 })
182
180
183 extras['commit_ids'] = rev_data
181 extras['commit_ids'] = rev_data
184 return _call_hook('pre_push', extras, HgMessageWriter(ui))
182 return _call_hook('pre_push', extras, HgMessageWriter(ui))
185
183
186
184
187 def _rev_range_hash(repo, node, with_branch=False):
185 def post_push(ui, repo, node, **kwargs):
186 extras = _extras_from_ui(ui)
187
188 commit_ids = []
189 branches = []
190 bookmarks = []
191 tags = []
188
192
189 commits = []
193 for commit_id, branch in _rev_range_hash(repo, node):
190 for rev in xrange(repo[node], len(repo)):
194 commit_ids.append(commit_id)
191 ctx = repo[rev]
195 if branch not in branches:
192 commit_id = mercurial.node.hex(ctx.node())
196 branches.append(branch)
193 branch = ctx.branch()
194 if with_branch:
195 commits.append((commit_id, branch))
196 else:
197 commits.append(commit_id)
198
197
199 return commits
198 if hasattr(ui, '_rc_pushkey_branches'):
200
199 bookmarks = ui._rc_pushkey_branches
201
200
202 def post_push(ui, repo, node, **kwargs):
203 commit_ids = _rev_range_hash(repo, node)
204
205 extras = _extras_from_ui(ui)
206 extras['commit_ids'] = commit_ids
201 extras['commit_ids'] = commit_ids
202 extras['new_refs'] = {
203 'branches': branches,
204 'bookmarks': bookmarks,
205 'tags': tags
206 }
207
207
208 return _call_hook('post_push', extras, HgMessageWriter(ui))
208 return _call_hook('post_push', extras, HgMessageWriter(ui))
209
209
210
210
211 def key_push(ui, repo, **kwargs):
212 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
213 # store new bookmarks in our UI object propagated later to post_push
214 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
215 return
216
211 # backward compat
217 # backward compat
212 log_pull_action = post_pull
218 log_pull_action = post_pull
213
219
214 # backward compat
220 # backward compat
215 log_push_action = post_push
221 log_push_action = post_push
216
222
217
223
218 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
224 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
219 """
225 """
220 Old hook name: keep here for backward compatibility.
226 Old hook name: keep here for backward compatibility.
221
227
222 This is only required when the installed git hooks are not upgraded.
228 This is only required when the installed git hooks are not upgraded.
223 """
229 """
224 pass
230 pass
225
231
226
232
227 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
233 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
228 """
234 """
229 Old hook name: keep here for backward compatibility.
235 Old hook name: keep here for backward compatibility.
230
236
231 This is only required when the installed git hooks are not upgraded.
237 This is only required when the installed git hooks are not upgraded.
232 """
238 """
233 pass
239 pass
234
240
235
241
236 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
242 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
237
243
238
244
239 def git_pre_pull(extras):
245 def git_pre_pull(extras):
240 """
246 """
241 Pre pull hook.
247 Pre pull hook.
242
248
243 :param extras: dictionary containing the keys defined in simplevcs
249 :param extras: dictionary containing the keys defined in simplevcs
244 :type extras: dict
250 :type extras: dict
245
251
246 :return: status code of the hook. 0 for success.
252 :return: status code of the hook. 0 for success.
247 :rtype: int
253 :rtype: int
248 """
254 """
249 if 'pull' not in extras['hooks']:
255 if 'pull' not in extras['hooks']:
250 return HookResponse(0, '')
256 return HookResponse(0, '')
251
257
252 stdout = io.BytesIO()
258 stdout = io.BytesIO()
253 try:
259 try:
254 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
260 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
255 except Exception as error:
261 except Exception as error:
256 status = 128
262 status = 128
257 stdout.write('ERROR: %s\n' % str(error))
263 stdout.write('ERROR: %s\n' % str(error))
258
264
259 return HookResponse(status, stdout.getvalue())
265 return HookResponse(status, stdout.getvalue())
260
266
261
267
262 def git_post_pull(extras):
268 def git_post_pull(extras):
263 """
269 """
264 Post pull hook.
270 Post pull hook.
265
271
266 :param extras: dictionary containing the keys defined in simplevcs
272 :param extras: dictionary containing the keys defined in simplevcs
267 :type extras: dict
273 :type extras: dict
268
274
269 :return: status code of the hook. 0 for success.
275 :return: status code of the hook. 0 for success.
270 :rtype: int
276 :rtype: int
271 """
277 """
272 if 'pull' not in extras['hooks']:
278 if 'pull' not in extras['hooks']:
273 return HookResponse(0, '')
279 return HookResponse(0, '')
274
280
275 stdout = io.BytesIO()
281 stdout = io.BytesIO()
276 try:
282 try:
277 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
283 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
278 except Exception as error:
284 except Exception as error:
279 status = 128
285 status = 128
280 stdout.write('ERROR: %s\n' % error)
286 stdout.write('ERROR: %s\n' % error)
281
287
282 return HookResponse(status, stdout.getvalue())
288 return HookResponse(status, stdout.getvalue())
283
289
284
290
285 def _parse_git_ref_lines(revision_lines):
291 def _parse_git_ref_lines(revision_lines):
286 rev_data = []
292 rev_data = []
287 for revision_line in revision_lines or []:
293 for revision_line in revision_lines or []:
288 old_rev, new_rev, ref = revision_line.strip().split(' ')
294 old_rev, new_rev, ref = revision_line.strip().split(' ')
289 ref_data = ref.split('/', 2)
295 ref_data = ref.split('/', 2)
290 if ref_data[1] in ('tags', 'heads'):
296 if ref_data[1] in ('tags', 'heads'):
291 rev_data.append({
297 rev_data.append({
292 'old_rev': old_rev,
298 'old_rev': old_rev,
293 'new_rev': new_rev,
299 'new_rev': new_rev,
294 'ref': ref,
300 'ref': ref,
295 'type': ref_data[1],
301 'type': ref_data[1],
296 'name': ref_data[2],
302 'name': ref_data[2],
297 })
303 })
298 return rev_data
304 return rev_data
299
305
300
306
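For reference, the revision lines git hands to the receive hooks have the form 'old-sha new-sha refname'; the parser above turns them into dictionaries like this (SHAs shortened here for readability):

lines = ['1111111 2222222 refs/heads/master',
         '0000000 3333333 refs/tags/v4.8.0']
rev_data = _parse_git_ref_lines(lines)
# rev_data[0] == {'old_rev': '1111111', 'new_rev': '2222222',
#                 'ref': 'refs/heads/master', 'type': 'heads', 'name': 'master'}
# rev_data[1]['type'] == 'tags' and rev_data[1]['name'] == 'v4.8.0'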
301 def git_pre_receive(unused_repo_path, revision_lines, env):
307 def git_pre_receive(unused_repo_path, revision_lines, env):
302 """
308 """
303 Pre push hook.
309 Pre push hook.
304
310
305 :param extras: dictionary containing the keys defined in simplevcs
311 :param extras: dictionary containing the keys defined in simplevcs
306 :type extras: dict
312 :type extras: dict
307
313
308 :return: status code of the hook. 0 for success.
314 :return: status code of the hook. 0 for success.
309 :rtype: int
315 :rtype: int
310 """
316 """
311 extras = json.loads(env['RC_SCM_DATA'])
317 extras = json.loads(env['RC_SCM_DATA'])
312 rev_data = _parse_git_ref_lines(revision_lines)
318 rev_data = _parse_git_ref_lines(revision_lines)
313 if 'push' not in extras['hooks']:
319 if 'push' not in extras['hooks']:
314 return 0
320 return 0
315 extras['commit_ids'] = rev_data
321 extras['commit_ids'] = rev_data
316 return _call_hook('pre_push', extras, GitMessageWriter())
322 return _call_hook('pre_push', extras, GitMessageWriter())
317
323
318
324
319 def _run_command(arguments):
325 def _run_command(arguments):
320 """
326 """
321 Run the specified command and return the stdout.
327 Run the specified command and return the stdout.
322
328
323 :param arguments: sequence of program arguments (including the program name)
329 :param arguments: sequence of program arguments (including the program name)
324 :type arguments: list[str]
330 :type arguments: list[str]
325 """
331 """
326 # TODO(skreft): refactor this method and all the other similar ones.
332 # TODO(skreft): refactor this method and all the other similar ones.
327 # Probably this should be using subprocessio.
333 # Probably this should be using subprocessio.
328 process = subprocess.Popen(
334 process = subprocess.Popen(
329 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
335 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
330 stdout, _ = process.communicate()
336 stdout, stderr = process.communicate()
331
337
332 if process.returncode != 0:
338 if process.returncode != 0:
333 raise Exception(
339 raise Exception(
334 'Command %s exited with exit code %s' % (arguments,
340 'Command %s exited with exit code %s: stderr:%s' % (
335 process.returncode))
341 arguments, process.returncode, stderr))
336
342
337 return stdout
343 return stdout
338
344
339
345
340 def git_post_receive(unused_repo_path, revision_lines, env):
346 def git_post_receive(unused_repo_path, revision_lines, env):
341 """
347 """
342 Post push hook.
348 Post push hook.
343
349
344 :param extras: dictionary containing the keys defined in simplevcs
350 :param extras: dictionary containing the keys defined in simplevcs
345 :type extras: dict
351 :type extras: dict
346
352
347 :return: status code of the hook. 0 for success.
353 :return: status code of the hook. 0 for success.
348 :rtype: int
354 :rtype: int
349 """
355 """
350 extras = json.loads(env['RC_SCM_DATA'])
356 extras = json.loads(env['RC_SCM_DATA'])
351 if 'push' not in extras['hooks']:
357 if 'push' not in extras['hooks']:
352 return 0
358 return 0
353
359
354 rev_data = _parse_git_ref_lines(revision_lines)
360 rev_data = _parse_git_ref_lines(revision_lines)
355
361
356 git_revs = []
362 git_revs = []
357
363
358 # N.B.(skreft): it is ok to just call git, as git before calling a
364 # N.B.(skreft): it is ok to just call git, as git before calling a
359 # subcommand sets the PATH environment variable so that it points to the
365 # subcommand sets the PATH environment variable so that it points to the
360 # correct version of the git executable.
366 # correct version of the git executable.
361 empty_commit_id = '0' * 40
367 empty_commit_id = '0' * 40
368 branches = []
369 tags = []
362 for push_ref in rev_data:
370 for push_ref in rev_data:
363 type_ = push_ref['type']
371 type_ = push_ref['type']
372
364 if type_ == 'heads':
373 if type_ == 'heads':
365 if push_ref['old_rev'] == empty_commit_id:
374 if push_ref['old_rev'] == empty_commit_id:
375 # starting new branch case
376 if push_ref['name'] not in branches:
377 branches.append(push_ref['name'])
366
378
367 # Fix up head revision if needed
379 # Fix up head revision if needed
368 cmd = ['git', 'show', 'HEAD']
380 cmd = ['git', 'show', 'HEAD']
369 try:
381 try:
370 _run_command(cmd)
382 _run_command(cmd)
371 except Exception:
383 except Exception:
372 cmd = ['git', 'symbolic-ref', 'HEAD',
384 cmd = ['git', 'symbolic-ref', 'HEAD',
373 'refs/heads/%s' % push_ref['name']]
385 'refs/heads/%s' % push_ref['name']]
374 print("Setting default branch to %s" % push_ref['name'])
386 print("Setting default branch to %s" % push_ref['name'])
375 _run_command(cmd)
387 _run_command(cmd)
376
388
377 cmd = ['git', 'for-each-ref', '--format=%(refname)',
389 cmd = ['git', 'for-each-ref', '--format=%(refname)',
378 'refs/heads/*']
390 'refs/heads/*']
379 heads = _run_command(cmd)
391 heads = _run_command(cmd)
380 heads = heads.replace(push_ref['ref'], '')
392 heads = heads.replace(push_ref['ref'], '')
381 heads = ' '.join(head for head in heads.splitlines() if head)
393 heads = ' '.join(head for head in heads.splitlines() if head)
382 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
394 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
383 '--', push_ref['new_rev'], '--not', heads]
395 '--', push_ref['new_rev'], '--not', heads]
384 git_revs.extend(_run_command(cmd).splitlines())
396 git_revs.extend(_run_command(cmd).splitlines())
385 elif push_ref['new_rev'] == empty_commit_id:
397 elif push_ref['new_rev'] == empty_commit_id:
386 # delete branch case
398 # delete branch case
387 git_revs.append('delete_branch=>%s' % push_ref['name'])
399 git_revs.append('delete_branch=>%s' % push_ref['name'])
388 else:
400 else:
401 if push_ref['name'] not in branches:
402 branches.append(push_ref['name'])
403
389 cmd = ['git', 'log',
404 cmd = ['git', 'log',
390 '{old_rev}..{new_rev}'.format(**push_ref),
405 '{old_rev}..{new_rev}'.format(**push_ref),
391 '--reverse', '--pretty=format:%H']
406 '--reverse', '--pretty=format:%H']
392 git_revs.extend(_run_command(cmd).splitlines())
407 git_revs.extend(_run_command(cmd).splitlines())
393 elif type_ == 'tags':
408 elif type_ == 'tags':
409 if push_ref['name'] not in tags:
410 tags.append(push_ref['name'])
394 git_revs.append('tag=>%s' % push_ref['name'])
411 git_revs.append('tag=>%s' % push_ref['name'])
395
412
396 extras['commit_ids'] = git_revs
413 extras['commit_ids'] = git_revs
414 extras['new_refs'] = {
415 'branches': branches,
416 'bookmarks': [],
417 'tags': tags,
418 }
397
419
398 if 'repo_size' in extras['hooks']:
420 if 'repo_size' in extras['hooks']:
399 try:
421 try:
400 _call_hook('repo_size', extras, GitMessageWriter())
422 _call_hook('repo_size', extras, GitMessageWriter())
401 except:
423 except:
402 pass
424 pass
403
425
404 return _call_hook('post_push', extras, GitMessageWriter())
426 return _call_hook('post_push', extras, GitMessageWriter())
@@ -1,375 +1,381 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """Handles the Git smart protocol."""
18 """Handles the Git smart protocol."""
19
19
20 import os
20 import os
21 import socket
21 import socket
22 import logging
22 import logging
23
23
24 import simplejson as json
24 import simplejson as json
25 import dulwich.protocol
25 import dulwich.protocol
26 from webob import Request, Response, exc
26 from webob import Request, Response, exc
27
27
28 from vcsserver import hooks, subprocessio
28 from vcsserver import hooks, subprocessio
29
29
30
30
31 log = logging.getLogger(__name__)
31 log = logging.getLogger(__name__)
32
32
33
33
34 class FileWrapper(object):
34 class FileWrapper(object):
35 """File wrapper that ensures how much data is read from it."""
35 """File wrapper that ensures how much data is read from it."""
36
36
37 def __init__(self, fd, content_length):
37 def __init__(self, fd, content_length):
38 self.fd = fd
38 self.fd = fd
39 self.content_length = content_length
39 self.content_length = content_length
40 self.remain = content_length
40 self.remain = content_length
41
41
42 def read(self, size):
42 def read(self, size):
43 if size <= self.remain:
43 if size <= self.remain:
44 try:
44 try:
45 data = self.fd.read(size)
45 data = self.fd.read(size)
46 except socket.error:
46 except socket.error:
47 raise IOError(self)
47 raise IOError(self)
48 self.remain -= size
48 self.remain -= size
49 elif self.remain:
49 elif self.remain:
50 data = self.fd.read(self.remain)
50 data = self.fd.read(self.remain)
51 self.remain = 0
51 self.remain = 0
52 else:
52 else:
53 data = None
53 data = None
54 return data
54 return data
55
55
56 def __repr__(self):
56 def __repr__(self):
57 return '<FileWrapper %s len: %s, read: %s>' % (
57 return '<FileWrapper %s len: %s, read: %s>' % (
58 self.fd, self.content_length, self.content_length - self.remain
58 self.fd, self.content_length, self.content_length - self.remain
59 )
59 )
60
60
61
61
62 class GitRepository(object):
62 class GitRepository(object):
63 """WSGI app for handling Git smart protocol endpoints."""
63 """WSGI app for handling Git smart protocol endpoints."""
64
64
65 git_folder_signature = frozenset(
65 git_folder_signature = frozenset(
66 ('config', 'head', 'info', 'objects', 'refs'))
66 ('config', 'head', 'info', 'objects', 'refs'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
67 commands = frozenset(('git-upload-pack', 'git-receive-pack'))
68 valid_accepts = frozenset(('application/x-%s-result' %
68 valid_accepts = frozenset(('application/x-%s-result' %
69 c for c in commands))
69 c for c in commands))
70
70
71 # The last bytes are the SHA1 of the first 12 bytes.
71 # The last bytes are the SHA1 of the first 12 bytes.
72 EMPTY_PACK = (
72 EMPTY_PACK = (
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
73 'PACK\x00\x00\x00\x02\x00\x00\x00\x00' +
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
74 '\x02\x9d\x08\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e'
75 )
75 )
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
76 SIDE_BAND_CAPS = frozenset(('side-band', 'side-band-64k'))
77
77
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
78 def __init__(self, repo_name, content_path, git_path, update_server_info,
79 extras):
79 extras):
80 files = frozenset(f.lower() for f in os.listdir(content_path))
80 files = frozenset(f.lower() for f in os.listdir(content_path))
81 valid_dir_signature = self.git_folder_signature.issubset(files)
81 valid_dir_signature = self.git_folder_signature.issubset(files)
82
82
83 if not valid_dir_signature:
83 if not valid_dir_signature:
84 raise OSError('%s missing git signature' % content_path)
84 raise OSError('%s missing git signature' % content_path)
85
85
86 self.content_path = content_path
86 self.content_path = content_path
87 self.repo_name = repo_name
87 self.repo_name = repo_name
88 self.extras = extras
88 self.extras = extras
89 self.git_path = git_path
89 self.git_path = git_path
90 self.update_server_info = update_server_info
90 self.update_server_info = update_server_info
91
91
92 def _get_fixedpath(self, path):
92 def _get_fixedpath(self, path):
93 """
93 """
94 Small fix for repo_path
94 Small fix for repo_path
95
95
96 :param path:
96 :param path:
97 """
97 """
98 return path.split(self.repo_name, 1)[-1].strip('/')
98 path = path.split(self.repo_name, 1)[-1]
99 if path.startswith('.git'):
100 # for bare repos we still get the .git prefix inside, we skip it
101 # here, and remove from the service command
102 path = path[4:]
103
104 return path.strip('/')
99
105
100 def inforefs(self, request, unused_environ):
106 def inforefs(self, request, unused_environ):
101 """
107 """
102 WSGI Response producer for HTTP GET Git Smart
108 WSGI Response producer for HTTP GET Git Smart
103 HTTP /info/refs request.
109 HTTP /info/refs request.
104 """
110 """
105
111
106 git_command = request.GET.get('service')
112 git_command = request.GET.get('service')
107 if git_command not in self.commands:
113 if git_command not in self.commands:
108 log.debug('command %s not allowed', git_command)
114 log.debug('command %s not allowed', git_command)
109 return exc.HTTPForbidden()
115 return exc.HTTPForbidden()
110
116
111 # please, resist the urge to add '\n' to git capture and increment
117 # please, resist the urge to add '\n' to git capture and increment
112 # line count by 1.
118 # line count by 1.
113 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
119 # by git docs: Documentation/technical/http-protocol.txt#L214 \n is
114 # a part of protocol.
120 # a part of protocol.
115 # The code in Git client not only does NOT need '\n', but actually
121 # The code in Git client not only does NOT need '\n', but actually
116 # blows up if you sprinkle "flush" (0000) as "0001\n".
122 # blows up if you sprinkle "flush" (0000) as "0001\n".
117 # It reads binary, per number of bytes specified.
123 # It reads binary, per number of bytes specified.
118 # if you do add '\n' as part of data, count it.
124 # if you do add '\n' as part of data, count it.
119 server_advert = '# service=%s\n' % git_command
125 server_advert = '# service=%s\n' % git_command
120 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
126 packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
121 try:
127 try:
122 gitenv = dict(os.environ)
128 gitenv = dict(os.environ)
123 # forget all configs
129 # forget all configs
124 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
130 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
125 command = [self.git_path, git_command[4:], '--stateless-rpc',
131 command = [self.git_path, git_command[4:], '--stateless-rpc',
126 '--advertise-refs', self.content_path]
132 '--advertise-refs', self.content_path]
127 out = subprocessio.SubprocessIOChunker(
133 out = subprocessio.SubprocessIOChunker(
128 command,
134 command,
129 env=gitenv,
135 env=gitenv,
130 starting_values=[packet_len + server_advert + '0000'],
136 starting_values=[packet_len + server_advert + '0000'],
131 shell=False
137 shell=False
132 )
138 )
133 except EnvironmentError:
139 except EnvironmentError:
134 log.exception('Error processing command')
140 log.exception('Error processing command')
135 raise exc.HTTPExpectationFailed()
141 raise exc.HTTPExpectationFailed()
136
142
137 resp = Response()
143 resp = Response()
138 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
144 resp.content_type = 'application/x-%s-advertisement' % str(git_command)
139 resp.charset = None
145 resp.charset = None
140 resp.app_iter = out
146 resp.app_iter = out
141
147
142 return resp
148 return resp
143
149
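The length prefix computed in inforefs above follows the Git pkt-line rule: the total line length, including the four hex digits themselves, encoded as zero-padded lowercase hex. A quick worked example for git-upload-pack:

server_advert = '# service=git-upload-pack\n'   # 26 bytes of payload
packet_len = str(hex(len(server_advert) + 4)[2:].rjust(4, '0')).lower()
# packet_len == '001e' (26 + 4 = 30 = 0x1e), so the first advertised line is
# '001e# service=git-upload-pack\n', followed later by the '0000' flush packet.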
144 def _get_want_capabilities(self, request):
150 def _get_want_capabilities(self, request):
145 """Read the capabilities found in the first want line of the request."""
151 """Read the capabilities found in the first want line of the request."""
146 pos = request.body_file_seekable.tell()
152 pos = request.body_file_seekable.tell()
147 first_line = request.body_file_seekable.readline()
153 first_line = request.body_file_seekable.readline()
148 request.body_file_seekable.seek(pos)
154 request.body_file_seekable.seek(pos)
149
155
150 return frozenset(
156 return frozenset(
151 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
157 dulwich.protocol.extract_want_line_capabilities(first_line)[1])
152
158
153 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
159 def _build_failed_pre_pull_response(self, capabilities, pre_pull_messages):
154 """
160 """
155 Construct a response with an empty PACK file.
161 Construct a response with an empty PACK file.
156
162
157 We use an empty PACK file, as that would trigger the failure of the pull
163 We use an empty PACK file, as that would trigger the failure of the pull
158 or clone command.
164 or clone command.
159
165
160 We also print in the error output a message explaining why the command
166 We also print in the error output a message explaining why the command
161 was aborted.
167 was aborted.
162
168
163 If, additionally, the user is accepting messages we send them the output
169 If, additionally, the user is accepting messages we send them the output
164 of the pre-pull hook.
170 of the pre-pull hook.
165
171
166 Note that for clients not supporting side-band we just send them the
172 Note that for clients not supporting side-band we just send them the
167 empty PACK file.
173 empty PACK file.
168 """
174 """
169 if self.SIDE_BAND_CAPS.intersection(capabilities):
175 if self.SIDE_BAND_CAPS.intersection(capabilities):
170 response = []
176 response = []
171 proto = dulwich.protocol.Protocol(None, response.append)
177 proto = dulwich.protocol.Protocol(None, response.append)
172 proto.write_pkt_line('NAK\n')
178 proto.write_pkt_line('NAK\n')
173 self._write_sideband_to_proto(pre_pull_messages, proto,
179 self._write_sideband_to_proto(pre_pull_messages, proto,
174 capabilities)
180 capabilities)
175 # N.B.(skreft): Do not change the sideband channel to 3, as that
181 # N.B.(skreft): Do not change the sideband channel to 3, as that
176 # produces a fatal error in the client:
182 # produces a fatal error in the client:
177 # fatal: error in sideband demultiplexer
183 # fatal: error in sideband demultiplexer
178 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
184 proto.write_sideband(2, 'Pre pull hook failed: aborting\n')
179 proto.write_sideband(1, self.EMPTY_PACK)
185 proto.write_sideband(1, self.EMPTY_PACK)
180
186
181 # writes 0000
187 # writes 0000
182 proto.write_pkt_line(None)
188 proto.write_pkt_line(None)
183
189
184 return response
190 return response
185 else:
191 else:
186 return [self.EMPTY_PACK]
192 return [self.EMPTY_PACK]
187
193
188 def _write_sideband_to_proto(self, data, proto, capabilities):
194 def _write_sideband_to_proto(self, data, proto, capabilities):
189 """
195 """
190 Write the data to the proto's sideband number 2.
196 Write the data to the proto's sideband number 2.
191
197
192 We do not use dulwich's write_sideband directly as it only supports
198 We do not use dulwich's write_sideband directly as it only supports
193 side-band-64k.
199 side-band-64k.
194 """
200 """
195 if not data:
201 if not data:
196 return
202 return
197
203
198 # N.B.(skreft): The values below are explained in the pack protocol
204 # N.B.(skreft): The values below are explained in the pack protocol
199 # documentation, section Packfile Data.
205 # documentation, section Packfile Data.
200 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
206 # https://github.com/git/git/blob/master/Documentation/technical/pack-protocol.txt
201 if 'side-band-64k' in capabilities:
207 if 'side-band-64k' in capabilities:
202 chunk_size = 65515
208 chunk_size = 65515
203 elif 'side-band' in capabilities:
209 elif 'side-band' in capabilities:
204 chunk_size = 995
210 chunk_size = 995
205 else:
211 else:
206 return
212 return
207
213
208 chunker = (
214 chunker = (
209 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
215 data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size))
210
216
211 for chunk in chunker:
217 for chunk in chunker:
212 proto.write_sideband(2, chunk)
218 proto.write_sideband(2, chunk)
213
219
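As a quick illustration of the chunking above (the payload here is synthetic): with side-band-64k the code slices the data into 65515-byte pieces, each of which is then written to sideband channel 2; with plain side-band the slice size drops to 995 bytes.

data = 'x' * 200000
chunk_size = 65515  # 995 when only plain side-band is advertised
chunks = [data[i:i + chunk_size] for i in xrange(0, len(data), chunk_size)]
# len(chunks) == 4; the final chunk carries the remaining 3455 bytes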
214 def _get_messages(self, data, capabilities):
220 def _get_messages(self, data, capabilities):
215 """Return a list with packets for sending data in sideband number 2."""
221 """Return a list with packets for sending data in sideband number 2."""
216 response = []
222 response = []
217 proto = dulwich.protocol.Protocol(None, response.append)
223 proto = dulwich.protocol.Protocol(None, response.append)
218
224
219 self._write_sideband_to_proto(data, proto, capabilities)
225 self._write_sideband_to_proto(data, proto, capabilities)
220
226
221 return response
227 return response
222
228
223 def _inject_messages_to_response(self, response, capabilities,
229 def _inject_messages_to_response(self, response, capabilities,
224 start_messages, end_messages):
230 start_messages, end_messages):
225 """
231 """
226 Given a list response, we inject the pre/post-pull messages.
232 Given a list response, we inject the pre/post-pull messages.
227
233
228 We only inject the messages if the client supports sideband, and the
234 We only inject the messages if the client supports sideband, and the
229 response has the format:
235 response has the format:
230 0008NAK\n...0000
236 0008NAK\n...0000
231
237
232 Note that we do not check the no-progress capability as by default, git
238 Note that we do not check the no-progress capability as by default, git
233 sends it, which effectively would block all messages.
239 sends it, which effectively would block all messages.
234 """
240 """
235 if not self.SIDE_BAND_CAPS.intersection(capabilities):
241 if not self.SIDE_BAND_CAPS.intersection(capabilities):
236 return response
242 return response
237
243
238 if (not response[0].startswith('0008NAK\n') or
244 if (not response[0].startswith('0008NAK\n') or
239 not response[-1].endswith('0000')):
245 not response[-1].endswith('0000')):
240 return response
246 return response
241
247
242 if not start_messages and not end_messages:
248 if not start_messages and not end_messages:
243 return response
249 return response
244
250
245 new_response = ['0008NAK\n']
251 new_response = ['0008NAK\n']
246 new_response.extend(self._get_messages(start_messages, capabilities))
252 new_response.extend(self._get_messages(start_messages, capabilities))
247 if len(response) == 1:
253 if len(response) == 1:
248 new_response.append(response[0][8:-4])
254 new_response.append(response[0][8:-4])
249 else:
255 else:
250 new_response.append(response[0][8:])
256 new_response.append(response[0][8:])
251 new_response.extend(response[1:-1])
257 new_response.extend(response[1:-1])
252 new_response.append(response[-1][:-4])
258 new_response.append(response[-1][:-4])
253 new_response.extend(self._get_messages(end_messages, capabilities))
259 new_response.extend(self._get_messages(end_messages, capabilities))
254 new_response.append('0000')
260 new_response.append('0000')
255
261
256 return new_response
262 return new_response
257
263
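For the single-element branch above, a small illustration with hypothetical pack data (not taken from a real response) of how the leading '0008NAK\n' and the trailing flush packet '0000' are stripped before the messages are wrapped around the body:

    response = ['0008NAK\n' + 'PACK-data' + '0000']  # hypothetical upload-pack reply
    body = response[0][8:-4]
    # body == 'PACK-data'; the rewritten list becomes
    # ['0008NAK\n'] + start message packets + [body] + end message packets + ['0000']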
258 def backend(self, request, environ):
264 def backend(self, request, environ):
259 """
265 """
260 WSGI Response producer for HTTP POST Git Smart HTTP requests.
266 WSGI Response producer for HTTP POST Git Smart HTTP requests.
261 Reads commands and data from the HTTP POST body.
267 Reads commands and data from the HTTP POST body.
262 Returns an iterator over the contents of the git command's
268 Returns an iterator over the contents of the git command's
263 response written to stdout.
269 response written to stdout.
264 """
270 """
265 # TODO(skreft): think about how we could detect an HTTPLockedException, as
271 # TODO(skreft): think about how we could detect an HTTPLockedException, as
266 # we probably want to have the same mechanism used by mercurial and
272 # we probably want to have the same mechanism used by mercurial and
267 # simplevcs.
273 # simplevcs.
268 # For that we would need to parse the output of the command looking for
274 # For that we would need to parse the output of the command looking for
269 # some signs of the HTTPLockedError, parse the data and reraise it in
275 # some signs of the HTTPLockedError, parse the data and reraise it in
270 # pygrack. However, that would interfere with the streaming.
276 # pygrack. However, that would interfere with the streaming.
271 #
277 #
272 # Now the output of a blocked push is:
278 # Now the output of a blocked push is:
273 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
279 # Pushing to http://test_regular:test12@127.0.0.1:5001/vcs_test_git
274 # POST git-receive-pack (1047 bytes)
280 # POST git-receive-pack (1047 bytes)
275 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
281 # remote: ERROR: Repository `vcs_test_git` locked by user `test_admin`. Reason:`lock_auto`
276 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
282 # To http://test_regular:test12@127.0.0.1:5001/vcs_test_git
277 # ! [remote rejected] master -> master (pre-receive hook declined)
283 # ! [remote rejected] master -> master (pre-receive hook declined)
278 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
284 # error: failed to push some refs to 'http://test_regular:test12@127.0.0.1:5001/vcs_test_git'
279
285
280 git_command = self._get_fixedpath(request.path_info)
286 git_command = self._get_fixedpath(request.path_info)
281 if git_command not in self.commands:
287 if git_command not in self.commands:
282 log.debug('command %s not allowed', git_command)
288 log.debug('command %s not allowed', git_command)
283 return exc.HTTPForbidden()
289 return exc.HTTPForbidden()
284
290
285 capabilities = None
291 capabilities = None
286 if git_command == 'git-upload-pack':
292 if git_command == 'git-upload-pack':
287 capabilities = self._get_want_capabilities(request)
293 capabilities = self._get_want_capabilities(request)
288
294
289 if 'CONTENT_LENGTH' in environ:
295 if 'CONTENT_LENGTH' in environ:
290 inputstream = FileWrapper(request.body_file_seekable,
296 inputstream = FileWrapper(request.body_file_seekable,
291 request.content_length)
297 request.content_length)
292 else:
298 else:
293 inputstream = request.body_file_seekable
299 inputstream = request.body_file_seekable
294
300
295 resp = Response()
301 resp = Response()
296 resp.content_type = ('application/x-%s-result' %
302 resp.content_type = ('application/x-%s-result' %
297 git_command.encode('utf8'))
303 git_command.encode('utf8'))
298 resp.charset = None
304 resp.charset = None
299
305
300 if git_command == 'git-upload-pack':
306 if git_command == 'git-upload-pack':
301 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
307 status, pre_pull_messages = hooks.git_pre_pull(self.extras)
302 if status != 0:
308 if status != 0:
303 resp.app_iter = self._build_failed_pre_pull_response(
309 resp.app_iter = self._build_failed_pre_pull_response(
304 capabilities, pre_pull_messages)
310 capabilities, pre_pull_messages)
305 return resp
311 return resp
306
312
307 gitenv = dict(os.environ)
313 gitenv = dict(os.environ)
308 # forget all configs
314 # forget all configs
309 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
315 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
310 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
316 gitenv['RC_SCM_DATA'] = json.dumps(self.extras)
311 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
317 cmd = [self.git_path, git_command[4:], '--stateless-rpc',
312 self.content_path]
318 self.content_path]
313 log.debug('handling cmd %s', cmd)
319 log.debug('handling cmd %s', cmd)
314
320
315 out = subprocessio.SubprocessIOChunker(
321 out = subprocessio.SubprocessIOChunker(
316 cmd,
322 cmd,
317 inputstream=inputstream,
323 inputstream=inputstream,
318 env=gitenv,
324 env=gitenv,
319 cwd=self.content_path,
325 cwd=self.content_path,
320 shell=False,
326 shell=False,
321 fail_on_stderr=False,
327 fail_on_stderr=False,
322 fail_on_return_code=False
328 fail_on_return_code=False
323 )
329 )
324
330
325 if self.update_server_info and git_command == 'git-receive-pack':
331 if self.update_server_info and git_command == 'git-receive-pack':
326 # We need to fully consume the iterator here, as the
332 # We need to fully consume the iterator here, as the
327 # update-server-info command needs to be run after the push.
333 # update-server-info command needs to be run after the push.
328 out = list(out)
334 out = list(out)
329
335
330 # Updating refs manually after each push.
336 # Updating refs manually after each push.
331 # This is required as some clients are exposing Git repos internally
337 # This is required as some clients are exposing Git repos internally
332 # with the dumb protocol.
338 # with the dumb protocol.
333 cmd = [self.git_path, 'update-server-info']
339 cmd = [self.git_path, 'update-server-info']
334 log.debug('handling cmd %s', cmd)
340 log.debug('handling cmd %s', cmd)
335 output = subprocessio.SubprocessIOChunker(
341 output = subprocessio.SubprocessIOChunker(
336 cmd,
342 cmd,
337 inputstream=inputstream,
343 inputstream=inputstream,
338 env=gitenv,
344 env=gitenv,
339 cwd=self.content_path,
345 cwd=self.content_path,
340 shell=False,
346 shell=False,
341 fail_on_stderr=False,
347 fail_on_stderr=False,
342 fail_on_return_code=False
348 fail_on_return_code=False
343 )
349 )
344 # Consume all the output so the subprocess finishes
350 # Consume all the output so the subprocess finishes
345 for _ in output:
351 for _ in output:
346 pass
352 pass
347
353
348 if git_command == 'git-upload-pack':
354 if git_command == 'git-upload-pack':
349 out = list(out)
355 out = list(out)
350 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
356 unused_status, post_pull_messages = hooks.git_post_pull(self.extras)
351 resp.app_iter = self._inject_messages_to_response(
357 resp.app_iter = self._inject_messages_to_response(
352 out, capabilities, pre_pull_messages, post_pull_messages)
358 out, capabilities, pre_pull_messages, post_pull_messages)
353 else:
359 else:
354 resp.app_iter = out
360 resp.app_iter = out
355
361
356 return resp
362 return resp
357
363
358 def __call__(self, environ, start_response):
364 def __call__(self, environ, start_response):
359 request = Request(environ)
365 request = Request(environ)
360 _path = self._get_fixedpath(request.path_info)
366 _path = self._get_fixedpath(request.path_info)
361 if _path.startswith('info/refs'):
367 if _path.startswith('info/refs'):
362 app = self.inforefs
368 app = self.inforefs
363 else:
369 else:
364 app = self.backend
370 app = self.backend
365
371
366 try:
372 try:
367 resp = app(request, environ)
373 resp = app(request, environ)
368 except exc.HTTPException as error:
374 except exc.HTTPException as error:
369 log.exception('HTTP Error')
375 log.exception('HTTP Error')
370 resp = error
376 resp = error
371 except Exception:
377 except Exception:
372 log.exception('Unknown error')
378 log.exception('Unknown error')
373 resp = exc.HTTPInternalServerError()
379 resp = exc.HTTPInternalServerError()
374
380
375 return resp(environ, start_response)
381 return resp(environ, start_response)
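For orientation, the two requests a Git Smart HTTP client makes against this app and how the dispatch above routes them (the repository path is illustrative, not taken from a real deployment):

    GET  /myrepo/info/refs?service=git-upload-pack   -> self.inforefs
    POST /myrepo/git-upload-pack                     -> self.backend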
@@ -1,30 +1,19 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
19 PYRO_PORT = 9900
20
21 PYRO_GIT = 'git_remote'
22 PYRO_HG = 'hg_remote'
23 PYRO_SVN = 'svn_remote'
24 PYRO_VCSSERVER = 'vcs_server'
25 PYRO_GIT_REMOTE_WSGI = 'git_remote_wsgi'
26 PYRO_HG_REMOTE_WSGI = 'hg_remote_wsgi'
27
28 WIRE_ENCODING = 'UTF-8'
18 WIRE_ENCODING = 'UTF-8'
29
30 GIT_EXECUTABLE = 'git'
19 GIT_EXECUTABLE = 'git'
@@ -1,644 +1,668 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 from __future__ import absolute_import
18 from __future__ import absolute_import
19
19
20 from urllib2 import URLError
20 from urllib2 import URLError
21 import logging
21 import logging
22 import posixpath as vcspath
22 import posixpath as vcspath
23 import StringIO
23 import StringIO
24 import subprocess
24 import subprocess
25 import urllib
25 import urllib
26 import traceback
26
27
27 import svn.client
28 import svn.client
28 import svn.core
29 import svn.core
29 import svn.delta
30 import svn.delta
30 import svn.diff
31 import svn.diff
31 import svn.fs
32 import svn.fs
32 import svn.repos
33 import svn.repos
33
34
34 from vcsserver import svn_diff
35 from vcsserver import svn_diff
35 from vcsserver import exceptions
36 from vcsserver import exceptions
36 from vcsserver.base import RepoFactory, raise_from_original
37 from vcsserver.base import RepoFactory, raise_from_original
37
38
38
39
39 log = logging.getLogger(__name__)
40 log = logging.getLogger(__name__)
40
41
41
42
42 # Set of svn compatible version flags.
43 # Set of svn compatible version flags.
43 # Compare with subversion/svnadmin/svnadmin.c
44 # Compare with subversion/svnadmin/svnadmin.c
44 svn_compatible_versions = set([
45 svn_compatible_versions = set([
45 'pre-1.4-compatible',
46 'pre-1.4-compatible',
46 'pre-1.5-compatible',
47 'pre-1.5-compatible',
47 'pre-1.6-compatible',
48 'pre-1.6-compatible',
48 'pre-1.8-compatible',
49 'pre-1.8-compatible',
50 'pre-1.9-compatible',
49 ])
51 ])
50
52
53 svn_compatible_versions_map = {
54 'pre-1.4-compatible': '1.3',
55 'pre-1.5-compatible': '1.4',
56 'pre-1.6-compatible': '1.5',
57 'pre-1.8-compatible': '1.7',
58 'pre-1.9-compatible': '1.8',
59 }
60
51
61
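A minimal sketch of how the map above feeds the Subversion filesystem config (illustrative; it assumes the svn swig bindings are installed and that the map above is in scope, the repository path is made up, and the create call is left commented out so nothing is written):

    import svn.repos

    compat = svn_compatible_versions_map['pre-1.8-compatible']  # -> '1.7'
    fs_config = {'compatible-version': compat}
    # repo = svn.repos.create('/tmp/example-repo', "", "", None, fs_config)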
52 def reraise_safe_exceptions(func):
62 def reraise_safe_exceptions(func):
53 """Decorator for converting svn exceptions to something neutral."""
63 """Decorator for converting svn exceptions to something neutral."""
54 def wrapper(*args, **kwargs):
64 def wrapper(*args, **kwargs):
55 try:
65 try:
56 return func(*args, **kwargs)
66 return func(*args, **kwargs)
57 except Exception as e:
67 except Exception as e:
58 if not hasattr(e, '_vcs_kind'):
68 if not hasattr(e, '_vcs_kind'):
59 log.exception("Unhandled exception in svn remote call")
69 log.exception("Unhandled exception in svn remote call")
60 raise_from_original(exceptions.UnhandledException)
70 raise_from_original(exceptions.UnhandledException)
61 raise
71 raise
62 return wrapper
72 return wrapper
63
73
64
74
65 class SubversionFactory(RepoFactory):
75 class SubversionFactory(RepoFactory):
66
76
67 def _create_repo(self, wire, create, compatible_version):
77 def _create_repo(self, wire, create, compatible_version):
68 path = svn.core.svn_path_canonicalize(wire['path'])
78 path = svn.core.svn_path_canonicalize(wire['path'])
69 if create:
79 if create:
70 fs_config = {}
80 fs_config = {'compatible-version': '1.9'}
71 if compatible_version:
81 if compatible_version:
72 if compatible_version not in svn_compatible_versions:
82 if compatible_version not in svn_compatible_versions:
73 raise Exception('Unknown SVN compatible version "{}"'
83 raise Exception('Unknown SVN compatible version "{}"'
74 .format(compatible_version))
84 .format(compatible_version))
75 log.debug('Create SVN repo with compatible version "%s"',
85 fs_config['compatible-version'] = \
76 compatible_version)
86 svn_compatible_versions_map[compatible_version]
77 fs_config[compatible_version] = '1'
87
88 log.debug('Create SVN repo with config "%s"', fs_config)
78 repo = svn.repos.create(path, "", "", None, fs_config)
89 repo = svn.repos.create(path, "", "", None, fs_config)
79 else:
90 else:
80 repo = svn.repos.open(path)
91 repo = svn.repos.open(path)
81 return repo
92 return repo
82
93
83 def repo(self, wire, create=False, compatible_version=None):
94 def repo(self, wire, create=False, compatible_version=None):
84 def create_new_repo():
95 def create_new_repo():
85 return self._create_repo(wire, create, compatible_version)
96 return self._create_repo(wire, create, compatible_version)
86
97
87 return self._repo(wire, create_new_repo)
98 return self._repo(wire, create_new_repo)
88
99
89
100
90
91 NODE_TYPE_MAPPING = {
101 NODE_TYPE_MAPPING = {
92 svn.core.svn_node_file: 'file',
102 svn.core.svn_node_file: 'file',
93 svn.core.svn_node_dir: 'dir',
103 svn.core.svn_node_dir: 'dir',
94 }
104 }
95
105
96
106
97 class SvnRemote(object):
107 class SvnRemote(object):
98
108
99 def __init__(self, factory, hg_factory=None):
109 def __init__(self, factory, hg_factory=None):
100 self._factory = factory
110 self._factory = factory
101 # TODO: Remove once we do not use internal Mercurial objects anymore
111 # TODO: Remove once we do not use internal Mercurial objects anymore
102 # for subversion
112 # for subversion
103 self._hg_factory = hg_factory
113 self._hg_factory = hg_factory
104
114
105 @reraise_safe_exceptions
115 @reraise_safe_exceptions
106 def discover_svn_version(self):
116 def discover_svn_version(self):
107 try:
117 try:
108 import svn.core
118 import svn.core
109 svn_ver = svn.core.SVN_VERSION
119 svn_ver = svn.core.SVN_VERSION
110 except ImportError:
120 except ImportError:
111 svn_ver = None
121 svn_ver = None
112 return svn_ver
122 return svn_ver
113
123
114 def check_url(self, url, config_items):
124 def check_url(self, url, config_items):
115 # this can throw exception if not installed, but we detect this
125 # this can throw exception if not installed, but we detect this
116 from hgsubversion import svnrepo
126 from hgsubversion import svnrepo
117
127
118 baseui = self._hg_factory._create_config(config_items)
128 baseui = self._hg_factory._create_config(config_items)
119 # the uuid function returns a valid UUID only for a proper repo, else it
129 # the uuid function returns a valid UUID only for a proper repo, else it
120 # throws an exception
130 # throws an exception
121 try:
131 try:
122 svnrepo.svnremoterepo(baseui, url).svn.uuid
132 svnrepo.svnremoterepo(baseui, url).svn.uuid
123 except:
133 except Exception:
124 log.debug("Invalid svn url: %s", url)
134 tb = traceback.format_exc()
135 log.debug("Invalid Subversion url: `%s`, tb: %s", url, tb)
125 raise URLError(
136 raise URLError(
126 '"%s" is not a valid Subversion source url.' % (url, ))
137 '"%s" is not a valid Subversion source url.' % (url, ))
127 return True
138 return True
128
139
129 def is_path_valid_repository(self, wire, path):
140 def is_path_valid_repository(self, wire, path):
130 try:
141 try:
131 svn.repos.open(path)
142 svn.repos.open(path)
132 except svn.core.SubversionException:
143 except svn.core.SubversionException:
133 log.debug("Invalid Subversion path %s", path)
144 tb = traceback.format_exc()
145 log.debug("Invalid Subversion path `%s`, tb: %s", path, tb)
134 return False
146 return False
135 return True
147 return True
136
148
149 @reraise_safe_exceptions
150 def verify(self, wire):
151 repo_path = wire['path']
152 if not self.is_path_valid_repository(wire, repo_path):
153 raise Exception(
154 "Path %s is not a valid Subversion repository." % repo_path)
155
156 load = subprocess.Popen(
157 ['svnadmin', 'info', repo_path],
158 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
159 return ''.join(load.stdout)
160
137 def lookup(self, wire, revision):
161 def lookup(self, wire, revision):
138 if revision not in [-1, None, 'HEAD']:
162 if revision not in [-1, None, 'HEAD']:
139 raise NotImplementedError
163 raise NotImplementedError
140 repo = self._factory.repo(wire)
164 repo = self._factory.repo(wire)
141 fs_ptr = svn.repos.fs(repo)
165 fs_ptr = svn.repos.fs(repo)
142 head = svn.fs.youngest_rev(fs_ptr)
166 head = svn.fs.youngest_rev(fs_ptr)
143 return head
167 return head
144
168
145 def lookup_interval(self, wire, start_ts, end_ts):
169 def lookup_interval(self, wire, start_ts, end_ts):
146 repo = self._factory.repo(wire)
170 repo = self._factory.repo(wire)
147 fsobj = svn.repos.fs(repo)
171 fsobj = svn.repos.fs(repo)
148 start_rev = None
172 start_rev = None
149 end_rev = None
173 end_rev = None
150 if start_ts:
174 if start_ts:
151 start_ts_svn = apr_time_t(start_ts)
175 start_ts_svn = apr_time_t(start_ts)
152 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
176 start_rev = svn.repos.dated_revision(repo, start_ts_svn) + 1
153 else:
177 else:
154 start_rev = 1
178 start_rev = 1
155 if end_ts:
179 if end_ts:
156 end_ts_svn = apr_time_t(end_ts)
180 end_ts_svn = apr_time_t(end_ts)
157 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
181 end_rev = svn.repos.dated_revision(repo, end_ts_svn)
158 else:
182 else:
159 end_rev = svn.fs.youngest_rev(fsobj)
183 end_rev = svn.fs.youngest_rev(fsobj)
160 return start_rev, end_rev
184 return start_rev, end_rev
161
185
162 def revision_properties(self, wire, revision):
186 def revision_properties(self, wire, revision):
163 repo = self._factory.repo(wire)
187 repo = self._factory.repo(wire)
164 fs_ptr = svn.repos.fs(repo)
188 fs_ptr = svn.repos.fs(repo)
165 return svn.fs.revision_proplist(fs_ptr, revision)
189 return svn.fs.revision_proplist(fs_ptr, revision)
166
190
167 def revision_changes(self, wire, revision):
191 def revision_changes(self, wire, revision):
168
192
169 repo = self._factory.repo(wire)
193 repo = self._factory.repo(wire)
170 fsobj = svn.repos.fs(repo)
194 fsobj = svn.repos.fs(repo)
171 rev_root = svn.fs.revision_root(fsobj, revision)
195 rev_root = svn.fs.revision_root(fsobj, revision)
172
196
173 editor = svn.repos.ChangeCollector(fsobj, rev_root)
197 editor = svn.repos.ChangeCollector(fsobj, rev_root)
174 editor_ptr, editor_baton = svn.delta.make_editor(editor)
198 editor_ptr, editor_baton = svn.delta.make_editor(editor)
175 base_dir = ""
199 base_dir = ""
176 send_deltas = False
200 send_deltas = False
177 svn.repos.replay2(
201 svn.repos.replay2(
178 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
202 rev_root, base_dir, svn.core.SVN_INVALID_REVNUM, send_deltas,
179 editor_ptr, editor_baton, None)
203 editor_ptr, editor_baton, None)
180
204
181 added = []
205 added = []
182 changed = []
206 changed = []
183 removed = []
207 removed = []
184
208
185 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
209 # TODO: CHANGE_ACTION_REPLACE: Figure out where it belongs
186 for path, change in editor.changes.iteritems():
210 for path, change in editor.changes.iteritems():
187 # TODO: Decide what to do with directory nodes. Subversion can add
211 # TODO: Decide what to do with directory nodes. Subversion can add
188 # empty directories.
212 # empty directories.
189
213
190 if change.item_kind == svn.core.svn_node_dir:
214 if change.item_kind == svn.core.svn_node_dir:
191 continue
215 continue
192 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
216 if change.action in [svn.repos.CHANGE_ACTION_ADD]:
193 added.append(path)
217 added.append(path)
194 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
218 elif change.action in [svn.repos.CHANGE_ACTION_MODIFY,
195 svn.repos.CHANGE_ACTION_REPLACE]:
219 svn.repos.CHANGE_ACTION_REPLACE]:
196 changed.append(path)
220 changed.append(path)
197 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
221 elif change.action in [svn.repos.CHANGE_ACTION_DELETE]:
198 removed.append(path)
222 removed.append(path)
199 else:
223 else:
200 raise NotImplementedError(
224 raise NotImplementedError(
201 "Action %s not supported on path %s" % (
225 "Action %s not supported on path %s" % (
202 change.action, path))
226 change.action, path))
203
227
204 changes = {
228 changes = {
205 'added': added,
229 'added': added,
206 'changed': changed,
230 'changed': changed,
207 'removed': removed,
231 'removed': removed,
208 }
232 }
209 return changes
233 return changes
210
234
211 def node_history(self, wire, path, revision, limit):
235 def node_history(self, wire, path, revision, limit):
212 cross_copies = False
236 cross_copies = False
213 repo = self._factory.repo(wire)
237 repo = self._factory.repo(wire)
214 fsobj = svn.repos.fs(repo)
238 fsobj = svn.repos.fs(repo)
215 rev_root = svn.fs.revision_root(fsobj, revision)
239 rev_root = svn.fs.revision_root(fsobj, revision)
216
240
217 history_revisions = []
241 history_revisions = []
218 history = svn.fs.node_history(rev_root, path)
242 history = svn.fs.node_history(rev_root, path)
219 history = svn.fs.history_prev(history, cross_copies)
243 history = svn.fs.history_prev(history, cross_copies)
220 while history:
244 while history:
221 __, node_revision = svn.fs.history_location(history)
245 __, node_revision = svn.fs.history_location(history)
222 history_revisions.append(node_revision)
246 history_revisions.append(node_revision)
223 if limit and len(history_revisions) >= limit:
247 if limit and len(history_revisions) >= limit:
224 break
248 break
225 history = svn.fs.history_prev(history, cross_copies)
249 history = svn.fs.history_prev(history, cross_copies)
226 return history_revisions
250 return history_revisions
227
251
228 def node_properties(self, wire, path, revision):
252 def node_properties(self, wire, path, revision):
229 repo = self._factory.repo(wire)
253 repo = self._factory.repo(wire)
230 fsobj = svn.repos.fs(repo)
254 fsobj = svn.repos.fs(repo)
231 rev_root = svn.fs.revision_root(fsobj, revision)
255 rev_root = svn.fs.revision_root(fsobj, revision)
232 return svn.fs.node_proplist(rev_root, path)
256 return svn.fs.node_proplist(rev_root, path)
233
257
234 def file_annotate(self, wire, path, revision):
258 def file_annotate(self, wire, path, revision):
235 abs_path = 'file://' + urllib.pathname2url(
259 abs_path = 'file://' + urllib.pathname2url(
236 vcspath.join(wire['path'], path))
260 vcspath.join(wire['path'], path))
237 file_uri = svn.core.svn_path_canonicalize(abs_path)
261 file_uri = svn.core.svn_path_canonicalize(abs_path)
238
262
239 start_rev = svn_opt_revision_value_t(0)
263 start_rev = svn_opt_revision_value_t(0)
240 peg_rev = svn_opt_revision_value_t(revision)
264 peg_rev = svn_opt_revision_value_t(revision)
241 end_rev = peg_rev
265 end_rev = peg_rev
242
266
243 annotations = []
267 annotations = []
244
268
245 def receiver(line_no, revision, author, date, line, pool):
269 def receiver(line_no, revision, author, date, line, pool):
246 annotations.append((line_no, revision, line))
270 annotations.append((line_no, revision, line))
247
271
248 # TODO: Cannot use blame5, missing typemap function in the swig code
272 # TODO: Cannot use blame5, missing typemap function in the swig code
249 try:
273 try:
250 svn.client.blame2(
274 svn.client.blame2(
251 file_uri, peg_rev, start_rev, end_rev,
275 file_uri, peg_rev, start_rev, end_rev,
252 receiver, svn.client.create_context())
276 receiver, svn.client.create_context())
253 except svn.core.SubversionException as exc:
277 except svn.core.SubversionException as exc:
254 log.exception("Error during blame operation.")
278 log.exception("Error during blame operation.")
255 raise Exception(
279 raise Exception(
256 "Blame not supported or file does not exist at path %s. "
280 "Blame not supported or file does not exist at path %s. "
257 "Error %s." % (path, exc))
281 "Error %s." % (path, exc))
258
282
259 return annotations
283 return annotations
260
284
261 def get_node_type(self, wire, path, rev=None):
285 def get_node_type(self, wire, path, rev=None):
262 repo = self._factory.repo(wire)
286 repo = self._factory.repo(wire)
263 fs_ptr = svn.repos.fs(repo)
287 fs_ptr = svn.repos.fs(repo)
264 if rev is None:
288 if rev is None:
265 rev = svn.fs.youngest_rev(fs_ptr)
289 rev = svn.fs.youngest_rev(fs_ptr)
266 root = svn.fs.revision_root(fs_ptr, rev)
290 root = svn.fs.revision_root(fs_ptr, rev)
267 node = svn.fs.check_path(root, path)
291 node = svn.fs.check_path(root, path)
268 return NODE_TYPE_MAPPING.get(node, None)
292 return NODE_TYPE_MAPPING.get(node, None)
269
293
270 def get_nodes(self, wire, path, revision=None):
294 def get_nodes(self, wire, path, revision=None):
271 repo = self._factory.repo(wire)
295 repo = self._factory.repo(wire)
272 fsobj = svn.repos.fs(repo)
296 fsobj = svn.repos.fs(repo)
273 if revision is None:
297 if revision is None:
274 revision = svn.fs.youngest_rev(fsobj)
298 revision = svn.fs.youngest_rev(fsobj)
275 root = svn.fs.revision_root(fsobj, revision)
299 root = svn.fs.revision_root(fsobj, revision)
276 entries = svn.fs.dir_entries(root, path)
300 entries = svn.fs.dir_entries(root, path)
277 result = []
301 result = []
278 for entry_path, entry_info in entries.iteritems():
302 for entry_path, entry_info in entries.iteritems():
279 result.append(
303 result.append(
280 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
304 (entry_path, NODE_TYPE_MAPPING.get(entry_info.kind, None)))
281 return result
305 return result
282
306
283 def get_file_content(self, wire, path, rev=None):
307 def get_file_content(self, wire, path, rev=None):
284 repo = self._factory.repo(wire)
308 repo = self._factory.repo(wire)
285 fsobj = svn.repos.fs(repo)
309 fsobj = svn.repos.fs(repo)
286 if rev is None:
310 if rev is None:
287 rev = svn.fs.youngest_revision(fsobj)
311 rev = svn.fs.youngest_revision(fsobj)
288 root = svn.fs.revision_root(fsobj, rev)
312 root = svn.fs.revision_root(fsobj, rev)
289 content = svn.core.Stream(svn.fs.file_contents(root, path))
313 content = svn.core.Stream(svn.fs.file_contents(root, path))
290 return content.read()
314 return content.read()
291
315
292 def get_file_size(self, wire, path, revision=None):
316 def get_file_size(self, wire, path, revision=None):
293 repo = self._factory.repo(wire)
317 repo = self._factory.repo(wire)
294 fsobj = svn.repos.fs(repo)
318 fsobj = svn.repos.fs(repo)
295 if revision is None:
319 if revision is None:
296 revision = svn.fs.youngest_revision(fsobj)
320 revision = svn.fs.youngest_revision(fsobj)
297 root = svn.fs.revision_root(fsobj, revision)
321 root = svn.fs.revision_root(fsobj, revision)
298 size = svn.fs.file_length(root, path)
322 size = svn.fs.file_length(root, path)
299 return size
323 return size
300
324
301 def create_repository(self, wire, compatible_version=None):
325 def create_repository(self, wire, compatible_version=None):
302 log.info('Creating Subversion repository in path "%s"', wire['path'])
326 log.info('Creating Subversion repository in path "%s"', wire['path'])
303 self._factory.repo(wire, create=True,
327 self._factory.repo(wire, create=True,
304 compatible_version=compatible_version)
328 compatible_version=compatible_version)
305
329
306 def import_remote_repository(self, wire, src_url):
330 def import_remote_repository(self, wire, src_url):
307 repo_path = wire['path']
331 repo_path = wire['path']
308 if not self.is_path_valid_repository(wire, repo_path):
332 if not self.is_path_valid_repository(wire, repo_path):
309 raise Exception(
333 raise Exception(
310 "Path %s is not a valid Subversion repository." % repo_path)
334 "Path %s is not a valid Subversion repository." % repo_path)
311 # TODO: johbo: URL checks ?
335 # TODO: johbo: URL checks ?
312 rdump = subprocess.Popen(
336 rdump = subprocess.Popen(
313 ['svnrdump', 'dump', '--non-interactive', src_url],
337 ['svnrdump', 'dump', '--non-interactive', src_url],
314 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
338 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
315 load = subprocess.Popen(
339 load = subprocess.Popen(
316 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
340 ['svnadmin', 'load', repo_path], stdin=rdump.stdout)
317
341
318 # TODO: johbo: This can be a very long operation, might be better
342 # TODO: johbo: This can be a very long operation, might be better
319 # to track some kind of status and provide an api to check if the
343 # to track some kind of status and provide an api to check if the
320 # import is done.
344 # import is done.
321 rdump.wait()
345 rdump.wait()
322 load.wait()
346 load.wait()
323
347
324 if rdump.returncode != 0:
348 if rdump.returncode != 0:
325 errors = rdump.stderr.read()
349 errors = rdump.stderr.read()
326 log.error('svnrdump dump failed: statuscode %s: message: %s',
350 log.error('svnrdump dump failed: statuscode %s: message: %s',
327 rdump.returncode, errors)
351 rdump.returncode, errors)
328 reason = 'UNKNOWN'
352 reason = 'UNKNOWN'
329 if 'svnrdump: E230001:' in errors:
353 if 'svnrdump: E230001:' in errors:
330 reason = 'INVALID_CERTIFICATE'
354 reason = 'INVALID_CERTIFICATE'
331 raise Exception(
355 raise Exception(
332 'Failed to dump the remote repository from %s.' % src_url,
356 'Failed to dump the remote repository from %s.' % src_url,
333 reason)
357 reason)
334 if load.returncode != 0:
358 if load.returncode != 0:
335 raise Exception(
359 raise Exception(
336 'Failed to load the dump of remote repository from %s.' %
360 'Failed to load the dump of remote repository from %s.' %
337 (src_url, ))
361 (src_url, ))
338
362
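In shell terms the subprocess pipeline above corresponds to running `svnrdump dump --non-interactive <src_url> | svnadmin load <repo_path>` (placeholders, not real values); the method additionally inspects svnrdump's exit code and stderr so that certificate problems can be reported separately from other failures.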
339 def commit(self, wire, message, author, timestamp, updated, removed):
363 def commit(self, wire, message, author, timestamp, updated, removed):
340 assert isinstance(message, str)
364 assert isinstance(message, str)
341 assert isinstance(author, str)
365 assert isinstance(author, str)
342
366
343 repo = self._factory.repo(wire)
367 repo = self._factory.repo(wire)
344 fsobj = svn.repos.fs(repo)
368 fsobj = svn.repos.fs(repo)
345
369
346 rev = svn.fs.youngest_rev(fsobj)
370 rev = svn.fs.youngest_rev(fsobj)
347 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
371 txn = svn.repos.fs_begin_txn_for_commit(repo, rev, author, message)
348 txn_root = svn.fs.txn_root(txn)
372 txn_root = svn.fs.txn_root(txn)
349
373
350 for node in updated:
374 for node in updated:
351 TxnNodeProcessor(node, txn_root).update()
375 TxnNodeProcessor(node, txn_root).update()
352 for node in removed:
376 for node in removed:
353 TxnNodeProcessor(node, txn_root).remove()
377 TxnNodeProcessor(node, txn_root).remove()
354
378
355 commit_id = svn.repos.fs_commit_txn(repo, txn)
379 commit_id = svn.repos.fs_commit_txn(repo, txn)
356
380
357 if timestamp:
381 if timestamp:
358 apr_time = apr_time_t(timestamp)
382 apr_time = apr_time_t(timestamp)
359 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
383 ts_formatted = svn.core.svn_time_to_cstring(apr_time)
360 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
384 svn.fs.change_rev_prop(fsobj, commit_id, 'svn:date', ts_formatted)
361
385
362 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
386 log.debug('Committed revision "%s" to "%s".', commit_id, wire['path'])
363 return commit_id
387 return commit_id
364
388
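A sketch of the node payloads commit() expects (paths, contents and properties are made up; the keys mirror what TxnNodeProcessor reads further down, and the call is left commented out as the wire and remote objects are hypothetical):

    updated = [{'path': 'trunk/hello.txt',
                'content': 'hello world\n',
                'properties': {'svn:eol-style': 'native'}}]
    removed = [{'path': 'trunk/obsolete.txt'}]
    # commit_id = remote.commit(wire, 'add hello', 'editor', None, updated, removed)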
365 def diff(self, wire, rev1, rev2, path1=None, path2=None,
389 def diff(self, wire, rev1, rev2, path1=None, path2=None,
366 ignore_whitespace=False, context=3):
390 ignore_whitespace=False, context=3):
367
391
368 wire.update(cache=False)
392 wire.update(cache=False)
369 repo = self._factory.repo(wire)
393 repo = self._factory.repo(wire)
370 diff_creator = SvnDiffer(
394 diff_creator = SvnDiffer(
371 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
395 repo, rev1, path1, rev2, path2, ignore_whitespace, context)
372 try:
396 try:
373 return diff_creator.generate_diff()
397 return diff_creator.generate_diff()
374 except svn.core.SubversionException as e:
398 except svn.core.SubversionException as e:
375 log.exception(
399 log.exception(
376 "Error during diff operation operation. "
400 "Error during diff operation operation. "
377 "Path might not exist %s, %s" % (path1, path2))
401 "Path might not exist %s, %s" % (path1, path2))
378 return ""
402 return ""
379
403
380 @reraise_safe_exceptions
404 @reraise_safe_exceptions
381 def is_large_file(self, wire, path):
405 def is_large_file(self, wire, path):
382 return False
406 return False
383
407
384
408
385 class SvnDiffer(object):
409 class SvnDiffer(object):
386 """
410 """
387 Utility to create diffs based on difflib and the Subversion api
411 Utility to create diffs based on difflib and the Subversion api
388 """
412 """
389
413
390 binary_content = False
414 binary_content = False
391
415
392 def __init__(
416 def __init__(
393 self, repo, src_rev, src_path, tgt_rev, tgt_path,
417 self, repo, src_rev, src_path, tgt_rev, tgt_path,
394 ignore_whitespace, context):
418 ignore_whitespace, context):
395 self.repo = repo
419 self.repo = repo
396 self.ignore_whitespace = ignore_whitespace
420 self.ignore_whitespace = ignore_whitespace
397 self.context = context
421 self.context = context
398
422
399 fsobj = svn.repos.fs(repo)
423 fsobj = svn.repos.fs(repo)
400
424
401 self.tgt_rev = tgt_rev
425 self.tgt_rev = tgt_rev
402 self.tgt_path = tgt_path or ''
426 self.tgt_path = tgt_path or ''
403 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
427 self.tgt_root = svn.fs.revision_root(fsobj, tgt_rev)
404 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
428 self.tgt_kind = svn.fs.check_path(self.tgt_root, self.tgt_path)
405
429
406 self.src_rev = src_rev
430 self.src_rev = src_rev
407 self.src_path = src_path or self.tgt_path
431 self.src_path = src_path or self.tgt_path
408 self.src_root = svn.fs.revision_root(fsobj, src_rev)
432 self.src_root = svn.fs.revision_root(fsobj, src_rev)
409 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
433 self.src_kind = svn.fs.check_path(self.src_root, self.src_path)
410
434
411 self._validate()
435 self._validate()
412
436
413 def _validate(self):
437 def _validate(self):
414 if (self.tgt_kind != svn.core.svn_node_none and
438 if (self.tgt_kind != svn.core.svn_node_none and
415 self.src_kind != svn.core.svn_node_none and
439 self.src_kind != svn.core.svn_node_none and
416 self.src_kind != self.tgt_kind):
440 self.src_kind != self.tgt_kind):
417 # TODO: johbo: proper error handling
441 # TODO: johbo: proper error handling
418 raise Exception(
442 raise Exception(
419 "Source and target are not compatible for diff generation. "
443 "Source and target are not compatible for diff generation. "
420 "Source type: %s, target type: %s" %
444 "Source type: %s, target type: %s" %
421 (self.src_kind, self.tgt_kind))
445 (self.src_kind, self.tgt_kind))
422
446
423 def generate_diff(self):
447 def generate_diff(self):
424 buf = StringIO.StringIO()
448 buf = StringIO.StringIO()
425 if self.tgt_kind == svn.core.svn_node_dir:
449 if self.tgt_kind == svn.core.svn_node_dir:
426 self._generate_dir_diff(buf)
450 self._generate_dir_diff(buf)
427 else:
451 else:
428 self._generate_file_diff(buf)
452 self._generate_file_diff(buf)
429 return buf.getvalue()
453 return buf.getvalue()
430
454
431 def _generate_dir_diff(self, buf):
455 def _generate_dir_diff(self, buf):
432 editor = DiffChangeEditor()
456 editor = DiffChangeEditor()
433 editor_ptr, editor_baton = svn.delta.make_editor(editor)
457 editor_ptr, editor_baton = svn.delta.make_editor(editor)
434 svn.repos.dir_delta2(
458 svn.repos.dir_delta2(
435 self.src_root,
459 self.src_root,
436 self.src_path,
460 self.src_path,
437 '', # src_entry
461 '', # src_entry
438 self.tgt_root,
462 self.tgt_root,
439 self.tgt_path,
463 self.tgt_path,
440 editor_ptr, editor_baton,
464 editor_ptr, editor_baton,
441 authorization_callback_allow_all,
465 authorization_callback_allow_all,
442 False, # text_deltas
466 False, # text_deltas
443 svn.core.svn_depth_infinity, # depth
467 svn.core.svn_depth_infinity, # depth
444 False, # entry_props
468 False, # entry_props
445 False, # ignore_ancestry
469 False, # ignore_ancestry
446 )
470 )
447
471
448 for path, __, change in sorted(editor.changes):
472 for path, __, change in sorted(editor.changes):
449 self._generate_node_diff(
473 self._generate_node_diff(
450 buf, change, path, self.tgt_path, path, self.src_path)
474 buf, change, path, self.tgt_path, path, self.src_path)
451
475
452 def _generate_file_diff(self, buf):
476 def _generate_file_diff(self, buf):
453 change = None
477 change = None
454 if self.src_kind == svn.core.svn_node_none:
478 if self.src_kind == svn.core.svn_node_none:
455 change = "add"
479 change = "add"
456 elif self.tgt_kind == svn.core.svn_node_none:
480 elif self.tgt_kind == svn.core.svn_node_none:
457 change = "delete"
481 change = "delete"
458 tgt_base, tgt_path = vcspath.split(self.tgt_path)
482 tgt_base, tgt_path = vcspath.split(self.tgt_path)
459 src_base, src_path = vcspath.split(self.src_path)
483 src_base, src_path = vcspath.split(self.src_path)
460 self._generate_node_diff(
484 self._generate_node_diff(
461 buf, change, tgt_path, tgt_base, src_path, src_base)
485 buf, change, tgt_path, tgt_base, src_path, src_base)
462
486
463 def _generate_node_diff(
487 def _generate_node_diff(
464 self, buf, change, tgt_path, tgt_base, src_path, src_base):
488 self, buf, change, tgt_path, tgt_base, src_path, src_base):
465
489
466 if self.src_rev == self.tgt_rev and tgt_base == src_base:
490 if self.src_rev == self.tgt_rev and tgt_base == src_base:
467 # to stay consistent with git/hg, return an empty diff when comparing
491 # to stay consistent with git/hg, return an empty diff when comparing
468 # identical revisions
492 # identical revisions
469 return
493 return
470
494
471 tgt_full_path = vcspath.join(tgt_base, tgt_path)
495 tgt_full_path = vcspath.join(tgt_base, tgt_path)
472 src_full_path = vcspath.join(src_base, src_path)
496 src_full_path = vcspath.join(src_base, src_path)
473
497
474 self.binary_content = False
498 self.binary_content = False
475 mime_type = self._get_mime_type(tgt_full_path)
499 mime_type = self._get_mime_type(tgt_full_path)
476
500
477 if mime_type and not mime_type.startswith('text'):
501 if mime_type and not mime_type.startswith('text'):
478 self.binary_content = True
502 self.binary_content = True
479 buf.write("=" * 67 + '\n')
503 buf.write("=" * 67 + '\n')
480 buf.write("Cannot display: file marked as a binary type.\n")
504 buf.write("Cannot display: file marked as a binary type.\n")
481 buf.write("svn:mime-type = %s\n" % mime_type)
505 buf.write("svn:mime-type = %s\n" % mime_type)
482 buf.write("Index: %s\n" % (tgt_path, ))
506 buf.write("Index: %s\n" % (tgt_path, ))
483 buf.write("=" * 67 + '\n')
507 buf.write("=" * 67 + '\n')
484 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
508 buf.write("diff --git a/%(tgt_path)s b/%(tgt_path)s\n" % {
485 'tgt_path': tgt_path})
509 'tgt_path': tgt_path})
486
510
487 if change == 'add':
511 if change == 'add':
488 # TODO: johbo: SVN is missing a zero here compared to git
512 # TODO: johbo: SVN is missing a zero here compared to git
489 buf.write("new file mode 10644\n")
513 buf.write("new file mode 10644\n")
490
514
491 #TODO(marcink): intro to binary detection of svn patches
515 #TODO(marcink): intro to binary detection of svn patches
492 # if self.binary_content:
516 # if self.binary_content:
493 # buf.write('GIT binary patch\n')
517 # buf.write('GIT binary patch\n')
494
518
495 buf.write("--- /dev/null\t(revision 0)\n")
519 buf.write("--- /dev/null\t(revision 0)\n")
496 src_lines = []
520 src_lines = []
497 else:
521 else:
498 if change == 'delete':
522 if change == 'delete':
499 buf.write("deleted file mode 10644\n")
523 buf.write("deleted file mode 10644\n")
500
524
501 #TODO(marcink): intro to binary detection of svn patches
525 #TODO(marcink): intro to binary detection of svn patches
502 # if self.binary_content:
526 # if self.binary_content:
503 # buf.write('GIT binary patch\n')
527 # buf.write('GIT binary patch\n')
504
528
505 buf.write("--- a/%s\t(revision %s)\n" % (
529 buf.write("--- a/%s\t(revision %s)\n" % (
506 src_path, self.src_rev))
530 src_path, self.src_rev))
507 src_lines = self._svn_readlines(self.src_root, src_full_path)
531 src_lines = self._svn_readlines(self.src_root, src_full_path)
508
532
509 if change == 'delete':
533 if change == 'delete':
510 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
534 buf.write("+++ /dev/null\t(revision %s)\n" % (self.tgt_rev, ))
511 tgt_lines = []
535 tgt_lines = []
512 else:
536 else:
513 buf.write("+++ b/%s\t(revision %s)\n" % (
537 buf.write("+++ b/%s\t(revision %s)\n" % (
514 tgt_path, self.tgt_rev))
538 tgt_path, self.tgt_rev))
515 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
539 tgt_lines = self._svn_readlines(self.tgt_root, tgt_full_path)
516
540
517 if not self.binary_content:
541 if not self.binary_content:
518 udiff = svn_diff.unified_diff(
542 udiff = svn_diff.unified_diff(
519 src_lines, tgt_lines, context=self.context,
543 src_lines, tgt_lines, context=self.context,
520 ignore_blank_lines=self.ignore_whitespace,
544 ignore_blank_lines=self.ignore_whitespace,
521 ignore_case=False,
545 ignore_case=False,
522 ignore_space_changes=self.ignore_whitespace)
546 ignore_space_changes=self.ignore_whitespace)
523 buf.writelines(udiff)
547 buf.writelines(udiff)
524
548
525 def _get_mime_type(self, path):
549 def _get_mime_type(self, path):
526 try:
550 try:
527 mime_type = svn.fs.node_prop(
551 mime_type = svn.fs.node_prop(
528 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
552 self.tgt_root, path, svn.core.SVN_PROP_MIME_TYPE)
529 except svn.core.SubversionException:
553 except svn.core.SubversionException:
530 mime_type = svn.fs.node_prop(
554 mime_type = svn.fs.node_prop(
531 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
555 self.src_root, path, svn.core.SVN_PROP_MIME_TYPE)
532 return mime_type
556 return mime_type
533
557
534 def _svn_readlines(self, fs_root, node_path):
558 def _svn_readlines(self, fs_root, node_path):
535 if self.binary_content:
559 if self.binary_content:
536 return []
560 return []
537 node_kind = svn.fs.check_path(fs_root, node_path)
561 node_kind = svn.fs.check_path(fs_root, node_path)
538 if node_kind not in (
562 if node_kind not in (
539 svn.core.svn_node_file, svn.core.svn_node_symlink):
563 svn.core.svn_node_file, svn.core.svn_node_symlink):
540 return []
564 return []
541 content = svn.core.Stream(
565 content = svn.core.Stream(
542 svn.fs.file_contents(fs_root, node_path)).read()
566 svn.fs.file_contents(fs_root, node_path)).read()
543 return content.splitlines(True)
567 return content.splitlines(True)
544
568
545
569
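A hedged usage sketch for the SvnDiffer class above (the repository path, revisions and file path are hypothetical; it assumes the svn bindings are installed and the class is in scope):

    import svn.repos

    repo = svn.repos.open('/srv/repos/example')  # hypothetical repository path
    differ = SvnDiffer(repo, 10, 'trunk/a.txt', 11, 'trunk/a.txt',
                       ignore_whitespace=False, context=3)
    print(differ.generate_diff())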
546 class DiffChangeEditor(svn.delta.Editor):
570 class DiffChangeEditor(svn.delta.Editor):
547 """
571 """
548 Records changes between two given revisions
572 Records changes between two given revisions
549 """
573 """
550
574
551 def __init__(self):
575 def __init__(self):
552 self.changes = []
576 self.changes = []
553
577
554 def delete_entry(self, path, revision, parent_baton, pool=None):
578 def delete_entry(self, path, revision, parent_baton, pool=None):
555 self.changes.append((path, None, 'delete'))
579 self.changes.append((path, None, 'delete'))
556
580
557 def add_file(
581 def add_file(
558 self, path, parent_baton, copyfrom_path, copyfrom_revision,
582 self, path, parent_baton, copyfrom_path, copyfrom_revision,
559 file_pool=None):
583 file_pool=None):
560 self.changes.append((path, 'file', 'add'))
584 self.changes.append((path, 'file', 'add'))
561
585
562 def open_file(self, path, parent_baton, base_revision, file_pool=None):
586 def open_file(self, path, parent_baton, base_revision, file_pool=None):
563 self.changes.append((path, 'file', 'change'))
587 self.changes.append((path, 'file', 'change'))
564
588
565
589
566 def authorization_callback_allow_all(root, path, pool):
590 def authorization_callback_allow_all(root, path, pool):
567 return True
591 return True
568
592
569
593
570 class TxnNodeProcessor(object):
594 class TxnNodeProcessor(object):
571 """
595 """
572 Utility to process the change of one node within a transaction root.
596 Utility to process the change of one node within a transaction root.
573
597
574 It encapsulates the knowledge of how to add, update or remove
598 It encapsulates the knowledge of how to add, update or remove
575 a node for a given transaction root. The purpose is to support the method
599 a node for a given transaction root. The purpose is to support the method
576 `SvnRemote.commit`.
600 `SvnRemote.commit`.
577 """
601 """
578
602
579 def __init__(self, node, txn_root):
603 def __init__(self, node, txn_root):
580 assert isinstance(node['path'], str)
604 assert isinstance(node['path'], str)
581
605
582 self.node = node
606 self.node = node
583 self.txn_root = txn_root
607 self.txn_root = txn_root
584
608
585 def update(self):
609 def update(self):
586 self._ensure_parent_dirs()
610 self._ensure_parent_dirs()
587 self._add_file_if_node_does_not_exist()
611 self._add_file_if_node_does_not_exist()
588 self._update_file_content()
612 self._update_file_content()
589 self._update_file_properties()
613 self._update_file_properties()
590
614
591 def remove(self):
615 def remove(self):
592 svn.fs.delete(self.txn_root, self.node['path'])
616 svn.fs.delete(self.txn_root, self.node['path'])
593 # TODO: Clean up directory if empty
617 # TODO: Clean up directory if empty
594
618
595 def _ensure_parent_dirs(self):
619 def _ensure_parent_dirs(self):
596 curdir = vcspath.dirname(self.node['path'])
620 curdir = vcspath.dirname(self.node['path'])
597 dirs_to_create = []
621 dirs_to_create = []
598 while not self._svn_path_exists(curdir):
622 while not self._svn_path_exists(curdir):
599 dirs_to_create.append(curdir)
623 dirs_to_create.append(curdir)
600 curdir = vcspath.dirname(curdir)
624 curdir = vcspath.dirname(curdir)
601
625
602 for curdir in reversed(dirs_to_create):
626 for curdir in reversed(dirs_to_create):
603 log.debug('Creating missing directory "%s"', curdir)
627 log.debug('Creating missing directory "%s"', curdir)
604 svn.fs.make_dir(self.txn_root, curdir)
628 svn.fs.make_dir(self.txn_root, curdir)
605
629
606 def _svn_path_exists(self, path):
630 def _svn_path_exists(self, path):
607 path_status = svn.fs.check_path(self.txn_root, path)
631 path_status = svn.fs.check_path(self.txn_root, path)
608 return path_status != svn.core.svn_node_none
632 return path_status != svn.core.svn_node_none
609
633
610 def _add_file_if_node_does_not_exist(self):
634 def _add_file_if_node_does_not_exist(self):
611 kind = svn.fs.check_path(self.txn_root, self.node['path'])
635 kind = svn.fs.check_path(self.txn_root, self.node['path'])
612 if kind == svn.core.svn_node_none:
636 if kind == svn.core.svn_node_none:
613 svn.fs.make_file(self.txn_root, self.node['path'])
637 svn.fs.make_file(self.txn_root, self.node['path'])
614
638
615 def _update_file_content(self):
639 def _update_file_content(self):
616 assert isinstance(self.node['content'], str)
640 assert isinstance(self.node['content'], str)
617 handler, baton = svn.fs.apply_textdelta(
641 handler, baton = svn.fs.apply_textdelta(
618 self.txn_root, self.node['path'], None, None)
642 self.txn_root, self.node['path'], None, None)
619 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
643 svn.delta.svn_txdelta_send_string(self.node['content'], handler, baton)
620
644
621 def _update_file_properties(self):
645 def _update_file_properties(self):
622 properties = self.node.get('properties', {})
646 properties = self.node.get('properties', {})
623 for key, value in properties.iteritems():
647 for key, value in properties.iteritems():
624 svn.fs.change_node_prop(
648 svn.fs.change_node_prop(
625 self.txn_root, self.node['path'], key, value)
649 self.txn_root, self.node['path'], key, value)
626
650
627
651
628 def apr_time_t(timestamp):
652 def apr_time_t(timestamp):
629 """
653 """
630 Convert a Python timestamp into APR timestamp type apr_time_t
654 Convert a Python timestamp into APR timestamp type apr_time_t
631 """
655 """
632 return timestamp * 1E6
656 return timestamp * 1E6
633
657
634
658
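A quick worked example of the conversion above (illustrative numbers): APR timestamps are microseconds since the Unix epoch, so

    timestamp = 1500000000            # seconds, Python style
    apr_timestamp = timestamp * 1E6   # -> 1.5e15 microseconds, as apr_time_t returns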
635 def svn_opt_revision_value_t(num):
659 def svn_opt_revision_value_t(num):
636 """
660 """
637 Put `num` into a `svn_opt_revision_value_t` structure.
661 Put `num` into a `svn_opt_revision_value_t` structure.
638 """
662 """
639 value = svn.core.svn_opt_revision_value_t()
663 value = svn.core.svn_opt_revision_value_t()
640 value.number = num
664 value.number = num
641 revision = svn.core.svn_opt_revision_t()
665 revision = svn.core.svn_opt_revision_t()
642 revision.kind = svn.core.svn_opt_revision_number
666 revision.kind = svn.core.svn_opt_revision_number
643 revision.value = value
667 revision.value = value
644 return revision
668 return revision
@@ -1,549 +1,241 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import contextlib
18 import contextlib
19 import io
19 import io
20 import threading
20 import threading
21 from BaseHTTPServer import BaseHTTPRequestHandler
21 from BaseHTTPServer import BaseHTTPRequestHandler
22 from SocketServer import TCPServer
22 from SocketServer import TCPServer
23
23
24 import mercurial.ui
24 import mercurial.ui
25 import mock
25 import mock
26 import pytest
26 import pytest
27 import simplejson as json
27 import simplejson as json
28
28
29 from vcsserver import hooks
29 from vcsserver import hooks
30
30
31
31
32 class HooksStub(object):
33 """
34 Simulates a Pyro4.Proxy object.
35
36 Will always return `result`, no matter which hook has been called on it.
37 """
38
39 def __init__(self, result):
40 self._result = result
41
42 def __call__(self, hooks_uri):
43 return self
44
45 def __enter__(self):
46 return self
47
48 def __exit__(self, exc_type, exc_value, traceback):
49 pass
50
51 def __getattr__(self, name):
52 return mock.Mock(return_value=self._result)
53
54
55 @contextlib.contextmanager
56 def mock_hook_response(
57 status=0, output='', exception=None, exception_args=None):
58 response = {
59 'status': status,
60 'output': output,
61 }
62 if exception:
63 response.update({
64 'exception': exception,
65 'exception_args': exception_args,
66 })
67
68 with mock.patch('Pyro4.Proxy', HooksStub(response)):
69 yield
70
71
72 def get_hg_ui(extras=None):
32 def get_hg_ui(extras=None):
73 """Create a Config object with a valid RC_SCM_DATA entry."""
33 """Create a Config object with a valid RC_SCM_DATA entry."""
74 extras = extras or {}
34 extras = extras or {}
75 required_extras = {
35 required_extras = {
76 'username': '',
36 'username': '',
77 'repository': '',
37 'repository': '',
78 'locked_by': '',
38 'locked_by': '',
79 'scm': '',
39 'scm': '',
80 'make_lock': '',
40 'make_lock': '',
81 'action': '',
41 'action': '',
82 'ip': '',
42 'ip': '',
83 'hooks_uri': 'fake_hooks_uri',
43 'hooks_uri': 'fake_hooks_uri',
84 }
44 }
85 required_extras.update(extras)
45 required_extras.update(extras)
86 hg_ui = mercurial.ui.ui()
46 hg_ui = mercurial.ui.ui()
87 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
88
48
89 return hg_ui
49 return hg_ui
90
50
91
51
92 def test_call_hook_no_error(capsys):
93 extras = {
94 'hooks_uri': 'fake_hook_uri',
95 }
96 expected_output = 'My mock output'
97 writer = mock.Mock()
98
99 with mock_hook_response(status=1, output=expected_output):
100 hooks._call_hook('hook_name', extras, writer)
101
102 out, err = capsys.readouterr()
103
104 writer.write.assert_called_with(expected_output)
105 assert err == ''
106
107
108 def test_call_hook_with_exception(capsys):
109 extras = {
110 'hooks_uri': 'fake_hook_uri',
111 }
112 expected_output = 'My mock output'
113 writer = mock.Mock()
114
115 with mock_hook_response(status=1, output=expected_output,
116 exception='TypeError',
117 exception_args=('Mock exception', )):
118 with pytest.raises(Exception) as excinfo:
119 hooks._call_hook('hook_name', extras, writer)
120
121 assert excinfo.type == Exception
122 assert 'Mock exception' in str(excinfo.value)
123
124 out, err = capsys.readouterr()
125
126 writer.write.assert_called_with(expected_output)
127 assert err == ''
128
129
130 def test_call_hook_with_locked_exception(capsys):
131 extras = {
132 'hooks_uri': 'fake_hook_uri',
133 }
134 expected_output = 'My mock output'
135 writer = mock.Mock()
136
137 with mock_hook_response(status=1, output=expected_output,
138 exception='HTTPLockedRC',
139 exception_args=('message',)):
140 with pytest.raises(Exception) as excinfo:
141 hooks._call_hook('hook_name', extras, writer)
142
143 assert excinfo.value._vcs_kind == 'repo_locked'
144 assert 'message' == str(excinfo.value)
145
146 out, err = capsys.readouterr()
147
148 writer.write.assert_called_with(expected_output)
149 assert err == ''
150
151
152 def test_call_hook_with_stdout():
153 extras = {
154 'hooks_uri': 'fake_hook_uri',
155 }
156 expected_output = 'My mock output'
157
158 stdout = io.BytesIO()
159 with mock_hook_response(status=1, output=expected_output):
160 hooks._call_hook('hook_name', extras, stdout)
161
162 assert stdout.getvalue() == expected_output
163
164
165 def test_repo_size():
166 hg_ui = get_hg_ui()
167
168 with mock_hook_response(status=1):
169 assert hooks.repo_size(hg_ui, None) == 1
170
171
172 def test_pre_pull():
173 hg_ui = get_hg_ui()
174
175 with mock_hook_response(status=1):
176 assert hooks.pre_pull(hg_ui, None) == 1
177
178
179 def test_post_pull():
180 hg_ui = get_hg_ui()
181
182 with mock_hook_response(status=1):
183 assert hooks.post_pull(hg_ui, None) == 1
184
185
186 def test_pre_push():
187 hg_ui = get_hg_ui()
188
189 with mock_hook_response(status=1):
190 assert hooks.pre_push(hg_ui, None) == 1
191
192
193 def test_post_push():
194 hg_ui = get_hg_ui()
195
196 with mock_hook_response(status=1):
197 with mock.patch('vcsserver.hooks._rev_range_hash', return_value=[]):
198 assert hooks.post_push(hg_ui, None, None) == 1
199
200
201 def test_git_pre_receive():
202 extras = {
203 'hooks': ['push'],
204 'hooks_uri': 'fake_hook_uri',
205 }
206 with mock_hook_response(status=1):
207 response = hooks.git_pre_receive(None, None,
208 {'RC_SCM_DATA': json.dumps(extras)})
209 assert response == 1
210
211
212 def test_git_pre_receive_is_disabled():
52 def test_git_pre_receive_is_disabled():
213 extras = {'hooks': ['pull']}
53 extras = {'hooks': ['pull']}
214 response = hooks.git_pre_receive(None, None,
54 response = hooks.git_pre_receive(None, None,
215 {'RC_SCM_DATA': json.dumps(extras)})
55 {'RC_SCM_DATA': json.dumps(extras)})
216
56
217 assert response == 0
57 assert response == 0
218
58
219
59
220 def test_git_post_receive_no_subprocess_call():
221 extras = {
222 'hooks': ['push'],
223 'hooks_uri': 'fake_hook_uri',
224 }
225 # Setting revision_lines to '' avoids all subprocess calls
226 with mock_hook_response(status=1):
227 response = hooks.git_post_receive(None, '',
228 {'RC_SCM_DATA': json.dumps(extras)})
229 assert response == 1
230
231
232 def test_git_post_receive_is_disabled():
60 def test_git_post_receive_is_disabled():
233 extras = {'hooks': ['pull']}
61 extras = {'hooks': ['pull']}
234 response = hooks.git_post_receive(None, '',
62 response = hooks.git_post_receive(None, '',
235 {'RC_SCM_DATA': json.dumps(extras)})
63 {'RC_SCM_DATA': json.dumps(extras)})
236
64
237 assert response == 0
65 assert response == 0
238
66
239
67
240 def test_git_post_receive_calls_repo_size():
68 def test_git_post_receive_calls_repo_size():
241 extras = {'hooks': ['push', 'repo_size']}
69 extras = {'hooks': ['push', 'repo_size']}
242 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
243 hooks.git_post_receive(
71 hooks.git_post_receive(
244 None, '', {'RC_SCM_DATA': json.dumps(extras)})
72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
245 extras.update({'commit_ids': []})
73 extras.update({'commit_ids': [],
74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
246 expected_calls = [
75 expected_calls = [
247 mock.call('repo_size', extras, mock.ANY),
76 mock.call('repo_size', extras, mock.ANY),
248 mock.call('post_push', extras, mock.ANY),
77 mock.call('post_push', extras, mock.ANY),
249 ]
78 ]
250 assert call_hook_mock.call_args_list == expected_calls
79 assert call_hook_mock.call_args_list == expected_calls
251
80
252
81
253 def test_git_post_receive_does_not_call_disabled_repo_size():
82 def test_git_post_receive_does_not_call_disabled_repo_size():
254 extras = {'hooks': ['push']}
83 extras = {'hooks': ['push']}
255 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
256 hooks.git_post_receive(
85 hooks.git_post_receive(
257 None, '', {'RC_SCM_DATA': json.dumps(extras)})
86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
258 extras.update({'commit_ids': []})
87 extras.update({'commit_ids': [],
88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
259 expected_calls = [
89 expected_calls = [
260 mock.call('post_push', extras, mock.ANY)
90 mock.call('post_push', extras, mock.ANY)
261 ]
91 ]
262 assert call_hook_mock.call_args_list == expected_calls
92 assert call_hook_mock.call_args_list == expected_calls
263
93
264
94
265 def test_repo_size_exception_does_not_affect_git_post_receive():
95 def test_repo_size_exception_does_not_affect_git_post_receive():
266 extras = {'hooks': ['push', 'repo_size']}
96 extras = {'hooks': ['push', 'repo_size']}
267 status = 0
97 status = 0
268
98
269 def side_effect(name, *args, **kwargs):
99 def side_effect(name, *args, **kwargs):
270 if name == 'repo_size':
100 if name == 'repo_size':
271 raise Exception('Fake exception')
101 raise Exception('Fake exception')
272 else:
102 else:
273 return status
103 return status
274
104
275 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
276 call_hook_mock.side_effect = side_effect
106 call_hook_mock.side_effect = side_effect
277 result = hooks.git_post_receive(
107 result = hooks.git_post_receive(
278 None, '', {'RC_SCM_DATA': json.dumps(extras)})
108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
279 assert result == status
109 assert result == status
280
110
281
111
282 @mock.patch('vcsserver.hooks._run_command')
283 def test_git_post_receive_first_commit_sub_branch(cmd_mock):
284 def cmd_mock_returns(args):
285 if args == ['git', 'show', 'HEAD']:
286 raise
287 if args == ['git', 'for-each-ref', '--format=%(refname)',
288 'refs/heads/*']:
289 return 'refs/heads/test-branch2/sub-branch'
290 if args == ['git', 'log', '--reverse', '--pretty=format:%H', '--',
291 '9695eef57205c17566a3ae543be187759b310bb7', '--not',
292 'refs/heads/test-branch2/sub-branch']:
293 return ''
294
295 cmd_mock.side_effect = cmd_mock_returns
296
297 extras = {
298 'hooks': ['push'],
299 'hooks_uri': 'fake_hook_uri'
300 }
301 rev_lines = ['0000000000000000000000000000000000000000 '
302 '9695eef57205c17566a3ae543be187759b310bb7 '
303 'refs/heads/feature/sub-branch\n']
304 with mock_hook_response(status=0):
305 response = hooks.git_post_receive(None, rev_lines,
306 {'RC_SCM_DATA': json.dumps(extras)})
307
308 calls = [
309 mock.call(['git', 'show', 'HEAD']),
310 mock.call(['git', 'symbolic-ref', 'HEAD',
311 'refs/heads/feature/sub-branch']),
312 ]
313 cmd_mock.assert_has_calls(calls, any_order=True)
314 assert response == 0
315
316
317 @mock.patch('vcsserver.hooks._run_command')
318 def test_git_post_receive_first_commit_revs(cmd_mock):
319 extras = {
320 'hooks': ['push'],
321 'hooks_uri': 'fake_hook_uri'
322 }
323 rev_lines = [
324 '0000000000000000000000000000000000000000 '
325 '9695eef57205c17566a3ae543be187759b310bb7 refs/heads/master\n']
326 with mock_hook_response(status=0):
327 response = hooks.git_post_receive(
328 None, rev_lines, {'RC_SCM_DATA': json.dumps(extras)})
329
330 calls = [
331 mock.call(['git', 'show', 'HEAD']),
332 mock.call(['git', 'for-each-ref', '--format=%(refname)',
333 'refs/heads/*']),
334 mock.call(['git', 'log', '--reverse', '--pretty=format:%H',
335 '--', '9695eef57205c17566a3ae543be187759b310bb7', '--not',
336 ''])
337 ]
338 cmd_mock.assert_has_calls(calls, any_order=True)
339
340 assert response == 0
341
342
343 def test_git_pre_pull():
344 extras = {
345 'hooks': ['pull'],
346 'hooks_uri': 'fake_hook_uri',
347 }
348 with mock_hook_response(status=1, output='foo'):
349 assert hooks.git_pre_pull(extras) == hooks.HookResponse(1, 'foo')
350
351
352 def test_git_pre_pull_exception_is_caught():
353 extras = {
354 'hooks': ['pull'],
355 'hooks_uri': 'fake_hook_uri',
356 }
357 with mock_hook_response(status=2, exception=Exception('foo')):
358 assert hooks.git_pre_pull(extras).status == 128
359
360
361 def test_git_pre_pull_is_disabled():
112 def test_git_pre_pull_is_disabled():
362 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
363
114
364
115
365 def test_git_post_pull():
366 extras = {
367 'hooks': ['pull'],
368 'hooks_uri': 'fake_hook_uri',
369 }
370 with mock_hook_response(status=1, output='foo'):
371 assert hooks.git_post_pull(extras) == hooks.HookResponse(1, 'foo')
372
373
374 def test_git_post_pull_exception_is_caught():
375 extras = {
376 'hooks': ['pull'],
377 'hooks_uri': 'fake_hook_uri',
378 }
379 with mock_hook_response(status=2, exception='Exception',
380 exception_args=('foo',)):
381 assert hooks.git_post_pull(extras).status == 128
382
383
384 def test_git_post_pull_is_disabled():
116 def test_git_post_pull_is_disabled():
385 assert (
117 assert (
386 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
387
119
388
120
389 class TestGetHooksClient(object):
121 class TestGetHooksClient(object):
390 def test_returns_pyro_client_when_protocol_matches(self):
391 hooks_uri = 'localhost:8000'
392 result = hooks._get_hooks_client({
393 'hooks_uri': hooks_uri,
394 'hooks_protocol': 'pyro4'
395 })
396 assert isinstance(result, hooks.HooksPyro4Client)
397 assert result.hooks_uri == hooks_uri
398
122
399 def test_returns_http_client_when_protocol_matches(self):
123 def test_returns_http_client_when_protocol_matches(self):
400 hooks_uri = 'localhost:8000'
124 hooks_uri = 'localhost:8000'
401 result = hooks._get_hooks_client({
125 result = hooks._get_hooks_client({
402 'hooks_uri': hooks_uri,
126 'hooks_uri': hooks_uri,
403 'hooks_protocol': 'http'
127 'hooks_protocol': 'http'
404 })
128 })
405 assert isinstance(result, hooks.HooksHttpClient)
129 assert isinstance(result, hooks.HooksHttpClient)
406 assert result.hooks_uri == hooks_uri
130 assert result.hooks_uri == hooks_uri
407
131
408 def test_returns_pyro4_client_when_no_protocol_is_specified(self):
409 hooks_uri = 'localhost:8000'
410 result = hooks._get_hooks_client({
411 'hooks_uri': hooks_uri
412 })
413 assert isinstance(result, hooks.HooksPyro4Client)
414 assert result.hooks_uri == hooks_uri
415
416 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
417 fake_module = mock.Mock()
133 fake_module = mock.Mock()
418 import_patcher = mock.patch.object(
134 import_patcher = mock.patch.object(
419 hooks.importlib, 'import_module', return_value=fake_module)
135 hooks.importlib, 'import_module', return_value=fake_module)
420 fake_module_name = 'fake.module'
136 fake_module_name = 'fake.module'
421 with import_patcher as import_mock:
137 with import_patcher as import_mock:
422 result = hooks._get_hooks_client(
138 result = hooks._get_hooks_client(
423 {'hooks_module': fake_module_name})
139 {'hooks_module': fake_module_name})
424
140
425 import_mock.assert_called_once_with(fake_module_name)
141 import_mock.assert_called_once_with(fake_module_name)
426 assert isinstance(result, hooks.HooksDummyClient)
142 assert isinstance(result, hooks.HooksDummyClient)
427 assert result._hooks_module == fake_module
143 assert result._hooks_module == fake_module
428
144
429
145
430 class TestHooksHttpClient(object):
146 class TestHooksHttpClient(object):
431 def test_init_sets_hooks_uri(self):
147 def test_init_sets_hooks_uri(self):
432 uri = 'localhost:3000'
148 uri = 'localhost:3000'
433 client = hooks.HooksHttpClient(uri)
149 client = hooks.HooksHttpClient(uri)
434 assert client.hooks_uri == uri
150 assert client.hooks_uri == uri
435
151
436 def test_serialize_returns_json_string(self):
152 def test_serialize_returns_json_string(self):
437 client = hooks.HooksHttpClient('localhost:3000')
153 client = hooks.HooksHttpClient('localhost:3000')
438 hook_name = 'test'
154 hook_name = 'test'
439 extras = {
155 extras = {
440 'first': 1,
156 'first': 1,
441 'second': 'two'
157 'second': 'two'
442 }
158 }
443 result = client._serialize(hook_name, extras)
159 result = client._serialize(hook_name, extras)
444 expected_result = json.dumps({
160 expected_result = json.dumps({
445 'method': hook_name,
161 'method': hook_name,
446 'extras': extras
162 'extras': extras
447 })
163 })
448 assert result == expected_result
164 assert result == expected_result
449
165
450 def test_call_queries_http_server(self, http_mirror):
166 def test_call_queries_http_server(self, http_mirror):
451 client = hooks.HooksHttpClient(http_mirror.uri)
167 client = hooks.HooksHttpClient(http_mirror.uri)
452 hook_name = 'test'
168 hook_name = 'test'
453 extras = {
169 extras = {
454 'first': 1,
170 'first': 1,
455 'second': 'two'
171 'second': 'two'
456 }
172 }
457 result = client(hook_name, extras)
173 result = client(hook_name, extras)
458 expected_result = {
174 expected_result = {
459 'method': hook_name,
175 'method': hook_name,
460 'extras': extras
176 'extras': extras
461 }
177 }
462 assert result == expected_result
178 assert result == expected_result
463
179
464
180
465 class TestHooksDummyClient(object):
181 class TestHooksDummyClient(object):
466 def test_init_imports_hooks_module(self):
182 def test_init_imports_hooks_module(self):
467 hooks_module_name = 'rhodecode.fake.module'
183 hooks_module_name = 'rhodecode.fake.module'
468 hooks_module = mock.MagicMock()
184 hooks_module = mock.MagicMock()
469
185
470 import_patcher = mock.patch.object(
186 import_patcher = mock.patch.object(
471 hooks.importlib, 'import_module', return_value=hooks_module)
187 hooks.importlib, 'import_module', return_value=hooks_module)
472 with import_patcher as import_mock:
188 with import_patcher as import_mock:
473 client = hooks.HooksDummyClient(hooks_module_name)
189 client = hooks.HooksDummyClient(hooks_module_name)
474 import_mock.assert_called_once_with(hooks_module_name)
190 import_mock.assert_called_once_with(hooks_module_name)
475 assert client._hooks_module == hooks_module
191 assert client._hooks_module == hooks_module
476
192
477 def test_call_returns_hook_result(self):
193 def test_call_returns_hook_result(self):
478 hooks_module_name = 'rhodecode.fake.module'
194 hooks_module_name = 'rhodecode.fake.module'
479 hooks_module = mock.MagicMock()
195 hooks_module = mock.MagicMock()
480 import_patcher = mock.patch.object(
196 import_patcher = mock.patch.object(
481 hooks.importlib, 'import_module', return_value=hooks_module)
197 hooks.importlib, 'import_module', return_value=hooks_module)
482 with import_patcher:
198 with import_patcher:
483 client = hooks.HooksDummyClient(hooks_module_name)
199 client = hooks.HooksDummyClient(hooks_module_name)
484
200
485 result = client('post_push', {})
201 result = client('post_push', {})
486 hooks_module.Hooks.assert_called_once_with()
202 hooks_module.Hooks.assert_called_once_with()
487 assert result == hooks_module.Hooks().__enter__().post_push()
203 assert result == hooks_module.Hooks().__enter__().post_push()
488
204
489
205
490 class TestHooksPyro4Client(object):
491 def test_init_sets_hooks_uri(self):
492 uri = 'localhost:3000'
493 client = hooks.HooksPyro4Client(uri)
494 assert client.hooks_uri == uri
495
496 def test_call_returns_hook_value(self):
497 hooks_uri = 'localhost:3000'
498 client = hooks.HooksPyro4Client(hooks_uri)
499 hooks_module = mock.Mock()
500 context_manager = mock.MagicMock()
501 context_manager.__enter__.return_value = hooks_module
502 pyro4_patcher = mock.patch.object(
503 hooks.Pyro4, 'Proxy', return_value=context_manager)
504 extras = {
505 'test': 'test'
506 }
507 with pyro4_patcher as pyro4_mock:
508 result = client('post_push', extras)
509 pyro4_mock.assert_called_once_with(hooks_uri)
510 hooks_module.post_push.assert_called_once_with(extras)
511 assert result == hooks_module.post_push.return_value
512
513
514 @pytest.fixture
206 @pytest.fixture
515 def http_mirror(request):
207 def http_mirror(request):
516 server = MirrorHttpServer()
208 server = MirrorHttpServer()
517 request.addfinalizer(server.stop)
209 request.addfinalizer(server.stop)
518 return server
210 return server
519
211
520
212
521 class MirrorHttpHandler(BaseHTTPRequestHandler):
213 class MirrorHttpHandler(BaseHTTPRequestHandler):
522 def do_POST(self):
214 def do_POST(self):
523 length = int(self.headers['Content-Length'])
215 length = int(self.headers['Content-Length'])
524 body = self.rfile.read(length).decode('utf-8')
216 body = self.rfile.read(length).decode('utf-8')
525 self.send_response(200)
217 self.send_response(200)
526 self.end_headers()
218 self.end_headers()
527 self.wfile.write(body)
219 self.wfile.write(body)
528
220
529
221
530 class MirrorHttpServer(object):
222 class MirrorHttpServer(object):
531 ip_address = '127.0.0.1'
223 ip_address = '127.0.0.1'
532 port = 0
224 port = 0
533
225
534 def __init__(self):
226 def __init__(self):
535 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
536 _, self.port = self._daemon.server_address
228 _, self.port = self._daemon.server_address
537 self._thread = threading.Thread(target=self._daemon.serve_forever)
229 self._thread = threading.Thread(target=self._daemon.serve_forever)
538 self._thread.daemon = True
230 self._thread.daemon = True
539 self._thread.start()
231 self._thread.start()
540
232
541 def stop(self):
233 def stop(self):
542 self._daemon.shutdown()
234 self._daemon.shutdown()
543 self._thread.join()
235 self._thread.join()
544 self._daemon = None
236 self._daemon = None
545 self._thread = None
237 self._thread = None
546
238
547 @property
239 @property
548 def uri(self):
240 def uri(self):
549 return '{}:{}'.format(self.ip_address, self.port)
241 return '{}:{}'.format(self.ip_address, self.port)
@@ -1,57 +1,57 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import mock
18 import mock
19 import pytest
19 import pytest
20
20
21 from vcsserver import main
21 from vcsserver import http_main
22 from vcsserver.base import obfuscate_qs
22 from vcsserver.base import obfuscate_qs
23
23
24
24
25 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
25 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
26 @mock.patch('vcsserver.hgpatches.patch_largefiles_capabilities')
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
27 def test_applies_largefiles_patch(patch_largefiles_capabilities):
28 main.main([])
28 http_main.main([])
29 patch_largefiles_capabilities.assert_called_once_with()
29 patch_largefiles_capabilities.assert_called_once_with()
30
30
31
31
32 @mock.patch('vcsserver.main.VcsServerCommand', mock.Mock())
32 @mock.patch('vcsserver.http_main.VCS', mock.Mock())
33 @mock.patch('vcsserver.main.MercurialFactory', None)
33 @mock.patch('vcsserver.http_main.MercurialFactory', None)
34 @mock.patch(
34 @mock.patch(
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
35 'vcsserver.hgpatches.patch_largefiles_capabilities',
36 mock.Mock(side_effect=Exception("Must not be called")))
36 mock.Mock(side_effect=Exception("Must not be called")))
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
37 def test_applies_largefiles_patch_only_if_mercurial_is_available():
38 main.main([])
38 http_main.main([])
39
39
40
40
41 @pytest.mark.parametrize('given, expected', [
41 @pytest.mark.parametrize('given, expected', [
42 ('bad', 'bad'),
42 ('bad', 'bad'),
43 ('query&foo=bar', 'query&foo=bar'),
43 ('query&foo=bar', 'query&foo=bar'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
44 ('equery&auth_token=bar', 'equery&auth_token=*****'),
45 ('a;b;c;query&foo=bar&auth_token=secret',
45 ('a;b;c;query&foo=bar&auth_token=secret',
46 'a&b&c&query&foo=bar&auth_token=*****'),
46 'a&b&c&query&foo=bar&auth_token=*****'),
47 ('', ''),
47 ('', ''),
48 (None, None),
48 (None, None),
49 ('foo=bar', 'foo=bar'),
49 ('foo=bar', 'foo=bar'),
50 ('auth_token=secret', 'auth_token=*****'),
50 ('auth_token=secret', 'auth_token=*****'),
51 ('auth_token=secret&api_key=secret2',
51 ('auth_token=secret&api_key=secret2',
52 'auth_token=*****&api_key=*****'),
52 'auth_token=*****&api_key=*****'),
53 ('auth_token=secret&api_key=secret2&param=value',
53 ('auth_token=secret&api_key=secret2&param=value',
54 'auth_token=*****&api_key=*****&param=value'),
54 'auth_token=*****&api_key=*****&param=value'),
55 ])
55 ])
56 def test_obfuscate_qs(given, expected):
56 def test_obfuscate_qs(given, expected):
57 assert expected == obfuscate_qs(given)
57 assert expected == obfuscate_qs(given)
@@ -1,60 +1,60 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RhodeCode GmbH
2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18
18
19
19
20 import time
20 import time
21 import logging
21 import logging
22
22
23
23
24 from vcsserver.utils import safe_str
24 from vcsserver.utils import safe_str
25
25
26
26
27 log = logging.getLogger(__name__)
27 log = logging.getLogger(__name__)
28
28
29
29
30 def get_access_path(request):
30 def get_access_path(request):
31 environ = request.environ
31 environ = request.environ
32 return environ.get('PATH_INFO')
32 return environ.get('PATH_INFO')
33
33
34
34
35 class RequestWrapperTween(object):
35 class RequestWrapperTween(object):
36 def __init__(self, handler, registry):
36 def __init__(self, handler, registry):
37 self.handler = handler
37 self.handler = handler
38 self.registry = registry
38 self.registry = registry
39
39
40 # one-time configuration code goes here
40 # one-time configuration code goes here
41
41
42 def __call__(self, request):
42 def __call__(self, request):
43 start = time.time()
43 start = time.time()
44 try:
44 try:
45 response = self.handler(request)
45 response = self.handler(request)
46 finally:
46 finally:
47 end = time.time()
47 end = time.time()
48
48
49 log.info('IP: %s Request to %s time: %.3fs' % (
49 log.info('IP: %s Request to path: `%s` time: %.3fs' % (
50 '127.0.0.1',
50 '127.0.0.1',
51 safe_str(get_access_path(request)), end - start)
51 safe_str(get_access_path(request)), end - start)
52 )
52 )
53
53
54 return response
54 return response
55
55
56
56
57 def includeme(config):
57 def includeme(config):
58 config.add_tween(
58 config.add_tween(
59 'vcsserver.tweens.RequestWrapperTween',
59 'vcsserver.tweens.RequestWrapperTween',
60 )
60 )
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed
This diff has been collapsed as it changes many lines (508 lines changed).
1 NO CONTENT: file was removed