release: Merge default into stable for release preparation

Author: marcink
Revision: r572:fd48aa4e (merge, stable branch)
@@ -1,6 +1,6 @@
1 1 [bumpversion]
2 current_version = 4.13.3
2 current_version = 4.14.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
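The hunk above moves the version tracked by bumpversion from 4.13.3 to 4.14.0. Purely as an illustration of how such a `[bumpversion]` section is consumed (this is not the tool's code, and the `.bumpversion.cfg` filename is an assumption), a plain config parser can read the fields and expand the commit-message template:

```python
# Illustrative only: mimics how the [bumpversion] section above drives the bump
# commit message. The filename ".bumpversion.cfg" is an assumption.
try:
    from configparser import ConfigParser   # Python 3
except ImportError:
    from ConfigParser import ConfigParser   # Python 2.7, which vcsserver targets

cp = ConfigParser()
cp.read(".bumpversion.cfg")
current = cp.get("bumpversion", "current_version")       # "4.14.0" after this merge
template = cp.get("bumpversion", "message", raw=True)
print(template.format(current_version=current, new_version="4.14.1"))
# -> release: Bump version 4.14.0 to 4.14.1
```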
@@ -1,16 +1,14 @@
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.13.3
12 state = in_progress
13 version = 4.14.0
16 14
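This hunk resets the per-release task checklist and puts the 4.14.0 release back into the in_progress state. As a hedged sketch only (not the project's actual release tooling, and the `release.ini` filename is assumed), a script driving such a checklist could rely on the `[DEFAULT] done = false` fallback like this:

```python
# Hypothetical reader for the release checklist above; filename is an assumption.
try:
    from configparser import ConfigParser
except ImportError:
    from ConfigParser import ConfigParser

cp = ConfigParser()
cp.read("release.ini")
# tasks without an explicit "done" inherit false from [DEFAULT]
pending = [s for s in cp.sections()
           if s.startswith("task:") and not cp.getboolean(s, "done")]
print("release %s is %s, pending tasks: %s" % (
    cp.get("release", "version"), cp.get("release", "state"), pending))
```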
@@ -1,87 +1,87 @@
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 3 ################################################################################
5 4
6 5
7 6 [server:main]
8 7 ## COMMON ##
9 8 host = 0.0.0.0
10 9 port = 9900
11 10
12 11 use = egg:waitress#main
13 12
14 13
15 14 [app:main]
16 15 use = egg:rhodecode-vcsserver
17 16
18 17 pyramid.default_locale_name = en
19 18 pyramid.includes =
20 19
21 20 ## default locale used by VCS systems
22 21 locale = en_US.UTF-8
23 22
24 23
25 24 ## path to binaries for vcsserver, it should be set by the installer
26 25 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
27 26 core.binary_dir = ""
28 27
29 ## custom exception store path, defaults to TMPDIR
30 exception_tracker.store_path =
28 ## Custom exception store path, defaults to TMPDIR
29 ## This is used to store exception from RhodeCode in shared directory
30 #exception_tracker.store_path =
31 31
32 32 ## Default cache dir for caches. Putting this into a ramdisk
33 33 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
34 ## large ammount of space
34 ## large amount of space
35 35 cache_dir = %(here)s/rcdev/data
36 36
37 37 ## cache region for storing repo_objects cache
38 38 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
39 39 ## cache auto-expires after N seconds
40 40 rc_cache.repo_object.expiration_time = 300
41 41 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
42 42 rc_cache.repo_object.max_size = 100
43 43
44 44
45 45 ################################
46 46 ### LOGGING CONFIGURATION ####
47 47 ################################
48 48 [loggers]
49 49 keys = root, vcsserver
50 50
51 51 [handlers]
52 52 keys = console
53 53
54 54 [formatters]
55 55 keys = generic
56 56
57 57 #############
58 58 ## LOGGERS ##
59 59 #############
60 60 [logger_root]
61 61 level = NOTSET
62 62 handlers = console
63 63
64 64 [logger_vcsserver]
65 65 level = DEBUG
66 66 handlers =
67 67 qualname = vcsserver
68 68 propagate = 1
69 69
70 70
71 71 ##############
72 72 ## HANDLERS ##
73 73 ##############
74 74
75 75 [handler_console]
76 76 class = StreamHandler
77 77 args = (sys.stderr,)
78 78 level = DEBUG
79 79 formatter = generic
80 80
81 81 ################
82 82 ## FORMATTERS ##
83 83 ################
84 84
85 85 [formatter_generic]
86 86 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
87 87 datefmt = %Y-%m-%d %H:%M:%S
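For orientation, the sketch below shows one conventional way a PasteDeploy/Pyramid ini shaped like the development configuration above is loaded and served with waitress (matching `use = egg:waitress#main`). It is not code from this repository; it assumes the file is saved as `vcsserver.ini` and that the rhodecode-vcsserver package is installed.

```python
# Minimal sketch, not project code: load the logging sections and [app:main]
# from an ini shaped like the one above, then serve it with waitress.
# Assumes the file is saved as "vcsserver.ini" and rhodecode-vcsserver is installed.
from pyramid.paster import get_app, setup_logging
from waitress import serve

ini_path = "vcsserver.ini"
setup_logging(ini_path)                 # wires up [loggers]/[handlers]/[formatters]
app = get_app(ini_path, "main")         # builds the WSGI app from [app:main]
serve(app, host="0.0.0.0", port=9900)   # same bind as [server:main] above
```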
@@ -1,108 +1,108 @@
1 1 ################################################################################
2 2 # RhodeCode VCSServer with HTTP Backend - configuration #
3 # #
4 3 ################################################################################
5 4
6 5
7 6 [server:main]
8 7 ## COMMON ##
9 8 host = 127.0.0.1
10 9 port = 9900
11 10
12 11
13 12 ##########################
14 13 ## GUNICORN WSGI SERVER ##
15 14 ##########################
16 15 ## run with gunicorn --log-config vcsserver.ini --paste vcsserver.ini
17 16 use = egg:gunicorn#main
18 17 ## Sets the number of process workers. Recommended
19 18 ## value is (2 * NUMBER_OF_CPUS + 1), eg 2CPU = 5 workers
20 19 workers = 2
21 20 ## process name
22 21 proc_name = rhodecode_vcsserver
23 22 ## type of worker class, currently `sync` is the only option allowed.
24 23 worker_class = sync
25 24 ## The maximum number of simultaneous clients. Valid only for Gevent
26 25 #worker_connections = 10
27 26 ## max number of requests that worker will handle before being gracefully
28 27 ## restarted, could prevent memory leaks
29 28 max_requests = 1000
30 29 max_requests_jitter = 30
31 30 ## amount of time a worker can spend with handling a request before it
32 31 ## gets killed and restarted. Set to 6hrs
33 32 timeout = 21600
34 33
35 34
36 35 [app:main]
37 36 use = egg:rhodecode-vcsserver
38 37
39 38 pyramid.default_locale_name = en
40 39 pyramid.includes =
41 40
42 41 ## default locale used by VCS systems
43 42 locale = en_US.UTF-8
44 43
45 44
46 45 ## path to binaries for vcsserver, it should be set by the installer
47 46 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
48 47 core.binary_dir = ""
49 48
50 ## custom exception store path, defaults to TMPDIR
51 exception_tracker.store_path =
49 ## Custom exception store path, defaults to TMPDIR
50 ## This is used to store exception from RhodeCode in shared directory
51 #exception_tracker.store_path =
52 52
53 53 ## Default cache dir for caches. Putting this into a ramdisk
54 54 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
55 ## large ammount of space
55 ## large amount of space
56 56 cache_dir = %(here)s/rcdev/data
57 57
58 58 ## cache region for storing repo_objects cache
59 59 rc_cache.repo_object.backend = dogpile.cache.rc.memory_lru
60 60 ## cache auto-expires after N seconds
61 61 rc_cache.repo_object.expiration_time = 300
62 62 ## max size of LRU, old values will be discarded if the size of cache reaches max_size
63 63 rc_cache.repo_object.max_size = 100
64 64
65 65
66 66 ################################
67 67 ### LOGGING CONFIGURATION ####
68 68 ################################
69 69 [loggers]
70 70 keys = root, vcsserver
71 71
72 72 [handlers]
73 73 keys = console
74 74
75 75 [formatters]
76 76 keys = generic
77 77
78 78 #############
79 79 ## LOGGERS ##
80 80 #############
81 81 [logger_root]
82 82 level = NOTSET
83 83 handlers = console
84 84
85 85 [logger_vcsserver]
86 86 level = DEBUG
87 87 handlers =
88 88 qualname = vcsserver
89 89 propagate = 1
90 90
91 91
92 92 ##############
93 93 ## HANDLERS ##
94 94 ##############
95 95
96 96 [handler_console]
97 97 class = StreamHandler
98 98 args = (sys.stderr,)
99 99 level = DEBUG
100 100 formatter = generic
101 101
102 102 ################
103 103 ## FORMATTERS ##
104 104 ################
105 105
106 106 [formatter_generic]
107 107 format = %(asctime)s.%(msecs)03d [%(process)d] %(levelname)-5.5s [%(name)s] %(message)s
108 108 datefmt = %Y-%m-%d %H:%M:%S
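The gunicorn-flavoured configuration above recommends `(2 * NUMBER_OF_CPUS + 1)` workers. A quick worked check of that rule of thumb, shown only for illustration:

```python
# Worked example of the workers rule of thumb from the comment above; not shipped code.
import multiprocessing

cpus = multiprocessing.cpu_count()
workers = 2 * cpus + 1
print("%d CPUs -> %d suggested gunicorn workers" % (cpus, workers))  # 2 CPUs -> 5
```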
@@ -1,178 +1,196 @@
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 7 args@
8 8 { pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 10 , doCheck ? false
11 11 , ...
12 12 }:
13 13
14 let pkgs_ = (import <nixpkgs> {}); in
14 let
15 pkgs_ = (import <nixpkgs> {});
16 in
15 17
16 18 let
17
18 # TODO: Currently we ignore the passed in pkgs, instead we should use it
19 # somehow as a base and apply overlays to it.
20 19 pkgs = import <nixpkgs> {
21 20 overlays = [
22 21 (import ./pkgs/overlays.nix)
23 22 ];
24 inherit (pkgs_)
23 inherit
24 (pkgs_)
25 25 system;
26 26 };
27 27
28 28 # Works with the new python-packages, still can fallback to the old
29 29 # variant.
30 30 basePythonPackagesUnfix = basePythonPackages.__unfix__ or (
31 31 self: basePythonPackages.override (a: { inherit self; }));
32 32
33 33 # Evaluates to the last segment of a file system path.
34 34 basename = path: with pkgs.lib; last (splitString "/" path);
35 35
36 36 # source code filter used as arugment to builtins.filterSource.
37 37 src-filter = path: type: with pkgs.lib;
38 38 let
39 39 ext = last (splitString "." path);
40 40 in
41 41 !builtins.elem (basename path) [
42 42 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
43 "bower_components" "node_modules"
43 "node_modules" "node_binaries"
44 44 "build" "data" "result" "tmp"] &&
45 45 !builtins.elem ext ["egg-info" "pyc"] &&
46 46 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
47 47 # it would still be good to restore it since we want to ignore "result-*".
48 48 !hasPrefix "result" path;
49 49
50 50 sources =
51 51 let
52 inherit (pkgs.lib) all isString attrValues;
52 inherit
53 (pkgs.lib)
54 all
55 isString
56 attrValues;
53 57 sourcesConfig = pkgs.config.rc.sources or {};
54 58 in
55 59 # Ensure that sources are configured as strings. Using a path
56 60 # would result in a copy into the nix store.
57 61 assert all isString (attrValues sourcesConfig);
58 62 sourcesConfig;
59 63
60 64 version = builtins.readFile "${rhodecode-vcsserver-src}/vcsserver/VERSION";
61 65 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
62 66
63 67 pythonLocalOverrides = self: super: {
64 68 rhodecode-vcsserver =
65 69 let
66 70 releaseName = "RhodeCodeVCSServer-${version}";
67 71 in super.rhodecode-vcsserver.override (attrs: {
68 72 inherit
69 73 doCheck
70 74 version;
71 75
72 76 name = "rhodecode-vcsserver-${version}";
73 77 releaseName = releaseName;
74 78 src = rhodecode-vcsserver-src;
75 79 dontStrip = true; # prevent strip, we don't need it.
76 80
77 81 # expose following attributed outside
78 82 passthru = {
79 83 pythonPackages = self;
80 84 };
81 85
82 86 propagatedBuildInputs =
83 87 attrs.propagatedBuildInputs or [] ++ [
84 88 pkgs.git
85 89 pkgs.subversion
86 90 ];
87 91
88 92 # set some default locale env variables
89 93 LC_ALL = "en_US.UTF-8";
90 94 LOCALE_ARCHIVE =
91 95 if pkgs.stdenv.isLinux
92 96 then "${pkgs.glibcLocales}/lib/locale/locale-archive"
93 97 else "";
94 98
95 99 # Add bin directory to path so that tests can find 'vcsserver'.
96 100 preCheck = ''
97 101 export PATH="$out/bin:$PATH"
98 102 '';
99 103
100 104 # custom check phase for testing
101 105 checkPhase = ''
102 106 runHook preCheck
103 107 PYTHONHASHSEED=random py.test -vv -p no:sugar -r xw --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
104 108 runHook postCheck
105 109 '';
106 110
107 111 postCheck = ''
108 112 echo "Cleanup of vcsserver/tests"
109 113 rm -rf $out/lib/${self.python.libPrefix}/site-packages/vcsserver/tests
110 114 '';
111 115
112 116 postInstall = ''
113 117 echo "Writing vcsserver meta information for rccontrol to nix-support/rccontrol"
114 118 mkdir -p $out/nix-support/rccontrol
115 119 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
116 120 echo "DONE: vcsserver meta information for rccontrol written"
117 121
118 122 mkdir -p $out/etc
119 123 cp configs/production.ini $out/etc
120 124 echo "DONE: saved vcsserver production.ini into $out/etc"
121 125
122 126 # python based programs need to be wrapped
123 127 mkdir -p $out/bin
124 ln -s ${self.python}/bin/python $out/bin
125 ln -s ${self.pyramid}/bin/* $out/bin/
128 ln -s ${self.python}/bin/python $out/bin/
126 129 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
130 ln -s ${self.pyramid}/bin/prequest $out/bin/
131 ln -s ${self.pyramid}/bin/pserve $out/bin/
127 132
128 133 # Symlink version control utilities
129 134 # We ensure that always the correct version is available as a symlink.
130 135 # So that users calling them via the profile path will always use the
131 # correct version.
136 # correct version. Wrapping is required so those can "import"
137 # vcsserver python hooks.
132 138
133 139 ln -s ${pkgs.git}/bin/git $out/bin
134 140 ln -s ${self.mercurial}/bin/hg $out/bin
135 141 ln -s ${pkgs.subversion}/bin/svn* $out/bin
142
136 143 echo "DONE: created symlinks into $out/bin"
144 DEPS="$out/bin/*"
137 145
138 for file in $out/bin/*;
146 # wrap only dependency scripts, they require to have full PYTHONPATH set
147 # to be able to import all packages
148 for file in $DEPS;
139 149 do
140 150 wrapProgram $file \
141 151 --prefix PATH : $PATH \
142 152 --prefix PYTHONPATH : $PYTHONPATH \
143 153 --set PYTHONHASHSEED random
144 154 done
155
145 156 echo "DONE: vcsserver binary wrapping"
146 157
147 158 '';
148 159
149 160 });
150 161 };
151 162
152 163 basePythonPackages = with builtins;
153 164 if isAttrs pythonPackages then
154 165 pythonPackages
155 166 else
156 167 getAttr pythonPackages pkgs;
157 168
158 169 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
159 inherit pkgs;
160 inherit (pkgs) fetchurl fetchgit fetchhg;
170 inherit
171 pkgs;
172 inherit
173 (pkgs)
174 fetchurl
175 fetchgit
176 fetchhg;
161 177 };
162 178
163 179 pythonVCSServerOverrides = import ./pkgs/python-packages-overrides.nix {
164 inherit pkgs basePythonPackages;
180 inherit
181 pkgs
182 basePythonPackages;
165 183 };
166 184
167 185
168 186 # Apply all overrides and fix the final package set
169 187 myPythonPackagesUnfix = with pkgs.lib;
170 188 (extends pythonExternalOverrides
171 189 (extends pythonLocalOverrides
172 190 (extends pythonVCSServerOverrides
173 191 (extends pythonGeneratedPackages
174 192 basePythonPackagesUnfix))));
175 193
176 194 myPythonPackages = (pkgs.lib.fix myPythonPackagesUnfix);
177 195
178 196 in myPythonPackages.rhodecode-vcsserver
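The `checkPhase` above shells out to `py.test` with coverage options (after setting `PYTHONHASHSEED=random` on the command line). For local debugging outside Nix, a roughly equivalent invocation driven from Python might look like the sketch below; this is an assumption about usage, not part of the derivation, and it requires pytest and pytest-cov to be installed and to be run from the repository root.

```python
# Roughly mirrors the checkPhase command above; illustrative, not part of default.nix.
import sys
import pytest

sys.exit(pytest.main([
    "-vv", "-p", "no:sugar", "-r", "xw",
    "--cov-config=.coveragerc", "--cov=vcsserver", "--cov-report=term-missing",
    "vcsserver",
]))
```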
@@ -1,45 +1,45 @@
1 1 self: super: {
2 2 # bump GIT version
3 3 git = super.lib.overrideDerivation super.git (oldAttrs: {
4 4 name = "git-2.17.2";
5 5 src = self.fetchurl {
6 6 url = "https://www.kernel.org/pub/software/scm/git/git-2.17.2.tar.xz";
7 7 sha256 = "1ghljlxmyqphx13qspy382cpl2pbkbwbhqm7w7z57r9mkhswx668";
8 8 };
9 9
10 10 patches = [
11 ./git_patches/docbook2texi.patch
12 ./git_patches/symlinks-in-bin.patch
13 ./git_patches/git-sh-i18n.patch
14 ./git_patches/ssh-path.patch
11 ./patches/git/docbook2texi.patch
12 ./patches/git/symlinks-in-bin.patch
13 ./patches/git/git-sh-i18n.patch
14 ./patches/git/ssh-path.patch
15 15 ];
16 16
17 17 });
18 18
19 19 # Override subversion derivation to
20 20 # - activate python bindings
21 21 subversion =
22 22 let
23 23 subversionWithPython = super.subversion.override {
24 24 httpSupport = true;
25 25 pythonBindings = true;
26 26 python = self.python27Packages.python;
27 27 };
28 28 in
29 29 super.lib.overrideDerivation subversionWithPython (oldAttrs: {
30 30 name = "subversion-1.10.2";
31 31 src = self.fetchurl {
32 32 url = "https://archive.apache.org/dist/subversion/subversion-1.10.2.tar.gz";
33 33 sha256 = "0xv5z2bg0lw7057g913yc13f60nfj257wvmsq22pr33m4syf26sg";
34 34 };
35 35
36 36 ## use internal lz4/utf8proc because it is stable and shipped with SVN
37 37 configureFlags = oldAttrs.configureFlags ++ [
38 38 " --with-lz4=internal"
39 39 " --with-utf8proc=internal"
40 40 ];
41 41
42 42
43 43 });
44 44
45 45 }
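The overlay above pins git 2.17.2 and subversion 1.10.2 (with Python bindings) and relocates the git patches under pkgs/patches/git. As a hedged illustration only, a quick sanity check inside the built environment could confirm the pinned tool versions:

```python
# Illustrative sanity check, not project code: assumes `git` and `svn` are on PATH,
# e.g. inside the environment built from the overlay above.
import subprocess

git_version = subprocess.check_output(["git", "--version"]).strip()
svn_version = subprocess.check_output(["svn", "--version", "--quiet"]).strip()
print(git_version)  # expected to report 2.17.2
print(svn_version)  # expected to report 1.10.2
```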
1 NO CONTENT: file renamed from pkgs/git_patches/docbook2texi.patch to pkgs/patches/git/docbook2texi.patch
1 NO CONTENT: file renamed from pkgs/git_patches/git-sh-i18n.patch to pkgs/patches/git/git-sh-i18n.patch
1 NO CONTENT: file renamed from pkgs/git_patches/ssh-path.patch to pkgs/patches/git/ssh-path.patch
@@ -1,949 +1,950 @@
1 1 # Generated by pip2nix 0.8.0.dev1
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 { pkgs, fetchurl, fetchgit, fetchhg }:
5 5
6 6 self: super: {
7 7 "atomicwrites" = super.buildPythonPackage {
8 name = "atomicwrites-1.1.5";
8 name = "atomicwrites-1.2.1";
9 9 doCheck = false;
10 10 src = fetchurl {
11 url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
12 sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
11 url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
12 sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.mit ];
16 16 };
17 17 };
18 18 "attrs" = super.buildPythonPackage {
19 name = "attrs-18.1.0";
19 name = "attrs-18.2.0";
20 20 doCheck = false;
21 21 src = fetchurl {
22 url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
23 sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
22 url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
23 sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
24 24 };
25 25 meta = {
26 26 license = [ pkgs.lib.licenses.mit ];
27 27 };
28 28 };
29 29 "backports.shutil-get-terminal-size" = super.buildPythonPackage {
30 30 name = "backports.shutil-get-terminal-size-1.0.0";
31 31 doCheck = false;
32 32 src = fetchurl {
33 33 url = "https://files.pythonhosted.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
34 34 sha256 = "107cmn7g3jnbkp826zlj8rrj19fam301qvaqf0f3905f5217lgki";
35 35 };
36 36 meta = {
37 37 license = [ pkgs.lib.licenses.mit ];
38 38 };
39 39 };
40 40 "beautifulsoup4" = super.buildPythonPackage {
41 41 name = "beautifulsoup4-4.6.3";
42 42 doCheck = false;
43 43 src = fetchurl {
44 44 url = "https://files.pythonhosted.org/packages/88/df/86bffad6309f74f3ff85ea69344a078fc30003270c8df6894fca7a3c72ff/beautifulsoup4-4.6.3.tar.gz";
45 45 sha256 = "041dhalzjciw6qyzzq7a2k4h1yvyk76xigp35hv5ibnn448ydy4h";
46 46 };
47 47 meta = {
48 48 license = [ pkgs.lib.licenses.mit ];
49 49 };
50 50 };
51 51 "configobj" = super.buildPythonPackage {
52 52 name = "configobj-5.0.6";
53 53 doCheck = false;
54 54 propagatedBuildInputs = [
55 55 self."six"
56 56 ];
57 57 src = fetchurl {
58 58 url = "https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c";
59 59 sha256 = "1hhcxirwvg58grlfr177b3awhbq8hlx1l3lh69ifl1ki7lfd1s1x";
60 60 };
61 61 meta = {
62 62 license = [ pkgs.lib.licenses.bsdOriginal ];
63 63 };
64 64 };
65 65 "cov-core" = super.buildPythonPackage {
66 66 name = "cov-core-1.15.0";
67 67 doCheck = false;
68 68 propagatedBuildInputs = [
69 69 self."coverage"
70 70 ];
71 71 src = fetchurl {
72 72 url = "https://files.pythonhosted.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
73 73 sha256 = "0k3np9ymh06yv1ib96sb6wfsxjkqhmik8qfsn119vnhga9ywc52a";
74 74 };
75 75 meta = {
76 76 license = [ pkgs.lib.licenses.mit ];
77 77 };
78 78 };
79 79 "coverage" = super.buildPythonPackage {
80 name = "coverage-3.7.1";
80 name = "coverage-4.5.1";
81 81 doCheck = false;
82 82 src = fetchurl {
83 url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
84 sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
83 url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
84 sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
85 85 };
86 86 meta = {
87 license = [ pkgs.lib.licenses.bsdOriginal ];
87 license = [ pkgs.lib.licenses.asl20 ];
88 88 };
89 89 };
90 90 "decorator" = super.buildPythonPackage {
91 91 name = "decorator-4.1.2";
92 92 doCheck = false;
93 93 src = fetchurl {
94 94 url = "https://files.pythonhosted.org/packages/bb/e0/f6e41e9091e130bf16d4437dabbac3993908e4d6485ecbc985ef1352db94/decorator-4.1.2.tar.gz";
95 95 sha256 = "1d8npb11kxyi36mrvjdpcjij76l5zfyrz2f820brf0l0rcw4vdkw";
96 96 };
97 97 meta = {
98 98 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
99 99 };
100 100 };
101 101 "dogpile.cache" = super.buildPythonPackage {
102 name = "dogpile.cache-0.6.6";
102 name = "dogpile.cache-0.6.7";
103 103 doCheck = false;
104 104 src = fetchurl {
105 url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
106 sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
105 url = "https://files.pythonhosted.org/packages/ee/bd/440da735a11c6087eed7cc8747fc4b995cbac2464168682f8ee1c8e43844/dogpile.cache-0.6.7.tar.gz";
106 sha256 = "1aw8rx8vhb75y7zc6gi67g21sw057jdx7i8m3jq7kf3nqavxx9zw";
107 107 };
108 108 meta = {
109 109 license = [ pkgs.lib.licenses.bsdOriginal ];
110 110 };
111 111 };
112 112 "dogpile.core" = super.buildPythonPackage {
113 113 name = "dogpile.core-0.4.1";
114 114 doCheck = false;
115 115 src = fetchurl {
116 116 url = "https://files.pythonhosted.org/packages/0e/77/e72abc04c22aedf874301861e5c1e761231c288b5de369c18be8f4b5c9bb/dogpile.core-0.4.1.tar.gz";
117 117 sha256 = "0xpdvg4kr1isfkrh1rfsh7za4q5a5s6l2kf9wpvndbwf3aqjyrdy";
118 118 };
119 119 meta = {
120 120 license = [ pkgs.lib.licenses.bsdOriginal ];
121 121 };
122 122 };
123 123 "dulwich" = super.buildPythonPackage {
124 124 name = "dulwich-0.13.0";
125 125 doCheck = false;
126 126 src = fetchurl {
127 127 url = "https://files.pythonhosted.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
128 128 sha256 = "0f1jwvrh549c4rgavkn3wizrch904s73s4fmrxykxy9cw8s57lwf";
129 129 };
130 130 meta = {
131 131 license = [ pkgs.lib.licenses.gpl2Plus ];
132 132 };
133 133 };
134 134 "enum34" = super.buildPythonPackage {
135 135 name = "enum34-1.1.6";
136 136 doCheck = false;
137 137 src = fetchurl {
138 138 url = "https://files.pythonhosted.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
139 139 sha256 = "1cgm5ng2gcfrkrm3hc22brl6chdmv67b9zvva9sfs7gn7dwc9n4a";
140 140 };
141 141 meta = {
142 142 license = [ pkgs.lib.licenses.bsdOriginal ];
143 143 };
144 144 };
145 145 "funcsigs" = super.buildPythonPackage {
146 146 name = "funcsigs-1.0.2";
147 147 doCheck = false;
148 148 src = fetchurl {
149 149 url = "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
150 150 sha256 = "0l4g5818ffyfmfs1a924811azhjj8ax9xd1cffr1mzd3ycn0zfx7";
151 151 };
152 152 meta = {
153 153 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
154 154 };
155 155 };
156 156 "gevent" = super.buildPythonPackage {
157 name = "gevent-1.3.5";
157 name = "gevent-1.3.6";
158 158 doCheck = false;
159 159 propagatedBuildInputs = [
160 160 self."greenlet"
161 161 ];
162 162 src = fetchurl {
163 url = "https://files.pythonhosted.org/packages/e6/0a/fc345c6e6161f84484870dbcaa58e427c10bd9bdcd08a69bed3d6b398bf1/gevent-1.3.5.tar.gz";
164 sha256 = "1w3gydxirgd2f60c5yv579w4903ds9s4g3587ik4jby97hgqc5bz";
163 url = "https://files.pythonhosted.org/packages/49/13/aa4bb3640b5167fe58875d3d7e65390cdb14f9682a41a741a566bb560842/gevent-1.3.6.tar.gz";
164 sha256 = "1ih4k73dqz2zb561hda99vbanja3m6cdch3mgxxn1mla3qwkqhbv";
165 165 };
166 166 meta = {
167 167 license = [ pkgs.lib.licenses.mit ];
168 168 };
169 169 };
170 170 "gprof2dot" = super.buildPythonPackage {
171 171 name = "gprof2dot-2017.9.19";
172 172 doCheck = false;
173 173 src = fetchurl {
174 174 url = "https://files.pythonhosted.org/packages/9d/36/f977122502979f3dfb50704979c9ed70e6b620787942b089bf1af15f5aba/gprof2dot-2017.9.19.tar.gz";
175 175 sha256 = "17ih23ld2nzgc3xwgbay911l6lh96jp1zshmskm17n1gg2i7mg6f";
176 176 };
177 177 meta = {
178 178 license = [ { fullName = "GNU Lesser General Public License v3 or later (LGPLv3+)"; } { fullName = "LGPL"; } ];
179 179 };
180 180 };
181 181 "greenlet" = super.buildPythonPackage {
182 name = "greenlet-0.4.13";
182 name = "greenlet-0.4.15";
183 183 doCheck = false;
184 184 src = fetchurl {
185 url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
186 sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
185 url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
186 sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
187 187 };
188 188 meta = {
189 189 license = [ pkgs.lib.licenses.mit ];
190 190 };
191 191 };
192 192 "gunicorn" = super.buildPythonPackage {
193 193 name = "gunicorn-19.9.0";
194 194 doCheck = false;
195 195 src = fetchurl {
196 196 url = "https://files.pythonhosted.org/packages/47/52/68ba8e5e8ba251e54006a49441f7ccabca83b6bef5aedacb4890596c7911/gunicorn-19.9.0.tar.gz";
197 197 sha256 = "1wzlf4xmn6qjirh5w81l6i6kqjnab1n1qqkh7zsj1yb6gh4n49ps";
198 198 };
199 199 meta = {
200 200 license = [ pkgs.lib.licenses.mit ];
201 201 };
202 202 };
203 203 "hg-evolve" = super.buildPythonPackage {
204 204 name = "hg-evolve-8.0.1";
205 205 doCheck = false;
206 206 src = fetchurl {
207 207 url = "https://files.pythonhosted.org/packages/06/1a/c5c12d8f117426f05285a820ee5a23121882f5381104e86276b72598934f/hg-evolve-8.0.1.tar.gz";
208 208 sha256 = "1brafifb42k71gl7qssb5m3ijnm7y30lfvm90z8xxcr2fgz19p29";
209 209 };
210 210 meta = {
211 211 license = [ { fullName = "GPLv2+"; } ];
212 212 };
213 213 };
214 214 "hgsubversion" = super.buildPythonPackage {
215 215 name = "hgsubversion-1.9.2";
216 216 doCheck = false;
217 217 propagatedBuildInputs = [
218 218 self."mercurial"
219 219 self."subvertpy"
220 220 ];
221 221 src = fetchurl {
222 222 url = "https://files.pythonhosted.org/packages/05/80/3a3cef10dd65e86528ef8d7ac57a41ebc782d0f3c6cfa4fed021aa9fbee0/hgsubversion-1.9.2.tar.gz";
223 223 sha256 = "16490narhq14vskml3dam8g5y3w3hdqj3g8bgm2b0c0i85l1xvcz";
224 224 };
225 225 meta = {
226 226 license = [ pkgs.lib.licenses.gpl1 ];
227 227 };
228 228 };
229 229 "hupper" = super.buildPythonPackage {
230 name = "hupper-1.3";
230 name = "hupper-1.3.1";
231 231 doCheck = false;
232 232 src = fetchurl {
233 url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
234 sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
233 url = "https://files.pythonhosted.org/packages/cf/4b/467b826a84c8594b81f414b5ab6794e981951dac90ca40abaf9ea1cb36b0/hupper-1.3.1.tar.gz";
234 sha256 = "03mf13n6i4dd60wlb9m99ddl4m3lmly70cjp7f82vdkibfl1v6l9";
235 235 };
236 236 meta = {
237 237 license = [ pkgs.lib.licenses.mit ];
238 238 };
239 239 };
240 240 "ipdb" = super.buildPythonPackage {
241 241 name = "ipdb-0.11";
242 242 doCheck = false;
243 243 propagatedBuildInputs = [
244 244 self."setuptools"
245 245 self."ipython"
246 246 ];
247 247 src = fetchurl {
248 248 url = "https://files.pythonhosted.org/packages/80/fe/4564de08f174f3846364b3add8426d14cebee228f741c27e702b2877e85b/ipdb-0.11.tar.gz";
249 249 sha256 = "02m0l8wrhhd3z7dg3czn5ys1g5pxib516hpshdzp7rxzsxgcd0bh";
250 250 };
251 251 meta = {
252 252 license = [ pkgs.lib.licenses.bsdOriginal ];
253 253 };
254 254 };
255 255 "ipython" = super.buildPythonPackage {
256 256 name = "ipython-5.1.0";
257 257 doCheck = false;
258 258 propagatedBuildInputs = [
259 259 self."setuptools"
260 260 self."decorator"
261 261 self."pickleshare"
262 262 self."simplegeneric"
263 263 self."traitlets"
264 264 self."prompt-toolkit"
265 265 self."pygments"
266 266 self."pexpect"
267 267 self."backports.shutil-get-terminal-size"
268 268 self."pathlib2"
269 269 self."pexpect"
270 270 ];
271 271 src = fetchurl {
272 272 url = "https://files.pythonhosted.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
273 273 sha256 = "0qdrf6aj9kvjczd5chj1my8y2iq09am9l8bb2a1334a52d76kx3y";
274 274 };
275 275 meta = {
276 276 license = [ pkgs.lib.licenses.bsdOriginal ];
277 277 };
278 278 };
279 279 "ipython-genutils" = super.buildPythonPackage {
280 280 name = "ipython-genutils-0.2.0";
281 281 doCheck = false;
282 282 src = fetchurl {
283 283 url = "https://files.pythonhosted.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
284 284 sha256 = "1a4bc9y8hnvq6cp08qs4mckgm6i6ajpndp4g496rvvzcfmp12bpb";
285 285 };
286 286 meta = {
287 287 license = [ pkgs.lib.licenses.bsdOriginal ];
288 288 };
289 289 };
290 290 "mako" = super.buildPythonPackage {
291 291 name = "mako-1.0.7";
292 292 doCheck = false;
293 293 propagatedBuildInputs = [
294 294 self."markupsafe"
295 295 ];
296 296 src = fetchurl {
297 297 url = "https://files.pythonhosted.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
298 298 sha256 = "1bi5gnr8r8dva06qpyx4kgjc6spm2k1y908183nbbaylggjzs0jf";
299 299 };
300 300 meta = {
301 301 license = [ pkgs.lib.licenses.mit ];
302 302 };
303 303 };
304 304 "markupsafe" = super.buildPythonPackage {
305 305 name = "markupsafe-1.0";
306 306 doCheck = false;
307 307 src = fetchurl {
308 308 url = "https://files.pythonhosted.org/packages/4d/de/32d741db316d8fdb7680822dd37001ef7a448255de9699ab4bfcbdf4172b/MarkupSafe-1.0.tar.gz";
309 309 sha256 = "0rdn1s8x9ni7ss8rfiacj7x1085lx8mh2zdwqslnw8xc3l4nkgm6";
310 310 };
311 311 meta = {
312 312 license = [ pkgs.lib.licenses.bsdOriginal ];
313 313 };
314 314 };
315 315 "mercurial" = super.buildPythonPackage {
316 316 name = "mercurial-4.6.2";
317 317 doCheck = false;
318 318 src = fetchurl {
319 319 url = "https://files.pythonhosted.org/packages/d9/fb/c7ecf2b7fd349878dbf45b8390b8db735cef73d49dd9ce8a364b4ca3a846/mercurial-4.6.2.tar.gz";
320 320 sha256 = "1bv6wgcdx8glihjjfg22khhc52mclsn4kwfqvzbzlg0b42h4xl0w";
321 321 };
322 322 meta = {
323 323 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
324 324 };
325 325 };
326 326 "mock" = super.buildPythonPackage {
327 327 name = "mock-1.0.1";
328 328 doCheck = false;
329 329 src = fetchurl {
330 330 url = "https://files.pythonhosted.org/packages/a2/52/7edcd94f0afb721a2d559a5b9aae8af4f8f2c79bc63fdbe8a8a6c9b23bbe/mock-1.0.1.tar.gz";
331 331 sha256 = "0kzlsbki6q0awf89rc287f3aj8x431lrajf160a70z0ikhnxsfdq";
332 332 };
333 333 meta = {
334 334 license = [ pkgs.lib.licenses.bsdOriginal ];
335 335 };
336 336 };
337 337 "more-itertools" = super.buildPythonPackage {
338 338 name = "more-itertools-4.3.0";
339 339 doCheck = false;
340 340 propagatedBuildInputs = [
341 341 self."six"
342 342 ];
343 343 src = fetchurl {
344 344 url = "https://files.pythonhosted.org/packages/88/ff/6d485d7362f39880810278bdc906c13300db05485d9c65971dec1142da6a/more-itertools-4.3.0.tar.gz";
345 345 sha256 = "17h3na0rdh8xq30w4b9pizgkdxmm51896bxw600x84jflg9vaxn4";
346 346 };
347 347 meta = {
348 348 license = [ pkgs.lib.licenses.mit ];
349 349 };
350 350 };
351 351 "msgpack-python" = super.buildPythonPackage {
352 352 name = "msgpack-python-0.5.6";
353 353 doCheck = false;
354 354 src = fetchurl {
355 355 url = "https://files.pythonhosted.org/packages/8a/20/6eca772d1a5830336f84aca1d8198e5a3f4715cd1c7fc36d3cc7f7185091/msgpack-python-0.5.6.tar.gz";
356 356 sha256 = "16wh8qgybmfh4pjp8vfv78mdlkxfmcasg78lzlnm6nslsfkci31p";
357 357 };
358 358 meta = {
359 359 license = [ pkgs.lib.licenses.asl20 ];
360 360 };
361 361 };
362 362 "pastedeploy" = super.buildPythonPackage {
363 363 name = "pastedeploy-1.5.2";
364 364 doCheck = false;
365 365 src = fetchurl {
366 366 url = "https://files.pythonhosted.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
367 367 sha256 = "1jz3m4hq8v6hyhfjz9425nd3nvn52cvbfipdcd72krjmla4qz1fm";
368 368 };
369 369 meta = {
370 370 license = [ pkgs.lib.licenses.mit ];
371 371 };
372 372 };
373 373 "pathlib2" = super.buildPythonPackage {
374 name = "pathlib2-2.3.0";
374 name = "pathlib2-2.3.2";
375 375 doCheck = false;
376 376 propagatedBuildInputs = [
377 377 self."six"
378 378 self."scandir"
379 379 ];
380 380 src = fetchurl {
381 url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
382 sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
381 url = "https://files.pythonhosted.org/packages/db/a8/7d6439c1aec525ed70810abee5b7d7f3aa35347f59bc28343e8f62019aa2/pathlib2-2.3.2.tar.gz";
382 sha256 = "10yb0iv5x2hs631rcppkhbddx799d3h8pcwmkbh2a66ns3w71ccf";
383 383 };
384 384 meta = {
385 385 license = [ pkgs.lib.licenses.mit ];
386 386 };
387 387 };
388 388 "pexpect" = super.buildPythonPackage {
389 389 name = "pexpect-4.6.0";
390 390 doCheck = false;
391 391 propagatedBuildInputs = [
392 392 self."ptyprocess"
393 393 ];
394 394 src = fetchurl {
395 395 url = "https://files.pythonhosted.org/packages/89/43/07d07654ee3e25235d8cea4164cdee0ec39d1fda8e9203156ebe403ffda4/pexpect-4.6.0.tar.gz";
396 396 sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
397 397 };
398 398 meta = {
399 399 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
400 400 };
401 401 };
402 402 "pickleshare" = super.buildPythonPackage {
403 name = "pickleshare-0.7.4";
403 name = "pickleshare-0.7.5";
404 404 doCheck = false;
405 405 propagatedBuildInputs = [
406 406 self."pathlib2"
407 407 ];
408 408 src = fetchurl {
409 url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
410 sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
409 url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
410 sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
411 411 };
412 412 meta = {
413 413 license = [ pkgs.lib.licenses.mit ];
414 414 };
415 415 };
416 416 "plaster" = super.buildPythonPackage {
417 417 name = "plaster-1.0";
418 418 doCheck = false;
419 419 propagatedBuildInputs = [
420 420 self."setuptools"
421 421 ];
422 422 src = fetchurl {
423 423 url = "https://files.pythonhosted.org/packages/37/e1/56d04382d718d32751017d32f351214384e529b794084eee20bb52405563/plaster-1.0.tar.gz";
424 424 sha256 = "1hy8k0nv2mxq94y5aysk6hjk9ryb4bsd13g83m60hcyzxz3wflc3";
425 425 };
426 426 meta = {
427 427 license = [ pkgs.lib.licenses.mit ];
428 428 };
429 429 };
430 430 "plaster-pastedeploy" = super.buildPythonPackage {
431 431 name = "plaster-pastedeploy-0.6";
432 432 doCheck = false;
433 433 propagatedBuildInputs = [
434 434 self."pastedeploy"
435 435 self."plaster"
436 436 ];
437 437 src = fetchurl {
438 438 url = "https://files.pythonhosted.org/packages/3f/e7/6a6833158d2038ec40085433308a1e164fd1dac595513f6dd556d5669bb8/plaster_pastedeploy-0.6.tar.gz";
439 439 sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
440 440 };
441 441 meta = {
442 442 license = [ pkgs.lib.licenses.mit ];
443 443 };
444 444 };
445 445 "pluggy" = super.buildPythonPackage {
446 name = "pluggy-0.6.0";
446 name = "pluggy-0.8.0";
447 447 doCheck = false;
448 448 src = fetchurl {
449 url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
450 sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
449 url = "https://files.pythonhosted.org/packages/65/25/81d0de17cd00f8ca994a4e74e3c4baf7cd25072c0b831dad5c7d9d6138f8/pluggy-0.8.0.tar.gz";
450 sha256 = "1580p47l2zqzsza8jcnw1h2wh3vvmygk6ly8bvi4w0g8j14sjys4";
451 451 };
452 452 meta = {
453 453 license = [ pkgs.lib.licenses.mit ];
454 454 };
455 455 };
456 456 "prompt-toolkit" = super.buildPythonPackage {
457 457 name = "prompt-toolkit-1.0.15";
458 458 doCheck = false;
459 459 propagatedBuildInputs = [
460 460 self."six"
461 461 self."wcwidth"
462 462 ];
463 463 src = fetchurl {
464 464 url = "https://files.pythonhosted.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
465 465 sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
466 466 };
467 467 meta = {
468 468 license = [ pkgs.lib.licenses.bsdOriginal ];
469 469 };
470 470 };
471 471 "psutil" = super.buildPythonPackage {
472 name = "psutil-5.4.6";
472 name = "psutil-5.4.7";
473 473 doCheck = false;
474 474 src = fetchurl {
475 url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
476 sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
475 url = "https://files.pythonhosted.org/packages/7d/9a/1e93d41708f8ed2b564395edfa3389f0fd6d567597401c2e5e2775118d8b/psutil-5.4.7.tar.gz";
476 sha256 = "0fsgmvzwbdbszkwfnqhib8jcxm4w6zyhvlxlcda0rfm5cyqj4qsv";
477 477 };
478 478 meta = {
479 479 license = [ pkgs.lib.licenses.bsdOriginal ];
480 480 };
481 481 };
482 482 "ptyprocess" = super.buildPythonPackage {
483 483 name = "ptyprocess-0.6.0";
484 484 doCheck = false;
485 485 src = fetchurl {
486 486 url = "https://files.pythonhosted.org/packages/7d/2d/e4b8733cf79b7309d84c9081a4ab558c89d8c89da5961bf4ddb050ca1ce0/ptyprocess-0.6.0.tar.gz";
487 487 sha256 = "1h4lcd3w5nrxnsk436ar7fwkiy5rfn5wj2xwy9l0r4mdqnf2jgwj";
488 488 };
489 489 meta = {
490 490 license = [ ];
491 491 };
492 492 };
493 493 "py" = super.buildPythonPackage {
494 name = "py-1.5.3";
494 name = "py-1.6.0";
495 495 doCheck = false;
496 496 src = fetchurl {
497 url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
498 sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
497 url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
498 sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
499 499 };
500 500 meta = {
501 501 license = [ pkgs.lib.licenses.mit ];
502 502 };
503 503 };
504 504 "pygments" = super.buildPythonPackage {
505 505 name = "pygments-2.2.0";
506 506 doCheck = false;
507 507 src = fetchurl {
508 508 url = "https://files.pythonhosted.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
509 509 sha256 = "1k78qdvir1yb1c634nkv6rbga8wv4289xarghmsbbvzhvr311bnv";
510 510 };
511 511 meta = {
512 512 license = [ pkgs.lib.licenses.bsdOriginal ];
513 513 };
514 514 };
515 515 "pyramid" = super.buildPythonPackage {
516 516 name = "pyramid-1.9.2";
517 517 doCheck = false;
518 518 propagatedBuildInputs = [
519 519 self."setuptools"
520 520 self."webob"
521 521 self."repoze.lru"
522 522 self."zope.interface"
523 523 self."zope.deprecation"
524 524 self."venusian"
525 525 self."translationstring"
526 526 self."pastedeploy"
527 527 self."plaster"
528 528 self."plaster-pastedeploy"
529 529 self."hupper"
530 530 ];
531 531 src = fetchurl {
532 532 url = "https://files.pythonhosted.org/packages/a0/c1/b321d07cfc4870541989ad131c86a1d593bfe802af0eca9718a0dadfb97a/pyramid-1.9.2.tar.gz";
533 533 sha256 = "09drsl0346nchgxp2j7sa5hlk7mkhfld9wvbd0wicacrp26a92fg";
534 534 };
535 535 meta = {
536 536 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
537 537 };
538 538 };
539 539 "pyramid-mako" = super.buildPythonPackage {
540 540 name = "pyramid-mako-1.0.2";
541 541 doCheck = false;
542 542 propagatedBuildInputs = [
543 543 self."pyramid"
544 544 self."mako"
545 545 ];
546 546 src = fetchurl {
547 547 url = "https://files.pythonhosted.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
548 548 sha256 = "18gk2vliq8z4acblsl6yzgbvnr9rlxjlcqir47km7kvlk1xri83d";
549 549 };
550 550 meta = {
551 551 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
552 552 };
553 553 };
554 554 "pytest" = super.buildPythonPackage {
555 name = "pytest-3.6.0";
555 name = "pytest-3.8.2";
556 556 doCheck = false;
557 557 propagatedBuildInputs = [
558 558 self."py"
559 559 self."six"
560 560 self."setuptools"
561 561 self."attrs"
562 562 self."more-itertools"
563 563 self."atomicwrites"
564 564 self."pluggy"
565 565 self."funcsigs"
566 self."pathlib2"
566 567 ];
567 568 src = fetchurl {
568 url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
569 sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
569 url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
570 sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
570 571 };
571 572 meta = {
572 573 license = [ pkgs.lib.licenses.mit ];
573 574 };
574 575 };
575 576 "pytest-cov" = super.buildPythonPackage {
576 name = "pytest-cov-2.5.1";
577 name = "pytest-cov-2.6.0";
577 578 doCheck = false;
578 579 propagatedBuildInputs = [
579 580 self."pytest"
580 581 self."coverage"
581 582 ];
582 583 src = fetchurl {
583 url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
584 url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
585 sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
585 586 };
586 587 meta = {
587 588 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
588 589 };
589 590 };
590 591 "pytest-profiling" = super.buildPythonPackage {
591 592 name = "pytest-profiling-1.3.0";
592 593 doCheck = false;
593 594 propagatedBuildInputs = [
594 595 self."six"
595 596 self."pytest"
596 597 self."gprof2dot"
597 598 ];
598 599 src = fetchurl {
599 600 url = "https://files.pythonhosted.org/packages/f5/34/4626126e041a51ef50a80d0619519b18d20aef249aac25b0d0fdd47e57ee/pytest-profiling-1.3.0.tar.gz";
600 601 sha256 = "08r5afx5z22yvpmsnl91l4amsy1yxn8qsmm61mhp06mz8zjs51kb";
601 602 };
602 603 meta = {
603 604 license = [ pkgs.lib.licenses.mit ];
604 605 };
605 606 };
606 607 "pytest-runner" = super.buildPythonPackage {
607 608 name = "pytest-runner-4.2";
608 609 doCheck = false;
609 610 src = fetchurl {
610 611 url = "https://files.pythonhosted.org/packages/9e/b7/fe6e8f87f9a756fd06722216f1b6698ccba4d269eac6329d9f0c441d0f93/pytest-runner-4.2.tar.gz";
611 612 sha256 = "1gkpyphawxz38ni1gdq1fmwyqcg02m7ypzqvv46z06crwdxi2gyj";
612 613 };
613 614 meta = {
614 615 license = [ pkgs.lib.licenses.mit ];
615 616 };
616 617 };
617 618 "pytest-sugar" = super.buildPythonPackage {
618 619 name = "pytest-sugar-0.9.1";
619 620 doCheck = false;
620 621 propagatedBuildInputs = [
621 622 self."pytest"
622 623 self."termcolor"
623 624 ];
624 625 src = fetchurl {
625 626 url = "https://files.pythonhosted.org/packages/3e/6a/a3f909083079d03bde11d06ab23088886bbe25f2c97fbe4bb865e2bf05bc/pytest-sugar-0.9.1.tar.gz";
626 627 sha256 = "0b4av40dv30727m54v211r0nzwjp2ajkjgxix6j484qjmwpw935b";
627 628 };
628 629 meta = {
629 630 license = [ pkgs.lib.licenses.bsdOriginal ];
630 631 };
631 632 };
632 633 "pytest-timeout" = super.buildPythonPackage {
633 name = "pytest-timeout-1.2.1";
634 name = "pytest-timeout-1.3.2";
634 635 doCheck = false;
635 636 propagatedBuildInputs = [
636 637 self."pytest"
637 638 ];
638 639 src = fetchurl {
639 url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
640 sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
640 url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
641 sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
641 642 };
642 643 meta = {
643 644 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
644 645 };
645 646 };
646 647 "repoze.lru" = super.buildPythonPackage {
647 648 name = "repoze.lru-0.7";
648 649 doCheck = false;
649 650 src = fetchurl {
650 651 url = "https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz";
651 652 sha256 = "0xzz1aw2smy8hdszrq8yhnklx6w1r1mf55061kalw3iq35gafa84";
652 653 };
653 654 meta = {
654 655 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
655 656 };
656 657 };
657 658 "rhodecode-vcsserver" = super.buildPythonPackage {
658 name = "rhodecode-vcsserver-4.13.3";
659 name = "rhodecode-vcsserver-4.14.0";
659 660 buildInputs = [
660 661 self."pytest"
661 662 self."py"
662 663 self."pytest-cov"
663 664 self."pytest-sugar"
664 665 self."pytest-runner"
665 666 self."pytest-profiling"
667 self."pytest-timeout"
666 668 self."gprof2dot"
667 self."pytest-timeout"
668 669 self."mock"
669 670 self."webtest"
670 671 self."cov-core"
671 672 self."coverage"
672 673 self."configobj"
673 674 ];
674 675 doCheck = true;
675 676 propagatedBuildInputs = [
676 677 self."configobj"
677 678 self."atomicwrites"
678 679 self."attrs"
679 680 self."dogpile.cache"
680 681 self."dogpile.core"
681 682 self."decorator"
682 683 self."dulwich"
683 684 self."hgsubversion"
684 685 self."hg-evolve"
685 686 self."mako"
686 687 self."markupsafe"
687 688 self."mercurial"
688 689 self."msgpack-python"
689 690 self."pastedeploy"
690 691 self."psutil"
691 692 self."pyramid"
692 693 self."pyramid-mako"
693 694 self."pygments"
694 695 self."pathlib2"
695 696 self."repoze.lru"
696 697 self."simplejson"
697 698 self."subprocess32"
698 self."setproctitle"
699 699 self."subvertpy"
700 700 self."six"
701 701 self."translationstring"
702 702 self."webob"
703 703 self."zope.deprecation"
704 704 self."zope.interface"
705 705 self."gevent"
706 706 self."greenlet"
707 707 self."gunicorn"
708 708 self."waitress"
709 self."setproctitle"
709 710 self."ipdb"
710 711 self."ipython"
711 712 self."pytest"
712 713 self."py"
713 714 self."pytest-cov"
714 715 self."pytest-sugar"
715 716 self."pytest-runner"
716 717 self."pytest-profiling"
718 self."pytest-timeout"
717 719 self."gprof2dot"
718 self."pytest-timeout"
719 720 self."mock"
720 721 self."webtest"
721 722 self."cov-core"
722 723 self."coverage"
723 724 ];
724 725 src = ./.;
725 726 meta = {
726 727 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
727 728 };
728 729 };
729 730 "scandir" = super.buildPythonPackage {
730 731 name = "scandir-1.9.0";
731 732 doCheck = false;
732 733 src = fetchurl {
733 734 url = "https://files.pythonhosted.org/packages/16/2a/557af1181e6b4e30254d5a6163b18f5053791ca66e251e77ab08887e8fe3/scandir-1.9.0.tar.gz";
734 735 sha256 = "0r3hvf1a9jm1rkqgx40gxkmccknkaiqjavs8lccgq9s8khh5x5s4";
735 736 };
736 737 meta = {
737 738 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
738 739 };
739 740 };
740 741 "setproctitle" = super.buildPythonPackage {
741 742 name = "setproctitle-1.1.10";
742 743 doCheck = false;
743 744 src = fetchurl {
744 745 url = "https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz";
745 746 sha256 = "163kplw9dcrw0lffq1bvli5yws3rngpnvrxrzdw89pbphjjvg0v2";
746 747 };
747 748 meta = {
748 749 license = [ pkgs.lib.licenses.bsdOriginal ];
749 750 };
750 751 };
751 752 "setuptools" = super.buildPythonPackage {
752 name = "setuptools-40.1.0";
753 name = "setuptools-40.4.3";
753 754 doCheck = false;
754 755 src = fetchurl {
755 url = "https://files.pythonhosted.org/packages/5a/df/b2e3d9693bb0dcbeac516a73dd7a9eb82b126ae52e4a74605a9b01beddd5/setuptools-40.1.0.zip";
756 sha256 = "0w1blx5ajga5y15dci0mddk49cf2xpq0mp7rp7jrqr2diqk00ib6";
756 url = "https://files.pythonhosted.org/packages/6e/9c/6a003320b00ef237f94aa74e4ad66c57a7618f6c79d67527136e2544b728/setuptools-40.4.3.zip";
757 sha256 = "058v6zns4634n4al2nmmvp15j8nrgwn8wjrbdks47wk3vm05gg5c";
757 758 };
758 759 meta = {
759 760 license = [ pkgs.lib.licenses.mit ];
760 761 };
761 762 };
762 763 "simplegeneric" = super.buildPythonPackage {
763 764 name = "simplegeneric-0.8.1";
764 765 doCheck = false;
765 766 src = fetchurl {
766 767 url = "https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
767 768 sha256 = "0wwi1c6md4vkbcsfsf8dklf3vr4mcdj4mpxkanwgb6jb1432x5yw";
768 769 };
769 770 meta = {
770 771 license = [ pkgs.lib.licenses.zpl21 ];
771 772 };
772 773 };
773 774 "simplejson" = super.buildPythonPackage {
774 775 name = "simplejson-3.11.1";
775 776 doCheck = false;
776 777 src = fetchurl {
777 778 url = "https://files.pythonhosted.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
778 779 sha256 = "1rr58dppsq73p0qcd9bsw066cdd3v63sqv7j6sqni8frvm4jv8h1";
779 780 };
780 781 meta = {
781 782 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
782 783 };
783 784 };
784 785 "six" = super.buildPythonPackage {
785 786 name = "six-1.11.0";
786 787 doCheck = false;
787 788 src = fetchurl {
788 789 url = "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz";
789 790 sha256 = "1scqzwc51c875z23phj48gircqjgnn3af8zy2izjwmnlxrxsgs3h";
790 791 };
791 792 meta = {
792 793 license = [ pkgs.lib.licenses.mit ];
793 794 };
794 795 };
795 796 "subprocess32" = super.buildPythonPackage {
796 name = "subprocess32-3.5.1";
797 name = "subprocess32-3.5.2";
797 798 doCheck = false;
798 799 src = fetchurl {
799 url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
800 sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
800 url = "https://files.pythonhosted.org/packages/c3/5f/7117737fc7114061837a4f51670d863dd7f7f9c762a6546fa8a0dcfe61c8/subprocess32-3.5.2.tar.gz";
801 sha256 = "11v62shwmdys48g7ncs3a8jwwnkcl8d4zcwy6dk73z1zy2f9hazb";
801 802 };
802 803 meta = {
803 804 license = [ pkgs.lib.licenses.psfl ];
804 805 };
805 806 };
806 807 "subvertpy" = super.buildPythonPackage {
807 808 name = "subvertpy-0.10.1";
808 809 doCheck = false;
809 810 src = fetchurl {
810 811 url = "https://files.pythonhosted.org/packages/9d/76/99fa82affce75f5ac0f7dbe513796c3f37311ace0c68e1b063683b4f9b99/subvertpy-0.10.1.tar.gz";
811 812 sha256 = "061ncy9wjz3zyv527avcrdyk0xygyssyy7p1644nhzhwp8zpybij";
812 813 };
813 814 meta = {
814 815 license = [ pkgs.lib.licenses.lgpl21Plus pkgs.lib.licenses.gpl2Plus ];
815 816 };
816 817 };
817 818 "termcolor" = super.buildPythonPackage {
818 819 name = "termcolor-1.1.0";
819 820 doCheck = false;
820 821 src = fetchurl {
821 822 url = "https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
822 823 sha256 = "0fv1vq14rpqwgazxg4981904lfyp84mnammw7y046491cv76jv8x";
823 824 };
824 825 meta = {
825 826 license = [ pkgs.lib.licenses.mit ];
826 827 };
827 828 };
828 829 "traitlets" = super.buildPythonPackage {
829 830 name = "traitlets-4.3.2";
830 831 doCheck = false;
831 832 propagatedBuildInputs = [
832 833 self."ipython-genutils"
833 834 self."six"
834 835 self."decorator"
835 836 self."enum34"
836 837 ];
837 838 src = fetchurl {
838 839 url = "https://files.pythonhosted.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
839 840 sha256 = "0dbq7sx26xqz5ixs711k5nc88p8a0nqyz6162pwks5dpcz9d4jww";
840 841 };
841 842 meta = {
842 843 license = [ pkgs.lib.licenses.bsdOriginal ];
843 844 };
844 845 };
845 846 "translationstring" = super.buildPythonPackage {
846 847 name = "translationstring-1.3";
847 848 doCheck = false;
848 849 src = fetchurl {
849 850 url = "https://files.pythonhosted.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
850 851 sha256 = "0bdpcnd9pv0131dl08h4zbcwmgc45lyvq3pa224xwan5b3x4rr2f";
851 852 };
852 853 meta = {
853 854 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
854 855 };
855 856 };
856 857 "venusian" = super.buildPythonPackage {
857 858 name = "venusian-1.1.0";
858 859 doCheck = false;
859 860 src = fetchurl {
860 861 url = "https://files.pythonhosted.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
861 862 sha256 = "0zapz131686qm0gazwy8bh11vr57pr89jbwbl50s528sqy9f80lr";
862 863 };
863 864 meta = {
864 865 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
865 866 };
866 867 };
867 868 "waitress" = super.buildPythonPackage {
868 869 name = "waitress-1.1.0";
869 870 doCheck = false;
870 871 src = fetchurl {
871 872 url = "https://files.pythonhosted.org/packages/3c/68/1c10dd5c556872ceebe88483b0436140048d39de83a84a06a8baa8136f4f/waitress-1.1.0.tar.gz";
872 873 sha256 = "1a85gyji0kajc3p0s1pwwfm06w4wfxjkvvl4rnrz3h164kbd6g6k";
873 874 };
874 875 meta = {
875 876 license = [ pkgs.lib.licenses.zpl21 ];
876 877 };
877 878 };
878 879 "wcwidth" = super.buildPythonPackage {
879 880 name = "wcwidth-0.1.7";
880 881 doCheck = false;
881 882 src = fetchurl {
882 883 url = "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
883 884 sha256 = "0pn6dflzm609m4r3i8ik5ni9ijjbb5fa3vg1n7hn6vkd49r77wrx";
884 885 };
885 886 meta = {
886 887 license = [ pkgs.lib.licenses.mit ];
887 888 };
888 889 };
889 890 "webob" = super.buildPythonPackage {
890 891 name = "webob-1.7.4";
891 892 doCheck = false;
892 893 src = fetchurl {
893 894 url = "https://files.pythonhosted.org/packages/75/34/731e23f52371852dfe7490a61644826ba7fe70fd52a377aaca0f4956ba7f/WebOb-1.7.4.tar.gz";
894 895 sha256 = "1na01ljg04z40il7vcrn8g29vaw7nvg1xvhk64cr4jys5wcay44d";
895 896 };
896 897 meta = {
897 898 license = [ pkgs.lib.licenses.mit ];
898 899 };
899 900 };
900 901 "webtest" = super.buildPythonPackage {
901 902 name = "webtest-2.0.29";
902 903 doCheck = false;
903 904 propagatedBuildInputs = [
904 905 self."six"
905 906 self."webob"
906 907 self."waitress"
907 908 self."beautifulsoup4"
908 909 ];
909 910 src = fetchurl {
910 911 url = "https://files.pythonhosted.org/packages/94/de/8f94738be649997da99c47b104aa3c3984ecec51a1d8153ed09638253d56/WebTest-2.0.29.tar.gz";
911 912 sha256 = "0bcj1ica5lnmj5zbvk46x28kgphcsgh7sfnwjmn0cr94mhawrg6v";
912 913 };
913 914 meta = {
914 915 license = [ pkgs.lib.licenses.mit ];
915 916 };
916 917 };
917 918 "zope.deprecation" = super.buildPythonPackage {
918 919 name = "zope.deprecation-4.3.0";
919 920 doCheck = false;
920 921 propagatedBuildInputs = [
921 922 self."setuptools"
922 923 ];
923 924 src = fetchurl {
924 925 url = "https://files.pythonhosted.org/packages/a1/18/2dc5e6bfe64fdc3b79411b67464c55bb0b43b127051a20f7f492ab767758/zope.deprecation-4.3.0.tar.gz";
925 926 sha256 = "095jas41wbxgmw95kwdxqhbc3bgihw2hzj9b3qpdg85apcsf2lkx";
926 927 };
927 928 meta = {
928 929 license = [ pkgs.lib.licenses.zpl21 ];
929 930 };
930 931 };
931 932 "zope.interface" = super.buildPythonPackage {
932 933 name = "zope.interface-4.5.0";
933 934 doCheck = false;
934 935 propagatedBuildInputs = [
935 936 self."setuptools"
936 937 ];
937 938 src = fetchurl {
938 939 url = "https://files.pythonhosted.org/packages/ac/8a/657532df378c2cd2a1fe6b12be3b4097521570769d4852ec02c24bd3594e/zope.interface-4.5.0.tar.gz";
939 940 sha256 = "0k67m60ij06wkg82n15qgyn96waf4pmrkhv0njpkfzpmv5q89hsp";
940 941 };
941 942 meta = {
942 943 license = [ pkgs.lib.licenses.zpl21 ];
943 944 };
944 945 };
945 946
946 947 ### Test requirements
947 948
948 949
949 950 }
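This pip2nix-generated package set bumps, among others, dogpile.cache to 0.6.7, pytest to 3.8.2 and gevent to 1.3.6. A hedged way to confirm that a built environment actually exposes those versions (illustrative only, assumes setuptools' pkg_resources is importable):

```python
# Illustrative check, not part of the generated file: compare a few of the pins
# above against what the built environment actually provides.
import pkg_resources

for dist in ("dogpile.cache", "pytest", "gevent"):
    print(dist, pkg_resources.get_distribution(dist).version)
# expected after this change: dogpile.cache 0.6.7, pytest 3.8.2, gevent 1.3.6
```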
@@ -1,16 +1,14 @@
1 1 # This file defines how to "build" for packaging.
2 2
3 { pkgs ? import <nixpkgs> {}
4 , doCheck ? true
3 { doCheck ? true
5 4 }:
6 5
7 6 let
8 7 vcsserver = import ./default.nix {
9 8 inherit
10 doCheck
11 pkgs;
9 doCheck;
12 10 };
13 11
14 12 in {
15 13 build = vcsserver;
16 14 }
@@ -1,48 +1,48 @@
1 1 ## dependencies
2 2
3 3 # our custom configobj
4 4 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
5 atomicwrites==1.1.5
6 attrs==18.1.0
7 dogpile.cache==0.6.6
5 atomicwrites==1.2.1
6 attrs==18.2.0
7 dogpile.cache==0.6.7
8 8 dogpile.core==0.4.1
9 9 decorator==4.1.2
10 10 dulwich==0.13.0
11 11 hgsubversion==1.9.2
12 12 hg-evolve==8.0.1
13 13 mako==1.0.7
14 14 markupsafe==1.0.0
15 15 mercurial==4.6.2
16 16 msgpack-python==0.5.6
17 17
18 18 pastedeploy==1.5.2
19 psutil==5.4.6
19 psutil==5.4.7
20 20 pyramid==1.9.2
21 21 pyramid-mako==1.0.2
22 22
23 23 pygments==2.2.0
24 pathlib2==2.3.0
24 pathlib2==2.3.2
25 25 repoze.lru==0.7
26 26 simplejson==3.11.1
27 subprocess32==3.5.1
28 setproctitle==1.1.10
27 subprocess32==3.5.2
29 28 subvertpy==0.10.1
30 29
31 30 six==1.11.0
32 31 translationstring==1.3
33 32 webob==1.7.4
34 33 zope.deprecation==4.3.0
35 34 zope.interface==4.5.0
36 35
37 36 ## http servers
38 gevent==1.3.5
39 greenlet==0.4.13
37 gevent==1.3.6
38 greenlet==0.4.15
40 39 gunicorn==19.9.0
41 40 waitress==1.1.0
41 setproctitle==1.1.10
42 42
43 43 ## debug
44 44 ipdb==0.11.0
45 45 ipython==5.1.0
46 46
47 47 ## test related requirements
48 48 -r requirements_test.txt
@@ -1,14 +1,14 b''
1 1 # test related requirements
2 pytest==3.6.0
3 py==1.5.3
4 pytest-cov==2.5.1
2 pytest==3.8.2
3 py==1.6.0
4 pytest-cov==2.6.0
5 5 pytest-sugar==0.9.1
6 6 pytest-runner==4.2.0
7 7 pytest-profiling==1.3.0
8 pytest-timeout==1.3.2
8 9 gprof2dot==2017.9.19
9 pytest-timeout==1.2.1
10 10
11 11 mock==1.0.1
12 12 webtest==2.0.29
13 13 cov-core==1.15.0
14 coverage==3.7.1
14 coverage==4.5.1
@@ -1,139 +1,136 b''
1 1 # -*- coding: utf-8 -*-
2 2 # RhodeCode VCSServer provides access to different vcs backends via network.
3 3 # Copyright (C) 2014-2017 RhodeCode GmbH
4 4 #
5 5 # This program is free software; you can redistribute it and/or modify
6 6 # it under the terms of the GNU General Public License as published by
7 7 # the Free Software Foundation; either version 3 of the License, or
8 8 # (at your option) any later version.
9 9 #
10 10 # This program is distributed in the hope that it will be useful,
11 11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 13 # GNU General Public License for more details.
14 14 #
15 15 # You should have received a copy of the GNU General Public License
16 16 # along with this program; if not, write to the Free Software Foundation,
17 17 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 18
19 19 # Import early to make sure things are patched up properly
20 20 from setuptools import setup, find_packages
21 21
22 22 import os
23 23 import sys
24 24 import pkgutil
25 25 import platform
26 26 import codecs
27 27
28 28 try: # for pip >= 10
29 29 from pip._internal.req import parse_requirements
30 30 except ImportError: # for pip <= 9.0.3
31 31 from pip.req import parse_requirements
32 32
33 33 try: # for pip >= 10
34 34 from pip._internal.download import PipSession
35 35 except ImportError: # for pip <= 9.0.3
36 36 from pip.download import PipSession
37 37
38 38
39 39
40 40 if sys.version_info < (2, 7):
41 41 raise Exception('VCSServer requires Python 2.7 or later')
42 42
43 43 here = os.path.abspath(os.path.dirname(__file__))
44 44
45 45 # defines current platform
46 46 __platform__ = platform.system()
47 47 __license__ = 'GPL V3'
48 48 __author__ = 'RhodeCode GmbH'
49 49 __url__ = 'https://code.rhodecode.com'
50 50 is_windows = __platform__ in ('Windows',)
51 51
52 52
53 53 def _get_requirements(req_filename, exclude=None, extras=None):
54 54 extras = extras or []
55 55 exclude = exclude or []
56 56
57 57 try:
58 58 parsed = parse_requirements(
59 59 os.path.join(here, req_filename), session=PipSession())
60 60 except TypeError:
61 61 # try pip < 6.0.0, which doesn't support the session argument
62 62 parsed = parse_requirements(os.path.join(here, req_filename))
63 63
64 64 requirements = []
65 65 for ir in parsed:
66 66 if ir.req and ir.name not in exclude:
67 67 requirements.append(str(ir.req))
68 68 return requirements + extras
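For illustration only (not part of setup.py), the exclude/extras behaviour of the helper above can be mimicked with plain file parsing; the file contents and names below are made up:

def sketch_get_requirements(lines, exclude=None, extras=None):
    # Sketch: mimic _get_requirements() without pip internals, assuming plain
    # "name==version" pins plus comment lines and "-r other.txt" includes.
    exclude = set(exclude or [])
    extras = extras or []
    pins = []
    for line in lines:
        line = line.strip()
        if not line or line.startswith(('#', '-r')):
            continue  # skip blanks, comments and nested requirement files
        name = line.split('==')[0]
        if name not in exclude:
            pins.append(line)
    return pins + extras

# sketch_get_requirements(['setuptools==40.0.0', 'mercurial==4.6.2'],
#                         exclude=['setuptools'], extras=['configobj'])
# -> ['mercurial==4.6.2', 'configobj']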
69 69
70 70
71 71 # requirements extract
72 72 setup_requirements = ['pytest-runner']
73 73 install_requirements = _get_requirements(
74 74 'requirements.txt', exclude=['setuptools'])
75 75 test_requirements = _get_requirements(
76 76 'requirements_test.txt', extras=['configobj'])
77 77
78 78
79 79 def get_version():
80 80 version = pkgutil.get_data('vcsserver', 'VERSION')
81 81 return version.strip()
82 82
83 83
84 84 # additional files that go into the package itself
85 85 package_data = {
86 86 '': ['*.txt', '*.rst'],
87 87 'configs': ['*.ini'],
88 88 'vcsserver': ['VERSION'],
89 89 }
90 90
91 91 description = 'Version Control System Server'
92 92 keywords = ' '.join([
93 93 'CLI', 'RhodeCode', 'RhodeCode Enterprise', 'RhodeCode Tools'])
94 94
95 95 # README/DESCRIPTION generation
96 96 readme_file = 'README.rst'
97 97 changelog_file = 'CHANGES.rst'
98 98 try:
99 99 long_description = codecs.open(readme_file).read() + '\n\n' + \
100 100 codecs.open(changelog_file).read()
101 101 except IOError as err:
102 102 sys.stderr.write(
103 103 "[WARNING] Cannot find file specified as long_description (%s)\n "
104 104 "or changelog (%s) skipping that file" % (readme_file, changelog_file))
105 105 long_description = description
106 106
107 107
108 108 setup(
109 109 name='rhodecode-vcsserver',
110 110 version=get_version(),
111 111 description=description,
112 112 long_description=long_description,
113 113 keywords=keywords,
114 114 license=__license__,
115 115 author=__author__,
116 116 author_email='admin@rhodecode.com',
117 117 url=__url__,
118 118 setup_requires=setup_requirements,
119 119 install_requires=install_requirements,
120 120 tests_require=test_requirements,
121 121 zip_safe=False,
122 122 packages=find_packages(exclude=["docs", "tests*"]),
123 123 package_data=package_data,
124 124 include_package_data=True,
125 125 classifiers=[
126 126 'Development Status :: 6 - Mature',
127 127 'Intended Audience :: Developers',
128 128 'Operating System :: OS Independent',
129 129 'Topic :: Software Development :: Version Control',
130 130 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
131 131 'Programming Language :: Python :: 2.7',
132 132 ],
133 133 entry_points={
134 'console_scripts': [
135 'vcsserver=vcsserver.main:main',
136 ],
137 134 'paste.app_factory': ['main=vcsserver.http_main:main']
138 135 },
139 136 )
@@ -1,67 +1,66 b''
1 1 # This file contains the adjustments which are desired for a development
2 2 # environment.
3 3
4 4 { pkgs ? (import <nixpkgs> {})
5 5 , pythonPackages ? "python27Packages"
6 6 , doCheck ? false
7 7 }:
8 8
9 9 let
10 10
11 11 vcsserver = import ./default.nix {
12 12 inherit
13 pkgs
14 13 doCheck;
15 14 };
16 15
17 16 vcs-pythonPackages = vcsserver.pythonPackages;
18 17
19 18 in vcsserver.override (attrs: {
20 19 # Avoid dumping any sources into the store when entering the shell and
21 20 # make development a little bit more convenient.
22 21 src = null;
23 22
24 23 # Add dependencies which are useful for the development environment.
25 24 buildInputs =
26 25 attrs.buildInputs ++
27 26 (with vcs-pythonPackages; [
28 27 ipdb
29 28 ]);
30 29
31 30 # place to inject some required libs from develop installs
32 31 propagatedBuildInputs =
33 32 attrs.propagatedBuildInputs ++
34 33 [];
35 34
36 35
37 36 # Make sure we execute both hooks
38 37 shellHook = ''
39 38 runHook preShellHook
40 39 runHook postShellHook
41 40 '';
42 41
43 42 preShellHook = ''
44 43 echo "Entering VCS-Shell"
45 44
46 45 # Custom prompt to distinguish from other dev envs.
47 46 export PS1="\n\[\033[1;32m\][VCS-shell:\w]$\[\033[0m\] "
48 47
49 48 # Set locale
50 49 export LC_ALL="en_US.UTF-8"
51 50
52 51 # Setup a temporary directory.
53 52 tmp_path=$(mktemp -d)
54 53 export PATH="$tmp_path/bin:$PATH"
55 54 export PYTHONPATH="$tmp_path/${vcs-pythonPackages.python.sitePackages}:$PYTHONPATH"
56 55 mkdir -p $tmp_path/${vcs-pythonPackages.python.sitePackages}
57 56
58 57 # Develop installation
59 58 echo "[BEGIN]: develop install of rhodecode-vcsserver"
60 59 python setup.py develop --prefix $tmp_path --allow-hosts ""
61 60 '';
62 61
63 62 postShellHook = ''
64 63
65 64 '';
66 65
67 66 })
@@ -1,1 +1,1 b''
1 4.13.3 No newline at end of file
1 4.14.0 No newline at end of file
@@ -1,675 +1,728 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17 import collections
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import traceback
24 24 import urllib
25 25 import urllib2
26 26 from functools import wraps
27 27
28 28 from dulwich import index, objects
29 29 from dulwich.client import HttpGitClient, LocalGitClient
30 30 from dulwich.errors import (
31 31 NotGitRepository, ChecksumMismatch, WrongObjectException,
32 32 MissingCommitError, ObjectMissing, HangupException,
33 33 UnexpectedCommandError)
34 34 from dulwich.repo import Repo as DulwichRepo, Tag
35 35 from dulwich.server import update_server_info
36 36
37 37 from vcsserver import exceptions, settings, subprocessio
38 38 from vcsserver.utils import safe_str
39 39 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
40 40 from vcsserver.hgcompat import (
41 41 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
42 42 from vcsserver.git_lfs.lib import LFSOidStore
43 43
44 44 DIR_STAT = stat.S_IFDIR
45 45 FILE_MODE = stat.S_IFMT
46 46 GIT_LINK = objects.S_IFGITLINK
47 47
48 48 log = logging.getLogger(__name__)
49 49
50 50
51 51 def reraise_safe_exceptions(func):
52 52 """Converts Dulwich exceptions to something neutral."""
53 53 @wraps(func)
54 54 def wrapper(*args, **kwargs):
55 55 try:
56 56 return func(*args, **kwargs)
57 57 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
58 58 ObjectMissing) as e:
59 raise exceptions.LookupException(e)(e.message)
59 exc = exceptions.LookupException(e)
60 raise exc(e)
60 61 except (HangupException, UnexpectedCommandError) as e:
61 raise exceptions.VcsException(e)(e.message)
62 exc = exceptions.VcsException(e)
63 raise exc(e)
62 64 except Exception as e:
63 65 # NOTE(marcink): because of how dulwich handles some exceptions
64 66 # (KeyError on empty repos), we cannot track this and catch all
65 67 # exceptions; these are exceptions from other handlers
66 68 #if not hasattr(e, '_vcs_kind'):
67 69 #log.exception("Unhandled exception in git remote call")
68 70 #raise_from_original(exceptions.UnhandledException)
69 71 raise
70 72 return wrapper
71 73
72 74
73 75 class Repo(DulwichRepo):
74 76 """
75 77 A wrapper for dulwich Repo class.
76 78
77 79 Since dulwich sometimes keeps .idx file descriptors open, it leads to a
78 80 "Too many open files" error. We need to close all opened file descriptors
79 81 once the repo object is destroyed.
80 82
81 83 TODO: mikhail: please check if we need this wrapper after updating dulwich
82 84 to 0.12.0 +
83 85 """
84 86 def __del__(self):
85 87 if hasattr(self, 'object_store'):
86 88 self.close()
87 89
88 90
89 91 class GitFactory(RepoFactory):
90 92 repo_type = 'git'
91 93
92 94 def _create_repo(self, wire, create):
93 95 repo_path = str_to_dulwich(wire['path'])
94 96 return Repo(repo_path)
95 97
96 98
97 99 class GitRemote(object):
98 100
99 101 def __init__(self, factory):
100 102 self._factory = factory
101
103 self.peeled_ref_marker = '^{}'
102 104 self._bulk_methods = {
103 105 "author": self.commit_attribute,
104 106 "date": self.get_object_attrs,
105 107 "message": self.commit_attribute,
106 108 "parents": self.commit_attribute,
107 109 "_commit": self.revision,
108 110 }
109 111
110 112 def _wire_to_config(self, wire):
111 113 if 'config' in wire:
112 114 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
113 115 return {}
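A tiny illustration of the flattening done above, using a hypothetical wire dict (the store path is made up):

# Sketch only: config triples become 'section_option' keys.
wire = {'config': [('vcs_git_lfs', 'store_location', '/var/lfs-store')]}
flattened = dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
# flattened == {'vcs_git_lfs_store_location': '/var/lfs-store'}
# which is what e.g. in_largefiles_store() later reads via
# conf.get('vcs_git_lfs_store_location')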
114 116
115 117 def _assign_ref(self, wire, ref, commit_id):
116 118 repo = self._factory.repo(wire)
117 119 repo[ref] = commit_id
118 120
119 121 @reraise_safe_exceptions
120 122 def add_object(self, wire, content):
121 123 repo = self._factory.repo(wire)
122 124 blob = objects.Blob()
123 125 blob.set_raw_string(content)
124 126 repo.object_store.add_object(blob)
125 127 return blob.id
126 128
127 129 @reraise_safe_exceptions
128 130 def assert_correct_path(self, wire):
129 131 path = wire.get('path')
130 132 try:
131 133 self._factory.repo(wire)
132 134 except NotGitRepository as e:
133 135 tb = traceback.format_exc()
134 136 log.debug("Invalid Git path `%s`, tb: %s", path, tb)
135 137 return False
136 138
137 139 return True
138 140
139 141 @reraise_safe_exceptions
140 142 def bare(self, wire):
141 143 repo = self._factory.repo(wire)
142 144 return repo.bare
143 145
144 146 @reraise_safe_exceptions
145 147 def blob_as_pretty_string(self, wire, sha):
146 148 repo = self._factory.repo(wire)
147 149 return repo[sha].as_pretty_string()
148 150
149 151 @reraise_safe_exceptions
150 152 def blob_raw_length(self, wire, sha):
151 153 repo = self._factory.repo(wire)
152 154 blob = repo[sha]
153 155 return blob.raw_length()
154 156
155 157 def _parse_lfs_pointer(self, raw_content):
156 158
157 159 spec_string = 'version https://git-lfs.github.com/spec'
158 160 if raw_content and raw_content.startswith(spec_string):
159 161 pattern = re.compile(r"""
160 162 (?:\n)?
161 163 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
162 164 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
163 165 ^size[ ](?P<oid_size>[0-9]+)\n
164 166 (?:\n)?
165 167 """, re.VERBOSE | re.MULTILINE)
166 168 match = pattern.match(raw_content)
167 169 if match:
168 170 return match.groupdict()
169 171
170 172 return {}
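An example of a Git LFS pointer that the regular expression above accepts (the hash and size are illustrative only):

# Hypothetical pointer content:
example_pointer = (
    'version https://git-lfs.github.com/spec/v1\n'
    'oid sha256:' + 'a' * 64 + '\n'
    'size 12345\n'
)
# _parse_lfs_pointer(example_pointer) would return roughly:
# {'spec_ver': 'v1', 'oid_hash': 'aaa...a', 'oid_size': '12345'}
# while any non-pointer content falls through to the empty dict.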
171 173
172 174 @reraise_safe_exceptions
173 175 def is_large_file(self, wire, sha):
174 176 repo = self._factory.repo(wire)
175 177 blob = repo[sha]
176 178 return self._parse_lfs_pointer(blob.as_raw_string())
177 179
178 180 @reraise_safe_exceptions
179 181 def in_largefiles_store(self, wire, oid):
180 182 repo = self._factory.repo(wire)
181 183 conf = self._wire_to_config(wire)
182 184
183 185 store_location = conf.get('vcs_git_lfs_store_location')
184 186 if store_location:
185 187 repo_name = repo.path
186 188 store = LFSOidStore(
187 189 oid=oid, repo=repo_name, store_location=store_location)
188 190 return store.has_oid()
189 191
190 192 return False
191 193
192 194 @reraise_safe_exceptions
193 195 def store_path(self, wire, oid):
194 196 repo = self._factory.repo(wire)
195 197 conf = self._wire_to_config(wire)
196 198
197 199 store_location = conf.get('vcs_git_lfs_store_location')
198 200 if store_location:
199 201 repo_name = repo.path
200 202 store = LFSOidStore(
201 203 oid=oid, repo=repo_name, store_location=store_location)
202 204 return store.oid_path
203 205 raise ValueError('Unable to fetch oid with path {}'.format(oid))
204 206
205 207 @reraise_safe_exceptions
206 208 def bulk_request(self, wire, rev, pre_load):
207 209 result = {}
208 210 for attr in pre_load:
209 211 try:
210 212 method = self._bulk_methods[attr]
211 213 args = [wire, rev]
212 214 if attr == "date":
213 215 args.extend(["commit_time", "commit_timezone"])
214 216 elif attr in ["author", "message", "parents"]:
215 217 args.append(attr)
216 218 result[attr] = method(*args)
217 219 except KeyError as e:
218 220 raise exceptions.VcsException(e)(
219 221 "Unknown bulk attribute: %s" % attr)
220 222 return result
221 223
222 224 def _build_opener(self, url):
223 225 handlers = []
224 226 url_obj = url_parser(url)
225 227 _, authinfo = url_obj.authinfo()
226 228
227 229 if authinfo:
228 230 # create a password manager
229 231 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
230 232 passmgr.add_password(*authinfo)
231 233
232 234 handlers.extend((httpbasicauthhandler(passmgr),
233 235 httpdigestauthhandler(passmgr)))
234 236
235 237 return urllib2.build_opener(*handlers)
236 238
237 239 @reraise_safe_exceptions
238 240 def check_url(self, url, config):
239 241 url_obj = url_parser(url)
240 242 test_uri, _ = url_obj.authinfo()
241 243 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
242 244 url_obj.query = obfuscate_qs(url_obj.query)
243 245 cleaned_uri = str(url_obj)
244 246 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
245 247
246 248 if not test_uri.endswith('info/refs'):
247 249 test_uri = test_uri.rstrip('/') + '/info/refs'
248 250
249 251 o = self._build_opener(url)
250 252 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
251 253
252 254 q = {"service": 'git-upload-pack'}
253 255 qs = '?%s' % urllib.urlencode(q)
254 256 cu = "%s%s" % (test_uri, qs)
255 257 req = urllib2.Request(cu, None, {})
256 258
257 259 try:
258 260 log.debug("Trying to open URL %s", cleaned_uri)
259 261 resp = o.open(req)
260 262 if resp.code != 200:
261 263 raise exceptions.URLError()('Return Code is not 200')
262 264 except Exception as e:
263 265 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
264 266 # means it cannot be cloned
265 267 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
266 268
267 269 # now detect if it's a proper git repo
268 270 gitdata = resp.read()
269 271 if 'service=git-upload-pack' in gitdata:
270 272 pass
271 273 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
272 274 # old-style git can return some other format!
273 275 pass
274 276 else:
275 277 raise exceptions.URLError()(
276 278 "url [%s] does not look like an git" % (cleaned_uri,))
277 279
278 280 return True
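For a made-up remote, the probe request built above ends up hitting a URL of this shape (sketch only; host and repository name are hypothetical):

import urllib
# e.g. for url = 'https://example.com/repo.git'
test_uri = 'https://example.com/repo.git/info/refs'
qs = '?%s' % urllib.urlencode({'service': 'git-upload-pack'})
probe_url = '%s%s' % (test_uri, qs)
# probe_url == 'https://example.com/repo.git/info/refs?service=git-upload-pack'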
279 281
280 282 @reraise_safe_exceptions
281 283 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
282 remote_refs = self.fetch(wire, url, apply_refs=False)
284 # TODO(marcink): deprecate this method. Last i checked we don't use it anymore
285 remote_refs = self.pull(wire, url, apply_refs=False)
283 286 repo = self._factory.repo(wire)
284 287 if isinstance(valid_refs, list):
285 288 valid_refs = tuple(valid_refs)
286 289
287 290 for k in remote_refs:
289 292 # only parse heads/tags and skip so-called deferred tags
289 292 if k.startswith(valid_refs) and not k.endswith(deferred):
290 293 repo[k] = remote_refs[k]
291 294
292 295 if update_after_clone:
293 296 # we want to checkout HEAD
294 297 repo["HEAD"] = remote_refs["HEAD"]
295 298 index.build_index_from_tree(repo.path, repo.index_path(),
296 299 repo.object_store, repo["HEAD"].tree)
297 300
298 301 # TODO: this is quite complex, check if that can be simplified
299 302 @reraise_safe_exceptions
300 303 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
301 304 repo = self._factory.repo(wire)
302 305 object_store = repo.object_store
303 306
304 307 # Create tree and populates it with blobs
305 308 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
306 309
307 310 for node in updated:
308 311 # Compute subdirs if needed
309 312 dirpath, nodename = vcspath.split(node['path'])
310 313 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
311 314 parent = commit_tree
312 315 ancestors = [('', parent)]
313 316
314 317 # Tries to dig for the deepest existing tree
315 318 while dirnames:
316 319 curdir = dirnames.pop(0)
317 320 try:
318 321 dir_id = parent[curdir][1]
319 322 except KeyError:
320 323 # put curdir back into dirnames and stop
321 324 dirnames.insert(0, curdir)
322 325 break
323 326 else:
324 327 # If found, updates parent
325 328 parent = repo[dir_id]
326 329 ancestors.append((curdir, parent))
327 330 # Now parent is the deepest existing tree and we need to create
328 331 # subtrees for dirnames (in reverse order)
329 332 # [this only applies to nodes from added]
330 333 new_trees = []
331 334
332 335 blob = objects.Blob.from_string(node['content'])
333 336
334 337 if dirnames:
335 338 # If there are trees which should be created we need to build
336 339 # them now (in reverse order)
337 340 reversed_dirnames = list(reversed(dirnames))
338 341 curtree = objects.Tree()
339 342 curtree[node['node_path']] = node['mode'], blob.id
340 343 new_trees.append(curtree)
341 344 for dirname in reversed_dirnames[:-1]:
342 345 newtree = objects.Tree()
343 346 newtree[dirname] = (DIR_STAT, curtree.id)
344 347 new_trees.append(newtree)
345 348 curtree = newtree
346 349 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
347 350 else:
348 351 parent.add(
349 352 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
350 353
351 354 new_trees.append(parent)
352 355 # Update ancestors
353 356 reversed_ancestors = reversed(
354 357 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
355 358 for parent, tree, path in reversed_ancestors:
356 359 parent[path] = (DIR_STAT, tree.id)
357 360 object_store.add_object(tree)
358 361
359 362 object_store.add_object(blob)
360 363 for tree in new_trees:
361 364 object_store.add_object(tree)
362 365
363 366 for node_path in removed:
364 367 paths = node_path.split('/')
365 368 tree = commit_tree
366 369 trees = [tree]
367 370 # Traverse deep into the forest...
368 371 for path in paths:
369 372 try:
370 373 obj = repo[tree[path][1]]
371 374 if isinstance(obj, objects.Tree):
372 375 trees.append(obj)
373 376 tree = obj
374 377 except KeyError:
375 378 break
376 379 # Cut down the blob and all rotten trees on the way back...
377 380 for path, tree in reversed(zip(paths, trees)):
378 381 del tree[path]
379 382 if tree:
380 383 # This tree still has elements - don't remove it or any
381 384 # of its parents
382 385 break
383 386
384 387 object_store.add_object(commit_tree)
385 388
386 389 # Create commit
387 390 commit = objects.Commit()
388 391 commit.tree = commit_tree.id
389 392 for k, v in commit_data.iteritems():
390 393 setattr(commit, k, v)
391 394 object_store.add_object(commit)
392 395
393 396 ref = 'refs/heads/%s' % branch
394 397 repo.refs[ref] = commit.id
395 398
396 399 return commit.id
397 400
398 401 @reraise_safe_exceptions
399 def fetch(self, wire, url, apply_refs=True, refs=None):
402 def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
400 403 if url != 'default' and '://' not in url:
401 404 client = LocalGitClient(url)
402 405 else:
403 406 url_obj = url_parser(url)
404 407 o = self._build_opener(url)
405 408 url, _ = url_obj.authinfo()
406 409 client = HttpGitClient(base_url=url, opener=o)
407 410 repo = self._factory.repo(wire)
408 411
409 412 determine_wants = repo.object_store.determine_wants_all
410 413 if refs:
411 414 def determine_wants_requested(references):
412 415 return [references[r] for r in references if r in refs]
413 416 determine_wants = determine_wants_requested
414 417
415 418 try:
416 419 remote_refs = client.fetch(
417 420 path=url, target=repo, determine_wants=determine_wants)
418 421 except NotGitRepository as e:
419 422 log.warning(
420 423 'Trying to fetch from "%s" failed, not a Git repository.', url)
421 424 # Exception can contain unicode which we convert
422 425 raise exceptions.AbortException(e)(repr(e))
423 426
424 427 # mikhail: client.fetch() returns all the remote refs, but fetches only
425 428 # refs filtered by the `determine_wants` function. We need to filter the
426 429 # result as well
427 430 if refs:
428 431 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
429 432
430 433 if apply_refs:
431 434 # TODO: johbo: Needs proper test coverage with a git repository
432 435 # that contains a tag object, so that we would end up with
433 436 # a peeled ref at this point.
434 PEELED_REF_MARKER = '^{}'
435 437 for k in remote_refs:
436 if k.endswith(PEELED_REF_MARKER):
437 log.info("Skipping peeled reference %s", k)
438 if k.endswith(self.peeled_ref_marker):
439 log.debug("Skipping peeled reference %s", k)
438 440 continue
439 441 repo[k] = remote_refs[k]
440 442
441 if refs:
443 if refs and not update_after:
442 444 # mikhail: explicitly set the head to the last ref.
443 445 repo['HEAD'] = remote_refs[refs[-1]]
444 446
445 # TODO: mikhail: should we return remote_refs here to be
446 # consistent?
447 else:
448 return remote_refs
447 if update_after:
448 # we want to checkout HEAD
449 repo["HEAD"] = remote_refs["HEAD"]
450 index.build_index_from_tree(repo.path, repo.index_path(),
451 repo.object_store, repo["HEAD"].tree)
452 return remote_refs
453
454 @reraise_safe_exceptions
455 def sync_fetch(self, wire, url, refs=None):
456 repo = self._factory.repo(wire)
457 if refs and not isinstance(refs, (list, tuple)):
458 refs = [refs]
459
460 # get all remote refs we'll use to fetch later
461 output, __ = self.run_git_command(
462 wire, ['ls-remote', url], fail_on_stderr=False,
463 _copts=['-c', 'core.askpass=""'],
464 extra_env={'GIT_TERMINAL_PROMPT': '0'})
465
466 remote_refs = collections.OrderedDict()
467 fetch_refs = []
468
469 for ref_line in output.splitlines():
470 sha, ref = ref_line.split('\t')
471 sha = sha.strip()
472 if ref in remote_refs:
473 # duplicate, skip
474 continue
475 if ref.endswith(self.peeled_ref_marker):
476 log.debug("Skipping peeled reference %s", ref)
477 continue
478 # don't sync HEAD
479 if ref in ['HEAD']:
480 continue
481
482 remote_refs[ref] = sha
483
484 if refs and sha in refs:
485 # we filter fetch using our specified refs
486 fetch_refs.append('{}:{}'.format(ref, ref))
487 elif not refs:
488 fetch_refs.append('{}:{}'.format(ref, ref))
489
490 if fetch_refs:
491 _out, _err = self.run_git_command(
492 wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs,
493 fail_on_stderr=False,
494 _copts=['-c', 'core.askpass=""'],
495 extra_env={'GIT_TERMINAL_PROMPT': '0'})
496
497 return remote_refs
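A sketch of the ls-remote parsing step above, using made-up output lines; peeled tag entries ending in '^{}' and HEAD are skipped, and every surviving ref becomes a 'ref:ref' fetch refspec:

import collections
# Hypothetical `git ls-remote` output ("<sha>\t<ref>" per line):
output = (
    'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\tHEAD\n'
    'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\trefs/heads/master\n'
    'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb\trefs/tags/v1.0\n'
    'cccccccccccccccccccccccccccccccccccccccc\trefs/tags/v1.0^{}\n'
)
remote_refs = collections.OrderedDict()
fetch_refs = []
for ref_line in output.splitlines():
    sha, ref = ref_line.split('\t')
    if ref == 'HEAD' or ref.endswith('^{}'):
        continue  # mirrors the HEAD / peeled-ref filtering above
    remote_refs[ref] = sha
    fetch_refs.append('{}:{}'.format(ref, ref))
# fetch_refs == ['refs/heads/master:refs/heads/master',
#                'refs/tags/v1.0:refs/tags/v1.0']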
449 498
450 499 @reraise_safe_exceptions
451 500 def sync_push(self, wire, url, refs=None):
452 if self.check_url(url, wire):
453 repo = self._factory.repo(wire)
454 self.run_git_command(
455 wire, ['push', url, '--mirror'], fail_on_stderr=False,
456 _copts=['-c', 'core.askpass=""'],
457 extra_env={'GIT_TERMINAL_PROMPT': '0'})
501 if not self.check_url(url, wire):
502 return
503
504 repo = self._factory.repo(wire)
505 self.run_git_command(
506 wire, ['push', url, '--mirror'], fail_on_stderr=False,
507 _copts=['-c', 'core.askpass=""'],
508 extra_env={'GIT_TERMINAL_PROMPT': '0'})
458 509
459 510 @reraise_safe_exceptions
460 511 def get_remote_refs(self, wire, url):
461 512 repo = Repo(url)
462 513 return repo.get_refs()
463 514
464 515 @reraise_safe_exceptions
465 516 def get_description(self, wire):
466 517 repo = self._factory.repo(wire)
467 518 return repo.get_description()
468 519
469 520 @reraise_safe_exceptions
470 521 def get_file_history(self, wire, file_path, commit_id, limit):
471 522 repo = self._factory.repo(wire)
472 523 include = [commit_id]
473 524 paths = [file_path]
474 525
475 526 walker = repo.get_walker(include, paths=paths, max_entries=limit)
476 527 return [x.commit.id for x in walker]
477 528
478 529 @reraise_safe_exceptions
479 530 def get_missing_revs(self, wire, rev1, rev2, path2):
480 531 repo = self._factory.repo(wire)
481 532 LocalGitClient(thin_packs=False).fetch(path2, repo)
482 533
483 534 wire_remote = wire.copy()
484 535 wire_remote['path'] = path2
485 536 repo_remote = self._factory.repo(wire_remote)
486 537 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
487 538
488 539 revs = [
489 540 x.commit.id
490 541 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
491 542 return revs
492 543
493 544 @reraise_safe_exceptions
494 545 def get_object(self, wire, sha):
495 546 repo = self._factory.repo(wire)
496 547 obj = repo.get_object(sha)
497 548 commit_id = obj.id
498 549
499 550 if isinstance(obj, Tag):
500 551 commit_id = obj.object[1]
501 552
502 553 return {
503 554 'id': obj.id,
504 555 'type': obj.type_name,
505 556 'commit_id': commit_id
506 557 }
507 558
508 559 @reraise_safe_exceptions
509 560 def get_object_attrs(self, wire, sha, *attrs):
510 561 repo = self._factory.repo(wire)
511 562 obj = repo.get_object(sha)
512 563 return list(getattr(obj, a) for a in attrs)
513 564
514 565 @reraise_safe_exceptions
515 566 def get_refs(self, wire):
516 567 repo = self._factory.repo(wire)
517 568 result = {}
518 569 for ref, sha in repo.refs.as_dict().items():
519 570 peeled_sha = repo.get_peeled(ref)
520 571 result[ref] = peeled_sha
521 572 return result
522 573
523 574 @reraise_safe_exceptions
524 575 def get_refs_path(self, wire):
525 576 repo = self._factory.repo(wire)
526 577 return repo.refs.path
527 578
528 579 @reraise_safe_exceptions
529 580 def head(self, wire, show_exc=True):
530 581 repo = self._factory.repo(wire)
531 582 try:
532 583 return repo.head()
533 584 except Exception:
534 585 if show_exc:
535 586 raise
536 587
537 588 @reraise_safe_exceptions
538 589 def init(self, wire):
539 590 repo_path = str_to_dulwich(wire['path'])
540 591 self.repo = Repo.init(repo_path)
541 592
542 593 @reraise_safe_exceptions
543 594 def init_bare(self, wire):
544 595 repo_path = str_to_dulwich(wire['path'])
545 596 self.repo = Repo.init_bare(repo_path)
546 597
547 598 @reraise_safe_exceptions
548 599 def revision(self, wire, rev):
549 600 repo = self._factory.repo(wire)
550 601 obj = repo[rev]
551 602 obj_data = {
552 603 'id': obj.id,
553 604 }
554 605 try:
555 606 obj_data['tree'] = obj.tree
556 607 except AttributeError:
557 608 pass
558 609 return obj_data
559 610
560 611 @reraise_safe_exceptions
561 612 def commit_attribute(self, wire, rev, attr):
562 613 repo = self._factory.repo(wire)
563 614 obj = repo[rev]
564 615 return getattr(obj, attr)
565 616
566 617 @reraise_safe_exceptions
567 618 def set_refs(self, wire, key, value):
568 619 repo = self._factory.repo(wire)
569 620 repo.refs[key] = value
570 621
571 622 @reraise_safe_exceptions
572 623 def remove_ref(self, wire, key):
573 624 repo = self._factory.repo(wire)
574 625 del repo.refs[key]
575 626
576 627 @reraise_safe_exceptions
577 628 def tree_changes(self, wire, source_id, target_id):
578 629 repo = self._factory.repo(wire)
579 630 source = repo[source_id].tree if source_id else None
580 631 target = repo[target_id].tree
581 632 result = repo.object_store.tree_changes(source, target)
582 633 return list(result)
583 634
584 635 @reraise_safe_exceptions
585 636 def tree_items(self, wire, tree_id):
586 637 repo = self._factory.repo(wire)
587 638 tree = repo[tree_id]
588 639
589 640 result = []
590 641 for item in tree.iteritems():
591 642 item_sha = item.sha
592 643 item_mode = item.mode
593 644
594 645 if FILE_MODE(item_mode) == GIT_LINK:
595 646 item_type = "link"
596 647 else:
597 648 item_type = repo[item_sha].type_name
598 649
599 650 result.append((item.path, item_mode, item_sha, item_type))
600 651 return result
601 652
602 653 @reraise_safe_exceptions
603 654 def update_server_info(self, wire):
604 655 repo = self._factory.repo(wire)
605 656 update_server_info(repo)
606 657
607 658 @reraise_safe_exceptions
608 659 def discover_git_version(self):
609 660 stdout, _ = self.run_git_command(
610 661 {}, ['--version'], _bare=True, _safe=True)
611 662 prefix = 'git version'
612 663 if stdout.startswith(prefix):
613 664 stdout = stdout[len(prefix):]
614 665 return stdout.strip()
615 666
616 667 @reraise_safe_exceptions
617 668 def run_git_command(self, wire, cmd, **opts):
618 669 path = wire.get('path', None)
619 670
620 671 if path and os.path.isdir(path):
621 672 opts['cwd'] = path
622 673
623 674 if '_bare' in opts:
624 675 _copts = []
625 676 del opts['_bare']
626 677 else:
627 678 _copts = ['-c', 'core.quotepath=false', ]
628 679 safe_call = False
629 680 if '_safe' in opts:
630 681 # no exc on failure
631 682 del opts['_safe']
632 683 safe_call = True
633 684
634 685 if '_copts' in opts:
635 686 _copts.extend(opts['_copts'] or [])
636 687 del opts['_copts']
637 688
638 689 gitenv = os.environ.copy()
639 690 gitenv.update(opts.pop('extra_env', {}))
640 691 # need to clean/fix GIT_DIR!
641 692 if 'GIT_DIR' in gitenv:
642 693 del gitenv['GIT_DIR']
643 694 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
644 695 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
645 696
646 697 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
698 _opts = {'env': gitenv, 'shell': False}
647 699
648 700 try:
649 _opts = {'env': gitenv, 'shell': False}
650 701 _opts.update(opts)
651 702 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
652 703
653 704 return ''.join(p), ''.join(p.error)
654 705 except (EnvironmentError, OSError) as err:
655 706 cmd = ' '.join(cmd) # human friendly CMD
656 707 tb_err = ("Couldn't run git command (%s).\n"
657 "Original error was:%s\n" % (cmd, err))
708 "Original error was:%s\n"
709 "Call options:%s\n"
710 % (cmd, err, _opts))
658 711 log.exception(tb_err)
659 712 if safe_call:
660 713 return '', err
661 714 else:
662 715 raise exceptions.VcsException()(tb_err)
663 716
664 717 @reraise_safe_exceptions
665 718 def install_hooks(self, wire, force=False):
666 719 from vcsserver.hook_utils import install_git_hooks
667 720 repo = self._factory.repo(wire)
668 721 return install_git_hooks(repo.path, repo.bare, force_create=force)
669 722
670 723
671 724 def str_to_dulwich(value):
672 725 """
673 726 Dulwich 0.10.1a requires `unicode` objects to be passed in.
674 727 """
675 728 return value.decode(settings.WIRE_ENCODING)
@@ -1,793 +1,795 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 makepeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 baseui.setconfig('ui', 'paginate', 'never')
57 57 # force mercurial to only use 1 thread, otherwise it may try to set a
58 58 # signal in a non-main thread, thus generating a ValueError.
59 59 baseui.setconfig('worker', 'numcpus', 1)
60 60
61 61 # If there is no config for the largefiles extension, we explicitly disable
62 62 # it here. This overrides settings from the repository's hgrc file. Recent
63 63 # mercurial versions enable largefiles in hgrc on clone from largefile
64 64 # repo.
65 65 if not baseui.hasconfig('extensions', 'largefiles'):
66 66 log.debug('Explicitly disable largefiles extension for repo.')
67 67 baseui.setconfig('extensions', 'largefiles', '!')
68 68
69 69 return baseui
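A small, hypothetical config list showing the (section, option, value) triples this helper expects; having any 'extensions'/'largefiles' entry is what prevents the explicit disabling above:

# Illustrative repo_config triples (values are made up):
repo_config = [
    ('ui', 'username', 'RhodeCode <admin@example.com>'),
    ('phases', 'publish', 'false'),
    ('extensions', 'largefiles', ''),  # present, so not force-disabled above
]
# baseui = make_ui_from_config(repo_config)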
70 70
71 71
72 72 def reraise_safe_exceptions(func):
73 73 """Decorator for converting mercurial exceptions to something neutral."""
74 74 def wrapper(*args, **kwargs):
75 75 try:
76 76 return func(*args, **kwargs)
77 77 except (Abort, InterventionRequired) as e:
78 78 raise_from_original(exceptions.AbortException(e))
79 79 except RepoLookupError as e:
80 80 raise_from_original(exceptions.LookupException(e))
81 81 except RequirementError as e:
82 82 raise_from_original(exceptions.RequirementException(e))
83 83 except RepoError as e:
84 84 raise_from_original(exceptions.VcsException(e))
85 85 except LookupError as e:
86 86 raise_from_original(exceptions.LookupException(e))
87 87 except Exception as e:
88 88 if not hasattr(e, '_vcs_kind'):
89 89 log.exception("Unhandled exception in hg remote call")
90 90 raise_from_original(exceptions.UnhandledException(e))
91 91
92 92 raise
93 93 return wrapper
94 94
95 95
96 96 class MercurialFactory(RepoFactory):
97 97 repo_type = 'hg'
98 98
99 99 def _create_config(self, config, hooks=True):
100 100 if not hooks:
101 101 hooks_to_clean = frozenset((
102 102 'changegroup.repo_size', 'preoutgoing.pre_pull',
103 103 'outgoing.pull_logger', 'prechangegroup.pre_push'))
104 104 new_config = []
105 105 for section, option, value in config:
106 106 if section == 'hooks' and option in hooks_to_clean:
107 107 continue
108 108 new_config.append((section, option, value))
109 109 config = new_config
110 110
111 111 baseui = make_ui_from_config(config)
112 112 return baseui
113 113
114 114 def _create_repo(self, wire, create):
115 115 baseui = self._create_config(wire["config"])
116 116 return localrepository(baseui, wire["path"], create)
117 117
118 118
119 119 class HgRemote(object):
120 120
121 121 def __init__(self, factory):
122 122 self._factory = factory
123 123
124 124 self._bulk_methods = {
125 125 "affected_files": self.ctx_files,
126 126 "author": self.ctx_user,
127 127 "branch": self.ctx_branch,
128 128 "children": self.ctx_children,
129 129 "date": self.ctx_date,
130 130 "message": self.ctx_description,
131 131 "parents": self.ctx_parents,
132 132 "status": self.ctx_status,
133 133 "obsolete": self.ctx_obsolete,
134 134 "phase": self.ctx_phase,
135 135 "hidden": self.ctx_hidden,
136 136 "_file_paths": self.ctx_list,
137 137 }
138 138
139 139 @reraise_safe_exceptions
140 140 def discover_hg_version(self):
141 141 from mercurial import util
142 142 return util.version()
143 143
144 144 @reraise_safe_exceptions
145 145 def archive_repo(self, archive_path, mtime, file_info, kind):
146 146 if kind == "tgz":
147 147 archiver = archival.tarit(archive_path, mtime, "gz")
148 148 elif kind == "tbz2":
149 149 archiver = archival.tarit(archive_path, mtime, "bz2")
150 150 elif kind == 'zip':
151 151 archiver = archival.zipit(archive_path, mtime)
152 152 else:
153 153 raise exceptions.ArchiveException()(
154 154 'Remote does not support: "%s".' % kind)
155 155
156 156 for f_path, f_mode, f_is_link, f_content in file_info:
157 157 archiver.addfile(f_path, f_mode, f_is_link, f_content)
158 158 archiver.done()
159 159
160 160 @reraise_safe_exceptions
161 161 def bookmarks(self, wire):
162 162 repo = self._factory.repo(wire)
163 163 return dict(repo._bookmarks)
164 164
165 165 @reraise_safe_exceptions
166 166 def branches(self, wire, normal, closed):
167 167 repo = self._factory.repo(wire)
168 168 iter_branches = repo.branchmap().iterbranches()
169 169 bt = {}
170 170 for branch_name, _heads, tip, is_closed in iter_branches:
171 171 if normal and not is_closed:
172 172 bt[branch_name] = tip
173 173 if closed and is_closed:
174 174 bt[branch_name] = tip
175 175
176 176 return bt
177 177
178 178 @reraise_safe_exceptions
179 179 def bulk_request(self, wire, rev, pre_load):
180 180 result = {}
181 181 for attr in pre_load:
182 182 try:
183 183 method = self._bulk_methods[attr]
184 184 result[attr] = method(wire, rev)
185 185 except KeyError as e:
186 186 raise exceptions.VcsException(e)(
187 187 'Unknown bulk attribute: "%s"' % attr)
188 188 return result
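The dispatch pattern above, reduced to a standalone sketch with stand-in callables (names and values are made up):

bulk_methods = {
    'author': lambda wire, rev: 'Jane Doe <jane@example.com>',
    'branch': lambda wire, rev: 'default',
}

def sketch_bulk_request(wire, rev, pre_load):
    # map each requested attribute to its handler, failing loudly on unknowns
    result = {}
    for attr in pre_load:
        try:
            result[attr] = bulk_methods[attr](wire, rev)
        except KeyError:
            raise ValueError('Unknown bulk attribute: "%s"' % attr)
    return result

# sketch_bulk_request({}, '0' * 40, ['author', 'branch'])
# -> {'author': 'Jane Doe <jane@example.com>', 'branch': 'default'}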
189 189
190 190 @reraise_safe_exceptions
191 191 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
192 192 baseui = self._factory._create_config(wire["config"], hooks=hooks)
193 193 clone(baseui, source, dest, noupdate=not update_after_clone)
194 194
195 195 @reraise_safe_exceptions
196 196 def commitctx(
197 197 self, wire, message, parents, commit_time, commit_timezone,
198 198 user, files, extra, removed, updated):
199 199
200 200 def _filectxfn(_repo, memctx, path):
201 201 """
202 202 Marks the given path as added/changed/removed in the given _repo. This is
203 203 for the internal mercurial commit function.
204 204 """
205 205
206 206 # check if this path is removed
207 207 if path in removed:
208 208 # returning None is a way to mark node for removal
209 209 return None
210 210
211 211 # check if this path is added
212 212 for node in updated:
213 213 if node['path'] == path:
214 214 return memfilectx(
215 215 _repo,
216 216 changectx=memctx,
217 217 path=node['path'],
218 218 data=node['content'],
219 219 islink=False,
220 220 isexec=bool(node['mode'] & stat.S_IXUSR),
221 221 copied=False)
222 222
223 223 raise exceptions.AbortException()(
224 224 "Given path haven't been marked as added, "
225 225 "changed or removed (%s)" % path)
226 226
227 227 repo = self._factory.repo(wire)
228 228
229 229 commit_ctx = memctx(
230 230 repo=repo,
231 231 parents=parents,
232 232 text=message,
233 233 files=files,
234 234 filectxfn=_filectxfn,
235 235 user=user,
236 236 date=(commit_time, commit_timezone),
237 237 extra=extra)
238 238
239 239 n = repo.commitctx(commit_ctx)
240 240 new_id = hex(n)
241 241
242 242 return new_id
243 243
244 244 @reraise_safe_exceptions
245 245 def ctx_branch(self, wire, revision):
246 246 repo = self._factory.repo(wire)
247 247 ctx = repo[revision]
248 248 return ctx.branch()
249 249
250 250 @reraise_safe_exceptions
251 251 def ctx_children(self, wire, revision):
252 252 repo = self._factory.repo(wire)
253 253 ctx = repo[revision]
254 254 return [child.rev() for child in ctx.children()]
255 255
256 256 @reraise_safe_exceptions
257 257 def ctx_date(self, wire, revision):
258 258 repo = self._factory.repo(wire)
259 259 ctx = repo[revision]
260 260 return ctx.date()
261 261
262 262 @reraise_safe_exceptions
263 263 def ctx_description(self, wire, revision):
264 264 repo = self._factory.repo(wire)
265 265 ctx = repo[revision]
266 266 return ctx.description()
267 267
268 268 @reraise_safe_exceptions
269 269 def ctx_diff(
270 270 self, wire, revision, git=True, ignore_whitespace=True, context=3):
271 271 repo = self._factory.repo(wire)
272 272 ctx = repo[revision]
273 273 result = ctx.diff(
274 274 git=git, ignore_whitespace=ignore_whitespace, context=context)
275 275 return list(result)
276 276
277 277 @reraise_safe_exceptions
278 278 def ctx_files(self, wire, revision):
279 279 repo = self._factory.repo(wire)
280 280 ctx = repo[revision]
281 281 return ctx.files()
282 282
283 283 @reraise_safe_exceptions
284 284 def ctx_list(self, path, revision):
285 285 repo = self._factory.repo(path)
286 286 ctx = repo[revision]
287 287 return list(ctx)
288 288
289 289 @reraise_safe_exceptions
290 290 def ctx_parents(self, wire, revision):
291 291 repo = self._factory.repo(wire)
292 292 ctx = repo[revision]
293 293 return [parent.rev() for parent in ctx.parents()]
294 294
295 295 @reraise_safe_exceptions
296 296 def ctx_phase(self, wire, revision):
297 297 repo = self._factory.repo(wire)
298 298 ctx = repo[revision]
299 299 # public=0, draft=1, secret=3
300 300 return ctx.phase()
301 301
302 302 @reraise_safe_exceptions
303 303 def ctx_obsolete(self, wire, revision):
304 304 repo = self._factory.repo(wire)
305 305 ctx = repo[revision]
306 306 return ctx.obsolete()
307 307
308 308 @reraise_safe_exceptions
309 309 def ctx_hidden(self, wire, revision):
310 310 repo = self._factory.repo(wire)
311 311 ctx = repo[revision]
312 312 return ctx.hidden()
313 313
314 314 @reraise_safe_exceptions
315 315 def ctx_substate(self, wire, revision):
316 316 repo = self._factory.repo(wire)
317 317 ctx = repo[revision]
318 318 return ctx.substate
319 319
320 320 @reraise_safe_exceptions
321 321 def ctx_status(self, wire, revision):
322 322 repo = self._factory.repo(wire)
323 323 ctx = repo[revision]
324 324 status = repo[ctx.p1().node()].status(other=ctx.node())
325 325 # the status object (an odd, custom named tuple in mercurial) is not
326 326 # correctly serializable; we make it a list, as the underlying
327 327 # API expects this to be a list
328 328 return list(status)
329 329
330 330 @reraise_safe_exceptions
331 331 def ctx_user(self, wire, revision):
332 332 repo = self._factory.repo(wire)
333 333 ctx = repo[revision]
334 334 return ctx.user()
335 335
336 336 @reraise_safe_exceptions
337 337 def check_url(self, url, config):
338 338 _proto = None
339 339 if '+' in url[:url.find('://')]:
340 340 _proto = url[0:url.find('+')]
341 341 url = url[url.find('+') + 1:]
342 342 handlers = []
343 343 url_obj = url_parser(url)
344 344 test_uri, authinfo = url_obj.authinfo()
345 345 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
346 346 url_obj.query = obfuscate_qs(url_obj.query)
347 347
348 348 cleaned_uri = str(url_obj)
349 349 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
350 350
351 351 if authinfo:
352 352 # create a password manager
353 353 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
354 354 passmgr.add_password(*authinfo)
355 355
356 356 handlers.extend((httpbasicauthhandler(passmgr),
357 357 httpdigestauthhandler(passmgr)))
358 358
359 359 o = urllib2.build_opener(*handlers)
360 360 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
361 361 ('Accept', 'application/mercurial-0.1')]
362 362
363 363 q = {"cmd": 'between'}
364 364 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
365 365 qs = '?%s' % urllib.urlencode(q)
366 366 cu = "%s%s" % (test_uri, qs)
367 367 req = urllib2.Request(cu, None, {})
368 368
369 369 try:
370 370 log.debug("Trying to open URL %s", cleaned_uri)
371 371 resp = o.open(req)
372 372 if resp.code != 200:
373 373 raise exceptions.URLError()('Return Code is not 200')
374 374 except Exception as e:
375 375 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
376 376 # means it cannot be cloned
377 377 raise exceptions.URLError(e)("[%s] org_exc: %s" % (cleaned_uri, e))
378 378
379 379 # now check if it's a proper hg repo, but don't do it for svn
380 380 try:
381 381 if _proto == 'svn':
382 382 pass
383 383 else:
384 384 # check for pure hg repos
385 385 log.debug(
386 386 "Verifying if URL is a Mercurial repository: %s",
387 387 cleaned_uri)
388 388 ui = make_ui_from_config(config)
389 389 peer_checker = makepeer(ui, url)
390 390 peer_checker.lookup('tip')
391 391 except Exception as e:
392 392 log.warning("URL is not a valid Mercurial repository: %s",
393 393 cleaned_uri)
394 394 raise exceptions.URLError(e)(
395 395 "url [%s] does not look like an hg repo org_exc: %s"
396 396 % (cleaned_uri, e))
397 397
398 398 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
399 399 return True
400 400
401 401 @reraise_safe_exceptions
402 402 def diff(
403 403 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
404 404 context):
405 405 repo = self._factory.repo(wire)
406 406
407 407 if file_filter:
408 408 match_filter = match(file_filter[0], '', [file_filter[1]])
409 409 else:
410 410 match_filter = file_filter
411 411 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
412 412
413 413 try:
414 414 return "".join(patch.diff(
415 415 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
416 416 except RepoLookupError as e:
417 417 raise exceptions.LookupException(e)()
418 418
419 419 @reraise_safe_exceptions
420 420 def file_history(self, wire, revision, path, limit):
421 421 repo = self._factory.repo(wire)
422 422
423 423 ctx = repo[revision]
424 424 fctx = ctx.filectx(path)
425 425
426 426 def history_iter():
427 427 limit_rev = fctx.rev()
428 428 for obj in reversed(list(fctx.filelog())):
429 429 obj = fctx.filectx(obj)
430 430 if limit_rev >= obj.rev():
431 431 yield obj
432 432
433 433 history = []
434 434 for cnt, obj in enumerate(history_iter()):
435 435 if limit and cnt >= limit:
436 436 break
437 437 history.append(hex(obj.node()))
438 438
439 439 return [x for x in history]
440 440
441 441 @reraise_safe_exceptions
442 442 def file_history_untill(self, wire, revision, path, limit):
443 443 repo = self._factory.repo(wire)
444 444 ctx = repo[revision]
445 445 fctx = ctx.filectx(path)
446 446
447 447 file_log = list(fctx.filelog())
448 448 if limit:
449 449 # Limit to the last n items
450 450 file_log = file_log[-limit:]
451 451
452 452 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
453 453
454 454 @reraise_safe_exceptions
455 455 def fctx_annotate(self, wire, revision, path):
456 456 repo = self._factory.repo(wire)
457 457 ctx = repo[revision]
458 458 fctx = ctx.filectx(path)
459 459
460 460 result = []
461 461 for i, annotate_obj in enumerate(fctx.annotate(), 1):
462 462 ln_no = i
463 463 sha = hex(annotate_obj.fctx.node())
464 464 content = annotate_obj.text
465 465 result.append((ln_no, sha, content))
466 466 return result
467 467
468 468 @reraise_safe_exceptions
469 469 def fctx_data(self, wire, revision, path):
470 470 repo = self._factory.repo(wire)
471 471 ctx = repo[revision]
472 472 fctx = ctx.filectx(path)
473 473 return fctx.data()
474 474
475 475 @reraise_safe_exceptions
476 476 def fctx_flags(self, wire, revision, path):
477 477 repo = self._factory.repo(wire)
478 478 ctx = repo[revision]
479 479 fctx = ctx.filectx(path)
480 480 return fctx.flags()
481 481
482 482 @reraise_safe_exceptions
483 483 def fctx_size(self, wire, revision, path):
484 484 repo = self._factory.repo(wire)
485 485 ctx = repo[revision]
486 486 fctx = ctx.filectx(path)
487 487 return fctx.size()
488 488
489 489 @reraise_safe_exceptions
490 490 def get_all_commit_ids(self, wire, name):
491 491 repo = self._factory.repo(wire)
492 492 revs = repo.filtered(name).changelog.index
493 493 return map(lambda x: hex(x[7]), revs)[:-1]
494 494
495 495 @reraise_safe_exceptions
496 496 def get_config_value(self, wire, section, name, untrusted=False):
497 497 repo = self._factory.repo(wire)
498 498 return repo.ui.config(section, name, untrusted=untrusted)
499 499
500 500 @reraise_safe_exceptions
501 501 def get_config_bool(self, wire, section, name, untrusted=False):
502 502 repo = self._factory.repo(wire)
503 503 return repo.ui.configbool(section, name, untrusted=untrusted)
504 504
505 505 @reraise_safe_exceptions
506 506 def get_config_list(self, wire, section, name, untrusted=False):
507 507 repo = self._factory.repo(wire)
508 508 return repo.ui.configlist(section, name, untrusted=untrusted)
509 509
510 510 @reraise_safe_exceptions
511 511 def is_large_file(self, wire, path):
512 512 return largefiles.lfutil.isstandin(path)
513 513
514 514 @reraise_safe_exceptions
515 515 def in_largefiles_store(self, wire, sha):
516 516 repo = self._factory.repo(wire)
517 517 return largefiles.lfutil.instore(repo, sha)
518 518
519 519 @reraise_safe_exceptions
520 520 def in_user_cache(self, wire, sha):
521 521 repo = self._factory.repo(wire)
522 522 return largefiles.lfutil.inusercache(repo.ui, sha)
523 523
524 524 @reraise_safe_exceptions
525 525 def store_path(self, wire, sha):
526 526 repo = self._factory.repo(wire)
527 527 return largefiles.lfutil.storepath(repo, sha)
528 528
529 529 @reraise_safe_exceptions
530 530 def link(self, wire, sha, path):
531 531 repo = self._factory.repo(wire)
532 532 largefiles.lfutil.link(
533 533 largefiles.lfutil.usercachepath(repo.ui, sha), path)
534 534
535 535 @reraise_safe_exceptions
536 536 def localrepository(self, wire, create=False):
537 537 self._factory.repo(wire, create=create)
538 538
539 539 @reraise_safe_exceptions
540 540 def lookup(self, wire, revision, both):
541 541
542 542 repo = self._factory.repo(wire)
543 543
544 544 if isinstance(revision, int):
545 545 # NOTE(marcink):
546 546 # since Mercurial doesn't support indexes properly
547 547 # we need to shift accordingly by one to get the proper index, e.g.
548 548 # repo[-1] => repo[-2]
549 549 # repo[0] => repo[-1]
550 550 # repo[1] => repo[2]; we also never call repo[0] because
551 551 # it's actually the second commit
552 552 if revision <= 0:
553 553 revision = revision + -1
554 554 else:
555 555 revision = revision + 1
556 556
557 557 try:
558 558 ctx = repo[revision]
559 559 except RepoLookupError as e:
560 560 raise exceptions.LookupException(e)(revision)
561 561 except LookupError as e:
562 562 raise exceptions.LookupException(e)(e.name)
563 563
564 564 if not both:
565 565 return ctx.hex()
566 566
567 567 ctx = repo[ctx.hex()]
568 568 return ctx.hex(), ctx.rev()
569 569
570 570 @reraise_safe_exceptions
571 571 def pull(self, wire, url, commit_ids=None):
572 572 repo = self._factory.repo(wire)
573 573 # Disable any prompts for this repo
574 574 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
575 575
576 576 remote = peer(repo, {}, url)
577 577 # Disable any prompts for this remote
578 578 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
579 579
580 580 if commit_ids:
581 581 commit_ids = [bin(commit_id) for commit_id in commit_ids]
582 582
583 583 return exchange.pull(
584 584 repo, remote, heads=commit_ids, force=None).cgresult
585 585
586 586 @reraise_safe_exceptions
587 587 def sync_push(self, wire, url):
588 if self.check_url(url, wire['config']):
589 repo = self._factory.repo(wire)
588 if not self.check_url(url, wire['config']):
589 return
590 590
591 # Disable any prompts for this repo
592 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
591 repo = self._factory.repo(wire)
592
593 # Disable any prompts for this repo
594 repo.ui.setconfig('ui', 'interactive', 'off', '-y')
593 595
594 bookmarks = dict(repo._bookmarks).keys()
595 remote = peer(repo, {}, url)
596 # Disable any prompts for this remote
597 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
596 bookmarks = dict(repo._bookmarks).keys()
597 remote = peer(repo, {}, url)
598 # Disable any prompts for this remote
599 remote.ui.setconfig('ui', 'interactive', 'off', '-y')
598 600
599 return exchange.push(
600 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
601 return exchange.push(
602 repo, remote, newbranch=True, bookmarks=bookmarks).cgresult
601 603
602 604 @reraise_safe_exceptions
603 605 def revision(self, wire, rev):
604 606 repo = self._factory.repo(wire)
605 607 ctx = repo[rev]
606 608 return ctx.rev()
607 609
608 610 @reraise_safe_exceptions
609 611 def rev_range(self, wire, filter):
610 612 repo = self._factory.repo(wire)
611 613 revisions = [rev for rev in revrange(repo, filter)]
612 614 return revisions
613 615
614 616 @reraise_safe_exceptions
615 617 def rev_range_hash(self, wire, node):
616 618 repo = self._factory.repo(wire)
617 619
618 620 def get_revs(repo, rev_opt):
619 621 if rev_opt:
620 622 revs = revrange(repo, rev_opt)
621 623 if len(revs) == 0:
622 624 return (nullrev, nullrev)
623 625 return max(revs), min(revs)
624 626 else:
625 627 return len(repo) - 1, 0
626 628
627 629 stop, start = get_revs(repo, [node + ':'])
628 630 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
629 631 return revs
630 632
631 633 @reraise_safe_exceptions
632 634 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
633 635 other_path = kwargs.pop('other_path', None)
634 636
635 637 # case when we want to compare two independent repositories
636 638 if other_path and other_path != wire["path"]:
637 639 baseui = self._factory._create_config(wire["config"])
638 640 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
639 641 else:
640 642 repo = self._factory.repo(wire)
641 643 return list(repo.revs(rev_spec, *args))
642 644
643 645 @reraise_safe_exceptions
644 646 def strip(self, wire, revision, update, backup):
645 647 repo = self._factory.repo(wire)
646 648 ctx = repo[revision]
647 649 hgext_strip(
648 650 repo.baseui, repo, ctx.node(), update=update, backup=backup)
649 651
650 652 @reraise_safe_exceptions
651 653 def verify(self, wire,):
652 654 repo = self._factory.repo(wire)
653 655 baseui = self._factory._create_config(wire['config'])
654 656 baseui.setconfig('ui', 'quiet', 'false')
655 657 output = io.BytesIO()
656 658
657 659 def write(data, **unused_kwargs):
658 660 output.write(data)
659 661 baseui.write = write
660 662
661 663 repo.ui = baseui
662 664 verify.verify(repo)
663 665 return output.getvalue()
664 666
665 667 @reraise_safe_exceptions
666 668 def tag(self, wire, name, revision, message, local, user,
667 669 tag_time, tag_timezone):
668 670 repo = self._factory.repo(wire)
669 671 ctx = repo[revision]
670 672 node = ctx.node()
671 673
672 674 date = (tag_time, tag_timezone)
673 675 try:
674 676 hg_tag.tag(repo, name, node, message, local, user, date)
675 677 except Abort as e:
676 678 log.exception("Tag operation aborted")
677 679 # Exception can contain unicode which we convert
678 680 raise exceptions.AbortException(e)(repr(e))
679 681
680 682 @reraise_safe_exceptions
681 683 def tags(self, wire):
682 684 repo = self._factory.repo(wire)
683 685 return repo.tags()
684 686
685 687 @reraise_safe_exceptions
686 688 def update(self, wire, node=None, clean=False):
687 689 repo = self._factory.repo(wire)
688 690 baseui = self._factory._create_config(wire['config'])
689 691 commands.update(baseui, repo, node=node, clean=clean)
690 692
691 693 @reraise_safe_exceptions
692 694 def identify(self, wire):
693 695 repo = self._factory.repo(wire)
694 696 baseui = self._factory._create_config(wire['config'])
695 697 output = io.BytesIO()
696 698 baseui.write = output.write
697 699 # This is required to get a full node id
698 700 baseui.debugflag = True
699 701 commands.identify(baseui, repo, id=True)
700 702
701 703 return output.getvalue()
702 704
703 705 @reraise_safe_exceptions
704 706 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
705 707 hooks=True):
706 708 repo = self._factory.repo(wire)
707 709 baseui = self._factory._create_config(wire['config'], hooks=hooks)
708 710
709 711 # Mercurial internally has a lot of logic that only checks whether an
710 712 # option is defined, so we pass options only when they have a value
711 713 opts = {}
712 714 if bookmark:
713 715 opts['bookmark'] = bookmark
714 716 if branch:
715 717 opts['branch'] = branch
716 718 if revision:
717 719 opts['rev'] = revision
718 720
719 721 commands.pull(baseui, repo, source, **opts)
720 722
721 723 @reraise_safe_exceptions
722 724 def heads(self, wire, branch=None):
723 725 repo = self._factory.repo(wire)
724 726 baseui = self._factory._create_config(wire['config'])
725 727 output = io.BytesIO()
726 728
727 729 def write(data, **unused_kwargs):
728 730 output.write(data)
729 731
730 732 baseui.write = write
731 733 if branch:
732 734 args = [branch]
733 735 else:
734 736 args = []
735 737 commands.heads(baseui, repo, template='{node} ', *args)
736 738
737 739 return output.getvalue()
738 740
739 741 @reraise_safe_exceptions
740 742 def ancestor(self, wire, revision1, revision2):
741 743 repo = self._factory.repo(wire)
742 744 changelog = repo.changelog
743 745 lookup = repo.lookup
744 746 a = changelog.ancestor(lookup(revision1), lookup(revision2))
745 747 return hex(a)
746 748
747 749 @reraise_safe_exceptions
748 750 def push(self, wire, revisions, dest_path, hooks=True,
749 751 push_branches=False):
750 752 repo = self._factory.repo(wire)
751 753 baseui = self._factory._create_config(wire['config'], hooks=hooks)
752 754 commands.push(baseui, repo, dest=dest_path, rev=revisions,
753 755 new_branch=push_branches)
754 756
755 757 @reraise_safe_exceptions
756 758 def merge(self, wire, revision):
757 759 repo = self._factory.repo(wire)
758 760 baseui = self._factory._create_config(wire['config'])
759 761 repo.ui.setconfig('ui', 'merge', 'internal:dump')
760 762
761 763 # When sub repositories are used, mercurial prompts the user in
762 764 # case of merge conflicts or different sub repository sources. By
763 765 # setting the interactive flag to `False` mercurial doesn't prompt the
764 766 # user but instead uses a default value.
765 767 repo.ui.setconfig('ui', 'interactive', False)
766 768
767 769 commands.merge(baseui, repo, rev=revision)
768 770
769 771 @reraise_safe_exceptions
770 772 def commit(self, wire, message, username, close_branch=False):
771 773 repo = self._factory.repo(wire)
772 774 baseui = self._factory._create_config(wire['config'])
773 775 repo.ui.setconfig('ui', 'username', username)
774 776 commands.commit(baseui, repo, message=message, close_branch=close_branch)
775 777
776 778 @reraise_safe_exceptions
777 779 def rebase(self, wire, source=None, dest=None, abort=False):
778 780 repo = self._factory.repo(wire)
779 781 baseui = self._factory._create_config(wire['config'])
780 782 repo.ui.setconfig('ui', 'merge', 'internal:dump')
781 783 rebase.rebase(
782 784 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
783 785
784 786 @reraise_safe_exceptions
785 787 def bookmark(self, wire, bookmark, revision=None):
786 788 repo = self._factory.repo(wire)
787 789 baseui = self._factory._create_config(wire['config'])
788 790 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
789 791
790 792 @reraise_safe_exceptions
791 793 def install_hooks(self, wire, force=False):
792 794 # we don't need any special hooks for Mercurial
793 795 pass
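The HgRemote methods above that need command output (verify, identify, heads) all follow the same pattern: swap baseui.write for a buffer-backed writer, run the Mercurial command, and return the captured bytes. A minimal sketch of that pattern; capture_ui_output and run_command are illustrative names, not part of the module:

    import io

    def capture_ui_output(baseui, run_command):
        # collect everything the Mercurial command writes through the ui object
        output = io.BytesIO()

        def write(data, **unused_kwargs):
            output.write(data)

        baseui.write = write
        run_command(baseui)   # e.g. lambda ui: commands.identify(ui, repo, id=True)
        return output.getvalue()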
@@ -1,658 +1,700 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 21 import os
22 22 import sys
23 23 import logging
24 24 import collections
25 25 import importlib
26 26 import base64
27 27
28 28 from httplib import HTTPConnection
29 29
30 30
31 31 import mercurial.scmutil
32 32 import mercurial.node
33 33 import simplejson as json
34 34
35 35 from vcsserver import exceptions, subprocessio, settings
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39
40 40 class HooksHttpClient(object):
41 41 connection = None
42 42
43 43 def __init__(self, hooks_uri):
44 44 self.hooks_uri = hooks_uri
45 45
46 46 def __call__(self, method, extras):
47 47 connection = HTTPConnection(self.hooks_uri)
48 48 body = self._serialize(method, extras)
49 49 try:
50 50 connection.request('POST', '/', body)
51 51 except Exception:
52 52 log.error('Connection failed on %s', connection)
53 53 raise
54 54 response = connection.getresponse()
55 55 return json.loads(response.read())
56 56
57 57 def _serialize(self, hook_name, extras):
58 58 data = {
59 59 'method': hook_name,
60 60 'extras': extras
61 61 }
62 62 return json.dumps(data)
63 63
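For reference, the request body built by _serialize above is a plain JSON envelope posted to the hooks HTTP endpoint, and the response is JSON as well. A hedged stand-alone equivalent using only httplib; the host:port value is made up:

    import json
    from httplib import HTTPConnection

    def call_hook(hooks_uri, method, extras):
        # same envelope as HooksHttpClient._serialize builds
        body = json.dumps({'method': method, 'extras': extras})
        connection = HTTPConnection(hooks_uri)      # e.g. '127.0.0.1:9200'
        connection.request('POST', '/', body)
        response = connection.getresponse()
        return json.loads(response.read())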
64 64
65 65 class HooksDummyClient(object):
66 66 def __init__(self, hooks_module):
67 67 self._hooks_module = importlib.import_module(hooks_module)
68 68
69 69 def __call__(self, hook_name, extras):
70 70 with self._hooks_module.Hooks() as hooks:
71 71 return getattr(hooks, hook_name)(extras)
72 72
73 73
74 74 class RemoteMessageWriter(object):
75 75 """Writer base class."""
76 76 def write(self, message):
77 77 raise NotImplementedError()
78 78
79 79
80 80 class HgMessageWriter(RemoteMessageWriter):
81 81 """Writer that knows how to send messages to mercurial clients."""
82 82
83 83 def __init__(self, ui):
84 84 self.ui = ui
85 85
86 86 def write(self, message):
87 87 # TODO: Check why the quiet flag is set by default.
88 88 old = self.ui.quiet
89 89 self.ui.quiet = False
90 90 self.ui.status(message.encode('utf-8'))
91 91 self.ui.quiet = old
92 92
93 93
94 94 class GitMessageWriter(RemoteMessageWriter):
95 95 """Writer that knows how to send messages to git clients."""
96 96
97 97 def __init__(self, stdout=None):
98 98 self.stdout = stdout or sys.stdout
99 99
100 100 def write(self, message):
101 101 self.stdout.write(message.encode('utf-8'))
102 102
103 103
104 104 class SvnMessageWriter(RemoteMessageWriter):
105 105 """Writer that knows how to send messages to svn clients."""
106 106
107 107 def __init__(self, stderr=None):
108 108 # SVN needs data sent to stderr for back-to-client messaging
109 109 self.stderr = stderr or sys.stderr
110 110
111 111 def write(self, message):
112 112 self.stderr.write(message.encode('utf-8'))
113 113
114 114
115 115 def _handle_exception(result):
116 116 exception_class = result.get('exception')
117 117 exception_traceback = result.get('exception_traceback')
118 118
119 119 if exception_traceback:
120 120 log.error('Got traceback from remote call:%s', exception_traceback)
121 121
122 122 if exception_class == 'HTTPLockedRC':
123 123 raise exceptions.RepositoryLockedException()(*result['exception_args'])
124 124 elif exception_class == 'HTTPBranchProtected':
125 125 raise exceptions.RepositoryBranchProtectedException()(*result['exception_args'])
126 126 elif exception_class == 'RepositoryError':
127 127 raise exceptions.VcsException()(*result['exception_args'])
128 128 elif exception_class:
129 129 raise Exception('Got remote exception "%s" with args "%s"' %
130 130 (exception_class, result['exception_args']))
131 131
132 132
133 133 def _get_hooks_client(extras):
134 134 if 'hooks_uri' in extras:
135 135 protocol = extras.get('hooks_protocol')
136 136 return HooksHttpClient(extras['hooks_uri'])
137 137 else:
138 138 return HooksDummyClient(extras['hooks_module'])
139 139
140 140
141 141 def _call_hook(hook_name, extras, writer):
142 142 hooks_client = _get_hooks_client(extras)
143 143 log.debug('Hooks, using client:%s', hooks_client)
144 144 result = hooks_client(hook_name, extras)
145 145 log.debug('Hooks got result: %s', result)
146 146
147 147 _handle_exception(result)
148 148 writer.write(result['output'])
149 149
150 150 return result['status']
151 151
152 152
153 153 def _extras_from_ui(ui):
154 154 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
155 155 if not hook_data:
156 156 # maybe it's inside environ ?
157 157 env_hook_data = os.environ.get('RC_SCM_DATA')
158 158 if env_hook_data:
159 159 hook_data = env_hook_data
160 160
161 161 extras = {}
162 162 if hook_data:
163 163 extras = json.loads(hook_data)
164 164 return extras
165 165
166 166
167 167 def _rev_range_hash(repo, node, check_heads=False):
168 168
169 169 commits = []
170 170 revs = []
171 171 start = repo[node].rev()
172 172 end = len(repo)
173 173 for rev in range(start, end):
174 174 revs.append(rev)
175 175 ctx = repo[rev]
176 176 commit_id = mercurial.node.hex(ctx.node())
177 177 branch = ctx.branch()
178 178 commits.append((commit_id, branch))
179 179
180 180 parent_heads = []
181 181 if check_heads:
182 182 parent_heads = _check_heads(repo, start, end, revs)
183 183 return commits, parent_heads
184 184
185 185
186 186 def _check_heads(repo, start, end, commits):
187 187 changelog = repo.changelog
188 188 parents = set()
189 189
190 190 for new_rev in commits:
191 191 for p in changelog.parentrevs(new_rev):
192 192 if p == mercurial.node.nullrev:
193 193 continue
194 194 if p < start:
195 195 parents.add(p)
196 196
197 197 for p in parents:
198 198 branch = repo[p].branch()
199 199 # The heads descending from that parent, on the same branch
200 200 parent_heads = set([p])
201 201 reachable = set([p])
202 202 for x in xrange(p + 1, end):
203 203 if repo[x].branch() != branch:
204 204 continue
205 205 for pp in changelog.parentrevs(x):
206 206 if pp in reachable:
207 207 reachable.add(x)
208 208 parent_heads.discard(pp)
209 209 parent_heads.add(x)
210 210 # More than one head? Suggest merging
211 211 if len(parent_heads) > 1:
212 212 return list(parent_heads)
213 213
214 214 return []
215 215
216 216
217 def _get_git_env():
218 env = {}
219 for k, v in os.environ.items():
220 if k.startswith('GIT'):
221 env[k] = v
222
223 # serialized version
224 return [(k, v) for k, v in env.items()]
225
226
227 def _get_hg_env(old_rev, new_rev, txnid, repo_path):
228 env = {}
229 for k, v in os.environ.items():
230 if k.startswith('HG'):
231 env[k] = v
232
233 env['HG_NODE'] = old_rev
234 env['HG_NODE_LAST'] = new_rev
235 env['HG_TXNID'] = txnid
236 env['HG_PENDING'] = repo_path
237
238 return [(k, v) for k, v in env.items()]
239
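Both helpers return a list of (key, value) pairs rather than a dict, presumably so the result serializes predictably into the hook extras. A rough usage sketch with placeholder values (ordering may differ, and any HG* variables already in the environment are included too):

    hg_env = _get_hg_env(
        old_rev='aaaaaaaaaaaa', new_rev='bbbbbbbbbbbb',
        txnid='TXN:example', repo_path='/srv/repos/example')
    # e.g. [('HG_NODE', 'aaaaaaaaaaaa'), ('HG_NODE_LAST', 'bbbbbbbbbbbb'),
    #       ('HG_TXNID', 'TXN:example'), ('HG_PENDING', '/srv/repos/example')]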
240
217 241 def repo_size(ui, repo, **kwargs):
218 242 extras = _extras_from_ui(ui)
219 243 return _call_hook('repo_size', extras, HgMessageWriter(ui))
220 244
221 245
222 246 def pre_pull(ui, repo, **kwargs):
223 247 extras = _extras_from_ui(ui)
224 248 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
225 249
226 250
227 251 def pre_pull_ssh(ui, repo, **kwargs):
228 252 extras = _extras_from_ui(ui)
229 253 if extras and extras.get('SSH'):
230 254 return pre_pull(ui, repo, **kwargs)
231 255 return 0
232 256
233 257
234 258 def post_pull(ui, repo, **kwargs):
235 259 extras = _extras_from_ui(ui)
236 260 return _call_hook('post_pull', extras, HgMessageWriter(ui))
237 261
238 262
239 263 def post_pull_ssh(ui, repo, **kwargs):
240 264 extras = _extras_from_ui(ui)
241 265 if extras and extras.get('SSH'):
242 266 return post_pull(ui, repo, **kwargs)
243 267 return 0
244 268
245 269
246 270 def pre_push(ui, repo, node=None, **kwargs):
247 271 """
248 272 Mercurial pre_push hook
249 273 """
250 274 extras = _extras_from_ui(ui)
251 275 detect_force_push = extras.get('detect_force_push')
252 276
253 277 rev_data = []
254 278 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
255 279 branches = collections.defaultdict(list)
256 280 commits, _heads = _rev_range_hash(repo, node, check_heads=detect_force_push)
257 281 for commit_id, branch in commits:
258 282 branches[branch].append(commit_id)
259 283
260 284 for branch, commits in branches.items():
261 285 old_rev = kwargs.get('node_last') or commits[0]
262 286 rev_data.append({
287 'total_commits': len(commits),
263 288 'old_rev': old_rev,
264 289 'new_rev': commits[-1],
265 290 'ref': '',
266 291 'type': 'branch',
267 292 'name': branch,
268 293 })
269 294
270 295 for push_ref in rev_data:
271 296 push_ref['multiple_heads'] = _heads
272 297
298 repo_path = os.path.join(
299 extras.get('repo_store', ''), extras.get('repository', ''))
300 push_ref['hg_env'] = _get_hg_env(
301 old_rev=push_ref['old_rev'],
302 new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
303 repo_path=repo_path)
304
305 extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
273 306 extras['commit_ids'] = rev_data
307
274 308 return _call_hook('pre_push', extras, HgMessageWriter(ui))
275 309
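Each entry appended to rev_data above ends up as a plain dict inside extras['commit_ids']. A sketch of one entry's shape; the hashes and branch name are placeholders:

    push_ref = {
        'total_commits': 2,
        'old_rev': 'aaaaaaaaaaaa',   # kwargs.get('node_last') or the first commit
        'new_rev': 'bbbbbbbbbbbb',   # the last commit pushed on this branch
        'ref': '',
        'type': 'branch',
        'name': 'default',
        'multiple_heads': [],        # parent heads found by _check_heads(), if any
        'hg_env': [('HG_NODE', 'aaaaaaaaaaaa'),
                   ('HG_NODE_LAST', 'bbbbbbbbbbbb')],  # plus HG_TXNID, HG_PENDING, ...
    }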
276 310
277 311 def pre_push_ssh(ui, repo, node=None, **kwargs):
278 312 extras = _extras_from_ui(ui)
279 313 if extras.get('SSH'):
280 314 return pre_push(ui, repo, node, **kwargs)
281 315
282 316 return 0
283 317
284 318
285 319 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
286 320 """
287 321 Mercurial pre_push hook for SSH
288 322 """
289 323 extras = _extras_from_ui(ui)
290 324 if extras.get('SSH'):
291 325 permission = extras['SSH_PERMISSIONS']
292 326
293 327 if 'repository.write' == permission or 'repository.admin' == permission:
294 328 return 0
295 329
296 330 # non-zero ret code
297 331 return 1
298 332
299 333 return 0
300 334
301 335
302 336 def post_push(ui, repo, node, **kwargs):
303 337 """
304 338 Mercurial post_push hook
305 339 """
306 340 extras = _extras_from_ui(ui)
307 341
308 342 commit_ids = []
309 343 branches = []
310 344 bookmarks = []
311 345 tags = []
312 346
313 347 commits, _heads = _rev_range_hash(repo, node)
314 348 for commit_id, branch in commits:
315 349 commit_ids.append(commit_id)
316 350 if branch not in branches:
317 351 branches.append(branch)
318 352
319 353 if hasattr(ui, '_rc_pushkey_branches'):
320 354 bookmarks = ui._rc_pushkey_branches
321 355
356 extras['hook_type'] = kwargs.get('hooktype', 'post_push')
322 357 extras['commit_ids'] = commit_ids
323 358 extras['new_refs'] = {
324 359 'branches': branches,
325 360 'bookmarks': bookmarks,
326 361 'tags': tags
327 362 }
328 363
329 364 return _call_hook('post_push', extras, HgMessageWriter(ui))
330 365
331 366
332 367 def post_push_ssh(ui, repo, node, **kwargs):
333 368 """
334 369 Mercurial post_push hook for SSH
335 370 """
336 371 if _extras_from_ui(ui).get('SSH'):
337 372 return post_push(ui, repo, node, **kwargs)
338 373 return 0
339 374
340 375
341 376 def key_push(ui, repo, **kwargs):
342 377 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
343 378 # store new bookmarks in our UI object propagated later to post_push
344 379 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
345 380 return
346 381
347 382
348 383 # backward compat
349 384 log_pull_action = post_pull
350 385
351 386 # backward compat
352 387 log_push_action = post_push
353 388
354 389
355 390 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
356 391 """
357 392 Old hook name: keep here for backward compatibility.
358 393
359 394 This is only required when the installed git hooks are not upgraded.
360 395 """
361 396 pass
362 397
363 398
364 399 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
365 400 """
366 401 Old hook name: keep here for backward compatibility.
367 402
368 403 This is only required when the installed git hooks are not upgraded.
369 404 """
370 405 pass
371 406
372 407
373 408 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
374 409
375 410
376 411 def git_pre_pull(extras):
377 412 """
378 413 Pre pull hook.
379 414
380 415 :param extras: dictionary containing the keys defined in simplevcs
381 416 :type extras: dict
382 417
383 418 :return: status code of the hook. 0 for success.
384 419 :rtype: int
385 420 """
386 421 if 'pull' not in extras['hooks']:
387 422 return HookResponse(0, '')
388 423
389 424 stdout = io.BytesIO()
390 425 try:
391 426 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
392 427 except Exception as error:
393 428 status = 128
394 429 stdout.write('ERROR: %s\n' % str(error))
395 430
396 431 return HookResponse(status, stdout.getvalue())
397 432
398 433
399 434 def git_post_pull(extras):
400 435 """
401 436 Post pull hook.
402 437
403 438 :param extras: dictionary containing the keys defined in simplevcs
404 439 :type extras: dict
405 440
406 441 :return: status code of the hook. 0 for success.
407 442 :rtype: int
408 443 """
409 444 if 'pull' not in extras['hooks']:
410 445 return HookResponse(0, '')
411 446
412 447 stdout = io.BytesIO()
413 448 try:
414 449 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
415 450 except Exception as error:
416 451 status = 128
417 452 stdout.write('ERROR: %s\n' % error)
418 453
419 454 return HookResponse(status, stdout.getvalue())
420 455
421 456
422 457 def _parse_git_ref_lines(revision_lines):
423 458 rev_data = []
424 459 for revision_line in revision_lines or []:
425 460 old_rev, new_rev, ref = revision_line.strip().split(' ')
426 461 ref_data = ref.split('/', 2)
427 462 if ref_data[1] in ('tags', 'heads'):
428 463 rev_data.append({
464 # NOTE(marcink):
465 # we're unable to tell total_commits for git at this point,
466 # but we set the variable for consistency with the other hooks
467 'total_commits': -1,
429 468 'old_rev': old_rev,
430 469 'new_rev': new_rev,
431 470 'ref': ref,
432 471 'type': ref_data[1],
433 472 'name': ref_data[2],
434 473 })
435 474 return rev_data
436 475
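The revision lines come straight from git's pre-/post-receive stdin in the form '<old_rev> <new_rev> <ref>'. A quick illustration with a fabricated line:

    line = '%s %s refs/heads/master' % ('1' * 40, '2' * 40)
    print(_parse_git_ref_lines([line]))
    # [{'total_commits': -1, 'old_rev': '111...1', 'new_rev': '222...2',
    #   'ref': 'refs/heads/master', 'type': 'heads', 'name': 'master'}]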
437 476
438 477 def git_pre_receive(unused_repo_path, revision_lines, env):
439 478 """
440 479 Pre push hook.
441 480
442 481 :param revision_lines: list of 'old_rev new_rev ref' lines from git
443 482 :param env: environment, must contain RC_SCM_DATA with the hook extras
444 483
445 484 :return: status code of the hook. 0 for success.
446 485 :rtype: int
447 486 """
448 487 extras = json.loads(env['RC_SCM_DATA'])
449 488 rev_data = _parse_git_ref_lines(revision_lines)
450 489 if 'push' not in extras['hooks']:
451 490 return 0
452 491 empty_commit_id = '0' * 40
453 492
454 493 detect_force_push = extras.get('detect_force_push')
455 494
456 495 for push_ref in rev_data:
457 496 # store our git-env which holds the temporary (quarantine) object store
458 push_ref['git_env'] = [
459 (k, v) for k, v in os.environ.items() if k.startswith('GIT')]
497 push_ref['git_env'] = _get_git_env()
460 498 push_ref['pruned_sha'] = ''
461 499 if not detect_force_push:
462 500 # don't check for forced-push when we don't need to
463 501 continue
464 502
465 503 type_ = push_ref['type']
466 504 new_branch = push_ref['old_rev'] == empty_commit_id
467 505 if type_ == 'heads' and not new_branch:
468 506 old_rev = push_ref['old_rev']
469 507 new_rev = push_ref['new_rev']
470 508 cmd = [settings.GIT_EXECUTABLE, 'rev-list',
471 509 old_rev, '^{}'.format(new_rev)]
472 510 stdout, stderr = subprocessio.run_command(
473 511 cmd, env=os.environ.copy())
474 512 # non-empty output means there are non-reachable objects, i.e. a
475 513 # forced push was used
476 514 if stdout:
477 515 push_ref['pruned_sha'] = stdout.splitlines()
478 516
517 extras['hook_type'] = 'pre_receive'
479 518 extras['commit_ids'] = rev_data
480 519 return _call_hook('pre_push', extras, GitMessageWriter())
481 520
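The force-push check above relies on `git rev-list <old_rev> ^<new_rev>`: any output means the old tip reaches commits that the new tip does not, i.e. history was rewritten. A hedged stand-alone equivalent; the repository path and revisions are placeholders:

    import subprocess

    def was_force_push(repo_path, old_rev, new_rev):
        # commits reachable from old_rev but not from new_rev => rewritten history
        out = subprocess.check_output(
            ['git', 'rev-list', old_rev, '^{}'.format(new_rev)],
            cwd=repo_path)
        return bool(out.strip())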
482 521
483 522 def git_post_receive(unused_repo_path, revision_lines, env):
484 523 """
485 524 Post push hook.
486 525
487 526 :param revision_lines: list of 'old_rev new_rev ref' lines from git
488 527 :param env: environment, must contain RC_SCM_DATA with the hook extras
489 528
490 529 :return: status code of the hook. 0 for success.
491 530 :rtype: int
492 531 """
493 532 extras = json.loads(env['RC_SCM_DATA'])
494 533 if 'push' not in extras['hooks']:
495 534 return 0
496 535
497 536 rev_data = _parse_git_ref_lines(revision_lines)
498 537
499 538 git_revs = []
500 539
501 540 # N.B.(skreft): it is ok to just call git, as git before calling a
502 541 # subcommand sets the PATH environment variable so that it points to the
503 542 # correct version of the git executable.
504 543 empty_commit_id = '0' * 40
505 544 branches = []
506 545 tags = []
507 546 for push_ref in rev_data:
508 547 type_ = push_ref['type']
509 548
510 549 if type_ == 'heads':
511 550 if push_ref['old_rev'] == empty_commit_id:
512 551 # starting new branch case
513 552 if push_ref['name'] not in branches:
514 553 branches.append(push_ref['name'])
515 554
516 555 # Fix up head revision if needed
517 556 cmd = [settings.GIT_EXECUTABLE, 'show', 'HEAD']
518 557 try:
519 558 subprocessio.run_command(cmd, env=os.environ.copy())
520 559 except Exception:
521 560 cmd = [settings.GIT_EXECUTABLE, 'symbolic-ref', 'HEAD',
522 561 'refs/heads/%s' % push_ref['name']]
523 562 print("Setting default branch to %s" % push_ref['name'])
524 563 subprocessio.run_command(cmd, env=os.environ.copy())
525 564
526 565 cmd = [settings.GIT_EXECUTABLE, 'for-each-ref',
527 566 '--format=%(refname)', 'refs/heads/*']
528 567 stdout, stderr = subprocessio.run_command(
529 568 cmd, env=os.environ.copy())
530 569 heads = stdout
531 570 heads = heads.replace(push_ref['ref'], '')
532 571 heads = ' '.join(head for head
533 572 in heads.splitlines() if head) or '.'
534 573 cmd = [settings.GIT_EXECUTABLE, 'log', '--reverse',
535 574 '--pretty=format:%H', '--', push_ref['new_rev'],
536 575 '--not', heads]
537 576 stdout, stderr = subprocessio.run_command(
538 577 cmd, env=os.environ.copy())
539 578 git_revs.extend(stdout.splitlines())
540 579 elif push_ref['new_rev'] == empty_commit_id:
541 580 # delete branch case
542 581 git_revs.append('delete_branch=>%s' % push_ref['name'])
543 582 else:
544 583 if push_ref['name'] not in branches:
545 584 branches.append(push_ref['name'])
546 585
547 586 cmd = [settings.GIT_EXECUTABLE, 'log',
548 587 '{old_rev}..{new_rev}'.format(**push_ref),
549 588 '--reverse', '--pretty=format:%H']
550 589 stdout, stderr = subprocessio.run_command(
551 590 cmd, env=os.environ.copy())
552 591 git_revs.extend(stdout.splitlines())
553 592 elif type_ == 'tags':
554 593 if push_ref['name'] not in tags:
555 594 tags.append(push_ref['name'])
556 595 git_revs.append('tag=>%s' % push_ref['name'])
557 596
597 extras['hook_type'] = 'post_receive'
558 598 extras['commit_ids'] = git_revs
559 599 extras['new_refs'] = {
560 600 'branches': branches,
561 601 'bookmarks': [],
562 602 'tags': tags,
563 603 }
564 604
565 605 if 'repo_size' in extras['hooks']:
566 606 try:
567 607 _call_hook('repo_size', extras, GitMessageWriter())
568 608 except:
569 609 pass
570 610
571 611 return _call_hook('post_push', extras, GitMessageWriter())
572 612
573 613
574 614 def _get_extras_from_txn_id(path, txn_id):
575 615 extras = {}
576 616 try:
577 617 cmd = ['svnlook', 'pget',
578 618 '-t', txn_id,
579 619 '--revprop', path, 'rc-scm-extras']
580 620 stdout, stderr = subprocessio.run_command(
581 621 cmd, env=os.environ.copy())
582 622 extras = json.loads(base64.urlsafe_b64decode(stdout))
583 623 except Exception:
584 624 log.exception('Failed to extract extras info from txn_id')
585 625
586 626 return extras
587 627
588 628
629 def _get_extras_from_commit_id(commit_id, path):
630 extras = {}
631 try:
632 cmd = ['svnlook', 'pget',
633 '-r', commit_id,
634 '--revprop', path, 'rc-scm-extras']
635 stdout, stderr = subprocessio.run_command(
636 cmd, env=os.environ.copy())
637 extras = json.loads(base64.urlsafe_b64decode(stdout))
638 except Exception:
639 log.exception('Failed to extract extras info from commit_id')
640
641 return extras
642
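Both svnlook helpers expect the rc-scm-extras property to be a urlsafe-base64 encoded JSON blob. Decoding it by hand looks roughly like this; the payload here is invented:

    import base64
    import json

    blob = base64.urlsafe_b64encode(
        json.dumps({'repository': 'example'}).encode('utf-8'))
    extras = json.loads(base64.urlsafe_b64decode(blob))
    # -> {'repository': 'example'}; in the hooks the blob comes from `svnlook pget`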
643
589 644 def svn_pre_commit(repo_path, commit_data, env):
590 645 path, txn_id = commit_data
591 646 branches = []
592 647 tags = []
593 648
594 649 if env.get('RC_SCM_DATA'):
595 650 extras = json.loads(env['RC_SCM_DATA'])
596 651 else:
597 652 # fallback method to read from TXN-ID stored data
598 653 extras = _get_extras_from_txn_id(path, txn_id)
599 654 if not extras:
600 655 return 0
601 656
602 657 extras['commit_ids'] = []
603 658 extras['txn_id'] = txn_id
604 659 extras['new_refs'] = {
660 'total_commits': 1,
605 661 'branches': branches,
606 662 'bookmarks': [],
607 663 'tags': tags,
608 664 }
609 665
610 666 return _call_hook('pre_push', extras, SvnMessageWriter())
611 667
612 668
613 def _get_extras_from_commit_id(commit_id, path):
614 extras = {}
615 try:
616 cmd = ['svnlook', 'pget',
617 '-r', commit_id,
618 '--revprop', path, 'rc-scm-extras']
619 stdout, stderr = subprocessio.run_command(
620 cmd, env=os.environ.copy())
621 extras = json.loads(base64.urlsafe_b64decode(stdout))
622 except Exception:
623 log.exception('Failed to extract extras info from commit_id')
624
625 return extras
626
627
628 669 def svn_post_commit(repo_path, commit_data, env):
629 670 """
630 671 commit_data is path, rev, txn_id
631 672 """
632 673 path, commit_id, txn_id = commit_data
633 674 branches = []
634 675 tags = []
635 676
636 677 if env.get('RC_SCM_DATA'):
637 678 extras = json.loads(env['RC_SCM_DATA'])
638 679 else:
639 680 # fallback method to read from revision-stored data
640 681 extras = _get_extras_from_commit_id(commit_id, path)
641 682 if not extras:
642 683 return 0
643 684
644 685 extras['commit_ids'] = [commit_id]
645 686 extras['txn_id'] = txn_id
646 687 extras['new_refs'] = {
647 688 'branches': branches,
648 689 'bookmarks': [],
649 690 'tags': tags,
691 'total_commits': 1,
650 692 }
651 693
652 694 if 'repo_size' in extras['hooks']:
653 695 try:
654 696 _call_hook('repo_size', extras, SvnMessageWriter())
655 697 except Exception:
656 698 pass
657 699
658 700 return _call_hook('post_push', extras, SvnMessageWriter())
@@ -1,598 +1,598 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import sys
20 20 import base64
21 21 import locale
22 22 import logging
23 23 import uuid
24 24 import wsgiref.util
25 25 import traceback
26 26 import tempfile
27 27 from itertools import chain
28 28
29 29 import simplejson as json
30 30 import msgpack
31 31 from pyramid.config import Configurator
32 32 from pyramid.settings import asbool, aslist
33 33 from pyramid.wsgi import wsgiapp
34 34 from pyramid.compat import configparser
35 35
36 36
37 37 log = logging.getLogger(__name__)
38 38
39 39 # due to Mercurial/glibc 2.27 problems we need to detect if locale settings
40 40 # are broken and, if they are, "fix" them by falling back to LC_ALL = C
41 41
42 42 try:
43 43 locale.setlocale(locale.LC_ALL, '')
44 44 except locale.Error as e:
45 45 log.error(
46 46 'LOCALE ERROR: failed to set LC_ALL, fallback to LC_ALL=C, org error: %s', e)
47 47 os.environ['LC_ALL'] = 'C'
48 48
49 49 import vcsserver
50 50 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
51 51 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
52 52 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
53 53 from vcsserver.echo_stub.echo_app import EchoApp
54 54 from vcsserver.exceptions import HTTPRepoLocked, HTTPRepoBranchProtected
55 55 from vcsserver.lib.exc_tracking import store_exception
56 56 from vcsserver.server import VcsServer
57 57
58 58 try:
59 59 from vcsserver.git import GitFactory, GitRemote
60 60 except ImportError:
61 61 GitFactory = None
62 62 GitRemote = None
63 63
64 64 try:
65 65 from vcsserver.hg import MercurialFactory, HgRemote
66 66 except ImportError:
67 67 MercurialFactory = None
68 68 HgRemote = None
69 69
70 70 try:
71 71 from vcsserver.svn import SubversionFactory, SvnRemote
72 72 except ImportError:
73 73 SubversionFactory = None
74 74 SvnRemote = None
75 75
76 76
77 77 def _is_request_chunked(environ):
78 78 stream = environ.get('HTTP_TRANSFER_ENCODING', '') == 'chunked'
79 79 return stream
80 80
81 81
82 82 def _int_setting(settings, name, default):
83 83 settings[name] = int(settings.get(name, default))
84 84 return settings[name]
85 85
86 86
87 87 def _bool_setting(settings, name, default):
88 88 input_val = settings.get(name, default)
89 89 if isinstance(input_val, unicode):
90 90 input_val = input_val.encode('utf8')
91 91 settings[name] = asbool(input_val)
92 92 return settings[name]
93 93
94 94
95 95 def _list_setting(settings, name, default):
96 96 raw_value = settings.get(name, default)
97 97
98 98 # We assume the value uses Pyramid's space/newline separation.
99 99 settings[name] = aslist(raw_value)
100 100 return settings[name]
101 101
102 102
103 103 def _string_setting(settings, name, default, lower=True, default_when_empty=False):
104 104 value = settings.get(name, default)
105 105
106 106 if default_when_empty and not value:
107 107 # use default value when value is empty
108 108 value = default
109 109
110 110 if lower:
111 111 value = value.lower()
112 112 settings[name] = value
113 113 return settings[name]
114 114
115 115
116 116 class VCS(object):
117 117 def __init__(self, locale=None, cache_config=None):
118 118 self.locale = locale
119 119 self.cache_config = cache_config
120 120 self._configure_locale()
121 121
122 122 if GitFactory and GitRemote:
123 123 git_factory = GitFactory()
124 124 self._git_remote = GitRemote(git_factory)
125 125 else:
126 126 log.info("Git client import failed")
127 127
128 128 if MercurialFactory and HgRemote:
129 129 hg_factory = MercurialFactory()
130 130 self._hg_remote = HgRemote(hg_factory)
131 131 else:
132 132 log.info("Mercurial client import failed")
133 133
134 134 if SubversionFactory and SvnRemote:
135 135 svn_factory = SubversionFactory()
136 136
137 137 # hg factory is used for svn url validation
138 138 hg_factory = MercurialFactory()
139 139 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
140 140 else:
141 141 log.info("Subversion client import failed")
142 142
143 143 self._vcsserver = VcsServer()
144 144
145 145 def _configure_locale(self):
146 146 if self.locale:
147 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
147 log.info('Settings locale: `LC_ALL` to %s', self.locale)
148 148 else:
149 149 log.info(
150 150 'Configuring locale subsystem based on environment variables')
151 151 try:
152 152 # If self.locale is the empty string, then the locale
153 153 # module will use the environment variables. See the
154 154 # documentation of the package `locale`.
155 155 locale.setlocale(locale.LC_ALL, self.locale)
156 156
157 157 language_code, encoding = locale.getlocale()
158 158 log.info(
159 159 'Locale set to language code "%s" with encoding "%s".',
160 160 language_code, encoding)
161 161 except locale.Error:
162 162 log.exception(
163 163 'Cannot set locale, not configuring the locale system')
164 164
165 165
166 166 class WsgiProxy(object):
167 167 def __init__(self, wsgi):
168 168 self.wsgi = wsgi
169 169
170 170 def __call__(self, environ, start_response):
171 171 input_data = environ['wsgi.input'].read()
172 172 input_data = msgpack.unpackb(input_data)
173 173
174 174 error = None
175 175 try:
176 176 data, status, headers = self.wsgi.handle(
177 177 input_data['environment'], input_data['input_data'],
178 178 *input_data['args'], **input_data['kwargs'])
179 179 except Exception as e:
180 180 data, status, headers = [], None, None
181 181 error = {
182 182 'message': str(e),
183 183 '_vcs_kind': getattr(e, '_vcs_kind', None)
184 184 }
185 185
186 186 start_response(200, {})
187 187 return self._iterator(error, status, headers, data)
188 188
189 189 def _iterator(self, error, status, headers, data):
190 190 initial_data = [
191 191 error,
192 192 status,
193 193 headers,
194 194 ]
195 195
196 196 for d in chain(initial_data, data):
197 197 yield msgpack.packb(d)
198 198
199 199
200 200 class HTTPApplication(object):
201 201 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
202 202
203 203 remote_wsgi = remote_wsgi
204 204 _use_echo_app = False
205 205
206 206 def __init__(self, settings=None, global_config=None):
207 207 self._sanitize_settings_and_apply_defaults(settings)
208 208
209 209 self.config = Configurator(settings=settings)
210 210 self.global_config = global_config
211 211 self.config.include('vcsserver.lib.rc_cache')
212 212
213 213 locale = settings.get('locale', '') or 'en_US.UTF-8'
214 214 vcs = VCS(locale=locale, cache_config=settings)
215 215 self._remotes = {
216 216 'hg': vcs._hg_remote,
217 217 'git': vcs._git_remote,
218 218 'svn': vcs._svn_remote,
219 219 'server': vcs._vcsserver,
220 220 }
221 221 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
222 222 self._use_echo_app = True
223 223 log.warning("Using EchoApp for VCS operations.")
224 224 self.remote_wsgi = remote_wsgi_stub
225 225
226 226 self._configure_settings(global_config, settings)
227 227 self._configure()
228 228
229 229 def _configure_settings(self, global_config, app_settings):
230 230 """
231 231 Configure the settings module.
232 232 """
233 233 settings_merged = global_config.copy()
234 234 settings_merged.update(app_settings)
235 235
236 236 git_path = app_settings.get('git_path', None)
237 237 if git_path:
238 238 settings.GIT_EXECUTABLE = git_path
239 239 binary_dir = app_settings.get('core.binary_dir', None)
240 240 if binary_dir:
241 241 settings.BINARY_DIR = binary_dir
242 242
243 243 # Store the settings to make them available to other modules.
244 244 vcsserver.PYRAMID_SETTINGS = settings_merged
245 245 vcsserver.CONFIG = settings_merged
246 246
247 247 def _sanitize_settings_and_apply_defaults(self, settings):
248 248 temp_store = tempfile.gettempdir()
249 249 default_cache_dir = os.path.join(temp_store, 'rc_cache')
250 250
251 251 # save the default cache dir and use it for all backends later.
252 252 default_cache_dir = _string_setting(
253 253 settings,
254 254 'cache_dir',
255 255 default_cache_dir, lower=False, default_when_empty=True)
256 256
257 257 # ensure we have our dir created
258 258 if not os.path.isdir(default_cache_dir):
259 259 os.makedirs(default_cache_dir, mode=0755)
260 260
261 261 # exception store cache
262 262 _string_setting(
263 263 settings,
264 264 'exception_tracker.store_path',
265 265 temp_store, lower=False, default_when_empty=True)
266 266
267 267 # repo_object cache
268 268 _string_setting(
269 269 settings,
270 270 'rc_cache.repo_object.backend',
271 271 'dogpile.cache.rc.memory_lru')
272 272 _int_setting(
273 273 settings,
274 274 'rc_cache.repo_object.expiration_time',
275 275 300)
276 276 _int_setting(
277 277 settings,
278 278 'rc_cache.repo_object.max_size',
279 279 1024)
280 280
281 281 def _configure(self):
282 282 self.config.add_renderer(
283 283 name='msgpack',
284 284 factory=self._msgpack_renderer_factory)
285 285
286 286 self.config.add_route('service', '/_service')
287 287 self.config.add_route('status', '/status')
288 288 self.config.add_route('hg_proxy', '/proxy/hg')
289 289 self.config.add_route('git_proxy', '/proxy/git')
290 290 self.config.add_route('vcs', '/{backend}')
291 291 self.config.add_route('stream_git', '/stream/git/*repo_name')
292 292 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
293 293
294 294 self.config.add_view(
295 295 self.status_view, route_name='status', renderer='json')
296 296 self.config.add_view(
297 297 self.service_view, route_name='service', renderer='msgpack')
298 298
299 299 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
300 300 self.config.add_view(self.git_proxy(), route_name='git_proxy')
301 301 self.config.add_view(
302 302 self.vcs_view, route_name='vcs', renderer='msgpack',
303 303 custom_predicates=[self.is_vcs_view])
304 304
305 305 self.config.add_view(self.hg_stream(), route_name='stream_hg')
306 306 self.config.add_view(self.git_stream(), route_name='stream_git')
307 307
308 308 def notfound(request):
309 309 return {'status': '404 NOT FOUND'}
310 310 self.config.add_notfound_view(notfound, renderer='json')
311 311
312 312 self.config.add_view(self.handle_vcs_exception, context=Exception)
313 313
314 314 self.config.add_tween(
315 315 'vcsserver.tweens.RequestWrapperTween',
316 316 )
317 317
318 318 def wsgi_app(self):
319 319 return self.config.make_wsgi_app()
320 320
321 321 def vcs_view(self, request):
322 322 remote = self._remotes[request.matchdict['backend']]
323 323 payload = msgpack.unpackb(request.body, use_list=True)
324 324 method = payload.get('method')
325 325 params = payload.get('params')
326 326 wire = params.get('wire')
327 327 args = params.get('args')
328 328 kwargs = params.get('kwargs')
329 329 context_uid = None
330 330
331 331 if wire:
332 332 try:
333 333 wire['context'] = context_uid = uuid.UUID(wire['context'])
334 334 except KeyError:
335 335 pass
336 336 args.insert(0, wire)
337 337
338 338 log.debug('method called:%s with kwargs:%s context_uid: %s',
339 339 method, kwargs, context_uid)
340 340 try:
341 341 resp = getattr(remote, method)(*args, **kwargs)
342 342 except Exception as e:
343 343 exc_info = list(sys.exc_info())
344 344 exc_type, exc_value, exc_traceback = exc_info
345 345
346 346 org_exc = getattr(e, '_org_exc', None)
347 347 org_exc_name = None
348 348 if org_exc:
349 349 org_exc_name = org_exc.__class__.__name__
350 350 # replace our "faked" exception with the original one
351 351 exc_info[0] = org_exc.__class__
352 352 exc_info[1] = org_exc
353 353
354 354 store_exception(id(exc_info), exc_info)
355 355
356 356 tb_info = ''.join(
357 357 traceback.format_exception(exc_type, exc_value, exc_traceback))
358 358
359 359 type_ = e.__class__.__name__
360 360 if type_ not in self.ALLOWED_EXCEPTIONS:
361 361 type_ = None
362 362
363 363 resp = {
364 364 'id': payload.get('id'),
365 365 'error': {
366 366 'message': e.message,
367 367 'traceback': tb_info,
368 368 'org_exc': org_exc_name,
369 369 'type': type_
370 370 }
371 371 }
372 372 try:
373 373 resp['error']['_vcs_kind'] = getattr(e, '_vcs_kind', None)
374 374 except AttributeError:
375 375 pass
376 376 else:
377 377 resp = {
378 378 'id': payload.get('id'),
379 379 'result': resp
380 380 }
381 381
382 382 return resp
383 383
384 384 def status_view(self, request):
385 385 import vcsserver
386 386 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__,
387 387 'pid': os.getpid()}
388 388
389 389 def service_view(self, request):
390 390 import vcsserver
391 391
392 392 payload = msgpack.unpackb(request.body, use_list=True)
393 393
394 394 try:
395 395 path = self.global_config['__file__']
396 396 config = configparser.ConfigParser()
397 397 config.read(path)
398 398 parsed_ini = config
399 399 if parsed_ini.has_section('server:main'):
400 400 parsed_ini = dict(parsed_ini.items('server:main'))
401 401 except Exception:
402 402 log.exception('Failed to read .ini file for display')
403 403 parsed_ini = {}
404 404
405 405 resp = {
406 406 'id': payload.get('id'),
407 407 'result': dict(
408 408 version=vcsserver.__version__,
409 409 config=parsed_ini,
410 410 payload=payload,
411 411 )
412 412 }
413 413 return resp
414 414
415 415 def _msgpack_renderer_factory(self, info):
416 416 def _render(value, system):
417 417 value = msgpack.packb(value)
418 418 request = system.get('request')
419 419 if request is not None:
420 420 response = request.response
421 421 ct = response.content_type
422 422 if ct == response.default_content_type:
423 423 response.content_type = 'application/x-msgpack'
424 424 return value
425 425 return _render
426 426
427 427 def set_env_from_config(self, environ, config):
428 428 dict_conf = {}
429 429 try:
430 430 for elem in config:
431 431 if elem[0] == 'rhodecode':
432 432 dict_conf = json.loads(elem[2])
433 433 break
434 434 except Exception:
435 435 log.exception('Failed to fetch SCM CONFIG')
436 436 return
437 437
438 438 username = dict_conf.get('username')
439 439 if username:
440 440 environ['REMOTE_USER'] = username
441 441 # mercurial specific, some extension APIs rely on this
442 442 environ['HGUSER'] = username
443 443
444 444 ip = dict_conf.get('ip')
445 445 if ip:
446 446 environ['REMOTE_HOST'] = ip
447 447
448 448 if _is_request_chunked(environ):
449 449 # set the compatibility flag for webob
450 450 environ['wsgi.input_terminated'] = True
451 451
452 452 def hg_proxy(self):
453 453 @wsgiapp
454 454 def _hg_proxy(environ, start_response):
455 455 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
456 456 return app(environ, start_response)
457 457 return _hg_proxy
458 458
459 459 def git_proxy(self):
460 460 @wsgiapp
461 461 def _git_proxy(environ, start_response):
462 462 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
463 463 return app(environ, start_response)
464 464 return _git_proxy
465 465
466 466 def hg_stream(self):
467 467 if self._use_echo_app:
468 468 @wsgiapp
469 469 def _hg_stream(environ, start_response):
470 470 app = EchoApp('fake_path', 'fake_name', None)
471 471 return app(environ, start_response)
472 472 return _hg_stream
473 473 else:
474 474 @wsgiapp
475 475 def _hg_stream(environ, start_response):
476 476 log.debug('http-app: handling hg stream')
477 477 repo_path = environ['HTTP_X_RC_REPO_PATH']
478 478 repo_name = environ['HTTP_X_RC_REPO_NAME']
479 479 packed_config = base64.b64decode(
480 480 environ['HTTP_X_RC_REPO_CONFIG'])
481 481 config = msgpack.unpackb(packed_config)
482 482 app = scm_app.create_hg_wsgi_app(
483 483 repo_path, repo_name, config)
484 484
485 485 # Consistent path information for hgweb
486 486 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
487 487 environ['REPO_NAME'] = repo_name
488 488 self.set_env_from_config(environ, config)
489 489
490 490 log.debug('http-app: starting app handler '
491 491 'with %s and process request', app)
492 492 return app(environ, ResponseFilter(start_response))
493 493 return _hg_stream
494 494
495 495 def git_stream(self):
496 496 if self._use_echo_app:
497 497 @wsgiapp
498 498 def _git_stream(environ, start_response):
499 499 app = EchoApp('fake_path', 'fake_name', None)
500 500 return app(environ, start_response)
501 501 return _git_stream
502 502 else:
503 503 @wsgiapp
504 504 def _git_stream(environ, start_response):
505 505 log.debug('http-app: handling git stream')
506 506 repo_path = environ['HTTP_X_RC_REPO_PATH']
507 507 repo_name = environ['HTTP_X_RC_REPO_NAME']
508 508 packed_config = base64.b64decode(
509 509 environ['HTTP_X_RC_REPO_CONFIG'])
510 510 config = msgpack.unpackb(packed_config)
511 511
512 512 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
513 513 self.set_env_from_config(environ, config)
514 514
515 515 content_type = environ.get('CONTENT_TYPE', '')
516 516
517 517 path = environ['PATH_INFO']
518 518 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
519 519 log.debug(
520 520 'LFS: Detecting if request `%s` is LFS server path based '
521 521 'on content type:`%s`, is_lfs:%s',
522 522 path, content_type, is_lfs_request)
523 523
524 524 if not is_lfs_request:
525 525 # fallback detection by path
526 526 if GIT_LFS_PROTO_PAT.match(path):
527 527 is_lfs_request = True
528 528 log.debug(
529 529 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
530 530 path, is_lfs_request)
531 531
532 532 if is_lfs_request:
533 533 app = scm_app.create_git_lfs_wsgi_app(
534 534 repo_path, repo_name, config)
535 535 else:
536 536 app = scm_app.create_git_wsgi_app(
537 537 repo_path, repo_name, config)
538 538
539 539 log.debug('http-app: starting app handler '
540 540 'with %s and process request', app)
541 541
542 542 return app(environ, start_response)
543 543
544 544 return _git_stream
545 545
546 546 def is_vcs_view(self, context, request):
547 547 """
548 548 View predicate that returns true if the given backend is supported by
549 549 the defined remotes.
550 550 """
551 551 backend = request.matchdict.get('backend')
552 552 return backend in self._remotes
553 553
554 554 def handle_vcs_exception(self, exception, request):
555 555 _vcs_kind = getattr(exception, '_vcs_kind', '')
556 556 if _vcs_kind == 'repo_locked':
557 557 # Get custom repo-locked status code if present.
558 558 status_code = request.headers.get('X-RC-Locked-Status-Code')
559 559 return HTTPRepoLocked(
560 560 title=exception.message, status_code=status_code)
561 561
562 562 elif _vcs_kind == 'repo_branch_protected':
563 563 # Get custom repo-branch-protected status code if present.
564 564 return HTTPRepoBranchProtected(title=exception.message)
565 565
566 566 exc_info = request.exc_info
567 567 store_exception(id(exc_info), exc_info)
568 568
569 569 traceback_info = 'unavailable'
570 570 if request.exc_info:
571 571 exc_type, exc_value, exc_tb = request.exc_info
572 572 traceback_info = ''.join(traceback.format_exception(exc_type, exc_value, exc_tb))
573 573
574 574 log.error(
575 575 'error occurred handling this request for path: %s, \n tb: %s',
576 576 request.path, traceback_info)
577 577 raise exception
578 578
579 579
580 580 class ResponseFilter(object):
581 581
582 582 def __init__(self, start_response):
583 583 self._start_response = start_response
584 584
585 585 def __call__(self, status, response_headers, exc_info=None):
586 586 headers = tuple(
587 587 (h, v) for h, v in response_headers
588 588 if not wsgiref.util.is_hop_by_hop(h))
589 589 return self._start_response(status, headers, exc_info)
590 590
591 591
592 592 def main(global_config, **settings):
593 593 if MercurialFactory:
594 594 hgpatches.patch_largefiles_capabilities()
595 595 hgpatches.patch_subrepo_type_mapping()
596 596
597 597 app = HTTPApplication(settings=settings, global_config=global_config)
598 598 return app.wsgi_app()
@@ -1,65 +1,65 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2018 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20
21 21 import logging
22 22
23 23 from repoze.lru import LRUCache
24 24
25 25 from vcsserver.utils import safe_str
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 class LRUDict(LRUCache):
31 31 """
32 32 Wrapper to provide partial dict access
33 33 """
34 34
35 35 def __setitem__(self, key, value):
36 36 return self.put(key, value)
37 37
38 38 def __getitem__(self, key):
39 39 return self.get(key)
40 40
41 41 def __contains__(self, key):
42 42 return bool(self.get(key))
43 43
44 44 def __delitem__(self, key):
45 45 del self.data[key]
46 46
47 47 def keys(self):
48 48 return self.data.keys()
49 49
50 50
51 51 class LRUDictDebug(LRUDict):
52 52 """
53 53 Wrapper to provide some debug options
54 54 """
55 55 def _report_keys(self):
56 56 elems_cnt = '%s/%s' % (len(self.keys()), self.size)
57 57 # trick for pformat print it more nicely
58 58 fmt = '\n'
59 59 for cnt, elem in enumerate(self.keys()):
60 60 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
61 log.debug('current LRU keys (%s):%s' % (elems_cnt, fmt))
61 log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
62 62
63 63 def __getitem__(self, key):
64 64 self._report_keys()
65 65 return self.get(key)
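A short usage sketch of LRUDict; note that __contains__ is implemented as bool(self.get(key)), so evicted keys and keys stored with falsy values both report as missing:

    cache = LRUDict(2)        # size is passed through to repoze.lru's LRUCache
    cache['a'] = 1
    cache['b'] = 2
    cache['c'] = 3            # evicts the least recently used entry ('a')
    print('a' in cache)       # False, 'a' was evicted
    cache['d'] = 0
    print('d' in cache)       # False, because bool(0) is False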
@@ -1,86 +1,86 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import os
19 19 import shutil
20 20 import tempfile
21 21
22 22 import configobj
23 23
24 24
25 25 class ContextINI(object):
26 26 """
27 27 Allows creating a new test.ini file as a copy of an existing one with edited
28 28 data. If the existing file is not present, it creates a new one. Example usage::
29 29
30 30 with ContextINI('test.ini', [{'section': {'key': 'val'}}]) as new_test_ini_path:
31 31 print 'vcsserver --config=%s' % new_test_ini_path
32 32 """
33 33
34 34 def __init__(self, ini_file_path, ini_params, new_file_prefix=None,
35 35 destroy=True):
36 36 self.ini_file_path = ini_file_path
37 37 self.ini_params = ini_params
38 38 self.new_path = None
39 39 self.new_path_prefix = new_file_prefix or 'test'
40 40 self.destroy = destroy
41 41
42 42 def __enter__(self):
43 43 _, pref = tempfile.mkstemp()
44 44 loc = tempfile.gettempdir()
45 45 self.new_path = os.path.join(loc, '{}_{}_{}'.format(
46 46 pref, self.new_path_prefix, self.ini_file_path))
47 47
48 48 # copy ini file and modify according to the params, if we re-use a file
49 49 if os.path.isfile(self.ini_file_path):
50 50 shutil.copy(self.ini_file_path, self.new_path)
51 51 else:
52 52 # create new dump file for configObj to write to.
53 53 with open(self.new_path, 'wb'):
54 54 pass
55 55
56 56 config = configobj.ConfigObj(
57 57 self.new_path, file_error=True, write_empty_values=True)
58 58
59 59 for data in self.ini_params:
60 60 section, ini_params = data.items()[0]
61 61 key, val = ini_params.items()[0]
62 62 if section not in config:
63 63 config[section] = {}
64 64 config[section][key] = val
65 65
66 66 config.write()
67 67 return self.new_path
68 68
69 69 def __exit__(self, exc_type, exc_val, exc_tb):
70 70 if self.destroy:
71 71 os.remove(self.new_path)
72 72
73 73
74 74 def no_newline_id_generator(test_name):
75 75 """
76 76 Generates a test name without spaces or newline characters. Used for
77 77 nicer output of test progress
78 78 """
79 79 org_name = test_name
80 test_name = test_name\
80 test_name = str(test_name)\
81 81 .replace('\n', '_N') \
82 82 .replace('\r', '_N') \
83 83 .replace('\t', '_T') \
84 84 .replace(' ', '_S')
85 85
86 86 return test_name or 'test-with-empty-name'
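For example, underscores replace whitespace according to the mapping above:

    print(no_newline_id_generator('push hook\nwith newline'))
    # -> 'push_Shook_Nwith_Snewline'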
@@ -1,165 +1,165 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import inspect
19 19
20 20 import pytest
21 21 import dulwich.errors
22 22 from mock import Mock, patch
23 23
24 24 from vcsserver import git
25 25
26 26
27 27 SAMPLE_REFS = {
28 28 'HEAD': 'fd627b9e0dd80b47be81af07c4a98518244ed2f7',
29 29 'refs/tags/v0.1.9': '341d28f0eec5ddf0b6b77871e13c2bbd6bec685c',
30 30 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
31 31 'refs/tags/v0.1.1': 'e6ea6d16e2f26250124a1f4b4fe37a912f9d86a0',
32 32 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
33 33 }
34 34
35 35
36 36 @pytest.fixture
37 37 def git_remote():
38 38 """
39 39 A GitRemote instance with a mock factory.
40 40 """
41 41 factory = Mock()
42 42 remote = git.GitRemote(factory)
43 43 return remote
44 44
45 45
46 46 def test_discover_git_version(git_remote):
47 47 version = git_remote.discover_git_version()
48 48 assert version
49 49
50 50
51 51 class TestGitFetch(object):
52 52 def setup(self):
53 53 self.mock_repo = Mock()
54 54 factory = Mock()
55 55 factory.repo = Mock(return_value=self.mock_repo)
56 56 self.remote_git = git.GitRemote(factory)
57 57
58 58 def test_fetches_all_when_no_commit_ids_specified(self):
59 59 def side_effect(determine_wants, *args, **kwargs):
60 60 determine_wants(SAMPLE_REFS)
61 61
62 62 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
63 63 mock_fetch.side_effect = side_effect
64 self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
64 self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
65 65 determine_wants = self.mock_repo.object_store.determine_wants_all
66 66 determine_wants.assert_called_once_with(SAMPLE_REFS)
67 67
68 68 def test_fetches_specified_commits(self):
69 69 selected_refs = {
70 70 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
71 71 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
72 72 }
73 73
74 74 def side_effect(determine_wants, *args, **kwargs):
75 75 result = determine_wants(SAMPLE_REFS)
76 76 assert sorted(result) == sorted(selected_refs.values())
77 77 return result
78 78
79 79 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
80 80 mock_fetch.side_effect = side_effect
81 self.remote_git.fetch(
81 self.remote_git.pull(
82 82 wire=None, url='/tmp/', apply_refs=False,
83 83 refs=selected_refs.keys())
84 84 determine_wants = self.mock_repo.object_store.determine_wants_all
85 85 assert determine_wants.call_count == 0
86 86
87 87 def test_get_remote_refs(self):
88 88 factory = Mock()
89 89 remote_git = git.GitRemote(factory)
90 90 url = 'http://example.com/test/test.git'
91 91 sample_refs = {
92 92 'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68',
93 93 'refs/tags/v0.1.3': '5a3a8fb005554692b16e21dee62bf02667d8dc3e',
94 94 }
95 95
96 96 with patch('vcsserver.git.Repo', create=False) as mock_repo:
97 97 mock_repo().get_refs.return_value = sample_refs
98 98 remote_refs = remote_git.get_remote_refs(wire=None, url=url)
99 99 mock_repo().get_refs.assert_called_once_with()
100 100 assert remote_refs == sample_refs
101 101
102 102 def test_remove_ref(self):
103 103 ref_to_remove = 'refs/tags/v0.1.9'
104 104 self.mock_repo.refs = SAMPLE_REFS.copy()
105 105 self.remote_git.remove_ref(None, ref_to_remove)
106 106 assert ref_to_remove not in self.mock_repo.refs
107 107
108 108
109 109 class TestReraiseSafeExceptions(object):
110 110 def test_method_decorated_with_reraise_safe_exceptions(self):
111 111 factory = Mock()
112 112 git_remote = git.GitRemote(factory)
113 113
114 114 def fake_function():
115 115 return None
116 116
117 117 decorator = git.reraise_safe_exceptions(fake_function)
118 118
119 119 methods = inspect.getmembers(git_remote, predicate=inspect.ismethod)
120 120 for method_name, method in methods:
121 121 if not method_name.startswith('_'):
122 122 assert method.im_func.__code__ == decorator.__code__
123 123
124 124 @pytest.mark.parametrize('side_effect, expected_type', [
125 125 (dulwich.errors.ChecksumMismatch('0000000', 'deadbeef'), 'lookup'),
126 126 (dulwich.errors.NotCommitError('deadbeef'), 'lookup'),
127 127 (dulwich.errors.MissingCommitError('deadbeef'), 'lookup'),
128 128 (dulwich.errors.ObjectMissing('deadbeef'), 'lookup'),
129 129 (dulwich.errors.HangupException(), 'error'),
130 130 (dulwich.errors.UnexpectedCommandError('test-cmd'), 'error'),
131 131 ])
132 132 def test_safe_exceptions_reraised(self, side_effect, expected_type):
133 133 @git.reraise_safe_exceptions
134 134 def fake_method():
135 135 raise side_effect
136 136
137 137 with pytest.raises(Exception) as exc_info:
138 138 fake_method()
139 139 assert type(exc_info.value) == Exception
140 140 assert exc_info.value._vcs_kind == expected_type
141 141
142 142
143 143 class TestDulwichRepoWrapper(object):
144 144 def test_calls_close_on_delete(self):
145 145 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
146 146 with isdir_patcher:
147 147 repo = git.Repo('/tmp/abcde')
148 148 with patch.object(git.DulwichRepo, 'close') as close_mock:
149 149 del repo
150 150 close_mock.assert_called_once_with()
151 151
152 152
153 153 class TestGitFactory(object):
154 154 def test_create_repo_returns_dulwich_wrapper(self):
155 155
156 156 with patch('vcsserver.lib.rc_cache.region_meta.dogpile_cache_regions') as mock:
157 157 mock.side_effect = {'repo_objects': ''}
158 158 factory = git.GitFactory()
159 159 wire = {
160 160 'path': '/tmp/abcde'
161 161 }
162 162 isdir_patcher = patch('dulwich.repo.os.path.isdir', return_value=True)
163 163 with isdir_patcher:
164 164 result = factory._create_repo(wire, True)
165 165 assert isinstance(result, git.Repo)
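
Note on the hunks above: this release renames GitRemote.fetch to GitRemote.pull, and the tests now exercise the new name. Below is a minimal sketch of driving the renamed method the same way TestGitFetch does; the mock factory, the patched dulwich client and the keyword arguments are all taken from the tests above, not from any new API documentation.

from mock import Mock, patch
from vcsserver import git

SAMPLE_REFS = {'refs/tags/v0.1.8': '74ebce002c088b8a5ecf40073db09375515ecd68'}

# Build a GitRemote around a mock factory, as TestGitFetch.setup() does.
mock_repo = Mock()
factory = Mock()
factory.repo = Mock(return_value=mock_repo)
remote = git.GitRemote(factory)

def fake_fetch(determine_wants, *args, **kwargs):
    # mimic dulwich handing the advertised refs to determine_wants()
    determine_wants(SAMPLE_REFS)

# Patch dulwich's LocalGitClient.fetch so no real repository is touched,
# then call the renamed method with the arguments used in the tests.
with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
    mock_fetch.side_effect = fake_fetch
    remote.pull(wire=None, url='/tmp/', apply_refs=False)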
@@ -1,241 +1,241 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import contextlib
19 19 import io
20 20 import threading
21 21 from BaseHTTPServer import BaseHTTPRequestHandler
22 22 from SocketServer import TCPServer
23 23
24 24 import mercurial.ui
25 25 import mock
26 26 import pytest
27 27 import simplejson as json
28 28
29 29 from vcsserver import hooks
30 30
31 31
32 32 def get_hg_ui(extras=None):
33 33 """Create a Config object with a valid RC_SCM_DATA entry."""
34 34 extras = extras or {}
35 35 required_extras = {
36 36 'username': '',
37 37 'repository': '',
38 38 'locked_by': '',
39 39 'scm': '',
40 40 'make_lock': '',
41 41 'action': '',
42 42 'ip': '',
43 43 'hooks_uri': 'fake_hooks_uri',
44 44 }
45 45 required_extras.update(extras)
46 46 hg_ui = mercurial.ui.ui()
47 47 hg_ui.setconfig('rhodecode', 'RC_SCM_DATA', json.dumps(required_extras))
48 48
49 49 return hg_ui
50 50
51 51
52 52 def test_git_pre_receive_is_disabled():
53 53 extras = {'hooks': ['pull']}
54 54 response = hooks.git_pre_receive(None, None,
55 55 {'RC_SCM_DATA': json.dumps(extras)})
56 56
57 57 assert response == 0
58 58
59 59
60 60 def test_git_post_receive_is_disabled():
61 61 extras = {'hooks': ['pull']}
62 62 response = hooks.git_post_receive(None, '',
63 63 {'RC_SCM_DATA': json.dumps(extras)})
64 64
65 65 assert response == 0
66 66
67 67
68 68 def test_git_post_receive_calls_repo_size():
69 69 extras = {'hooks': ['push', 'repo_size']}
70 70 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
71 71 hooks.git_post_receive(
72 72 None, '', {'RC_SCM_DATA': json.dumps(extras)})
73 extras.update({'commit_ids': [],
73 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
74 74 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
75 75 expected_calls = [
76 76 mock.call('repo_size', extras, mock.ANY),
77 77 mock.call('post_push', extras, mock.ANY),
78 78 ]
79 79 assert call_hook_mock.call_args_list == expected_calls
80 80
81 81
82 82 def test_git_post_receive_does_not_call_disabled_repo_size():
83 83 extras = {'hooks': ['push']}
84 84 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
85 85 hooks.git_post_receive(
86 86 None, '', {'RC_SCM_DATA': json.dumps(extras)})
87 extras.update({'commit_ids': [],
87 extras.update({'commit_ids': [], 'hook_type': 'post_receive',
88 88 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
89 89 expected_calls = [
90 90 mock.call('post_push', extras, mock.ANY)
91 91 ]
92 92 assert call_hook_mock.call_args_list == expected_calls
93 93
94 94
95 95 def test_repo_size_exception_does_not_affect_git_post_receive():
96 96 extras = {'hooks': ['push', 'repo_size']}
97 97 status = 0
98 98
99 99 def side_effect(name, *args, **kwargs):
100 100 if name == 'repo_size':
101 101 raise Exception('Fake exception')
102 102 else:
103 103 return status
104 104
105 105 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
106 106 call_hook_mock.side_effect = side_effect
107 107 result = hooks.git_post_receive(
108 108 None, '', {'RC_SCM_DATA': json.dumps(extras)})
109 109 assert result == status
110 110
111 111
112 112 def test_git_pre_pull_is_disabled():
113 113 assert hooks.git_pre_pull({'hooks': ['push']}) == hooks.HookResponse(0, '')
114 114
115 115
116 116 def test_git_post_pull_is_disabled():
117 117 assert (
118 118 hooks.git_post_pull({'hooks': ['push']}) == hooks.HookResponse(0, ''))
119 119
120 120
121 121 class TestGetHooksClient(object):
122 122
123 123 def test_returns_http_client_when_protocol_matches(self):
124 124 hooks_uri = 'localhost:8000'
125 125 result = hooks._get_hooks_client({
126 126 'hooks_uri': hooks_uri,
127 127 'hooks_protocol': 'http'
128 128 })
129 129 assert isinstance(result, hooks.HooksHttpClient)
130 130 assert result.hooks_uri == hooks_uri
131 131
132 132 def test_returns_dummy_client_when_hooks_uri_not_specified(self):
133 133 fake_module = mock.Mock()
134 134 import_patcher = mock.patch.object(
135 135 hooks.importlib, 'import_module', return_value=fake_module)
136 136 fake_module_name = 'fake.module'
137 137 with import_patcher as import_mock:
138 138 result = hooks._get_hooks_client(
139 139 {'hooks_module': fake_module_name})
140 140
141 141 import_mock.assert_called_once_with(fake_module_name)
142 142 assert isinstance(result, hooks.HooksDummyClient)
143 143 assert result._hooks_module == fake_module
144 144
145 145
146 146 class TestHooksHttpClient(object):
147 147 def test_init_sets_hooks_uri(self):
148 148 uri = 'localhost:3000'
149 149 client = hooks.HooksHttpClient(uri)
150 150 assert client.hooks_uri == uri
151 151
152 152 def test_serialize_returns_json_string(self):
153 153 client = hooks.HooksHttpClient('localhost:3000')
154 154 hook_name = 'test'
155 155 extras = {
156 156 'first': 1,
157 157 'second': 'two'
158 158 }
159 159 result = client._serialize(hook_name, extras)
160 160 expected_result = json.dumps({
161 161 'method': hook_name,
162 162 'extras': extras
163 163 })
164 164 assert result == expected_result
165 165
166 166 def test_call_queries_http_server(self, http_mirror):
167 167 client = hooks.HooksHttpClient(http_mirror.uri)
168 168 hook_name = 'test'
169 169 extras = {
170 170 'first': 1,
171 171 'second': 'two'
172 172 }
173 173 result = client(hook_name, extras)
174 174 expected_result = {
175 175 'method': hook_name,
176 176 'extras': extras
177 177 }
178 178 assert result == expected_result
179 179
180 180
181 181 class TestHooksDummyClient(object):
182 182 def test_init_imports_hooks_module(self):
183 183 hooks_module_name = 'rhodecode.fake.module'
184 184 hooks_module = mock.MagicMock()
185 185
186 186 import_patcher = mock.patch.object(
187 187 hooks.importlib, 'import_module', return_value=hooks_module)
188 188 with import_patcher as import_mock:
189 189 client = hooks.HooksDummyClient(hooks_module_name)
190 190 import_mock.assert_called_once_with(hooks_module_name)
191 191 assert client._hooks_module == hooks_module
192 192
193 193 def test_call_returns_hook_result(self):
194 194 hooks_module_name = 'rhodecode.fake.module'
195 195 hooks_module = mock.MagicMock()
196 196 import_patcher = mock.patch.object(
197 197 hooks.importlib, 'import_module', return_value=hooks_module)
198 198 with import_patcher:
199 199 client = hooks.HooksDummyClient(hooks_module_name)
200 200
201 201 result = client('post_push', {})
202 202 hooks_module.Hooks.assert_called_once_with()
203 203 assert result == hooks_module.Hooks().__enter__().post_push()
204 204
205 205
206 206 @pytest.fixture
207 207 def http_mirror(request):
208 208 server = MirrorHttpServer()
209 209 request.addfinalizer(server.stop)
210 210 return server
211 211
212 212
213 213 class MirrorHttpHandler(BaseHTTPRequestHandler):
214 214 def do_POST(self):
215 215 length = int(self.headers['Content-Length'])
216 216 body = self.rfile.read(length).decode('utf-8')
217 217 self.send_response(200)
218 218 self.end_headers()
219 219 self.wfile.write(body)
220 220
221 221
222 222 class MirrorHttpServer(object):
223 223 ip_address = '127.0.0.1'
224 224 port = 0
225 225
226 226 def __init__(self):
227 227 self._daemon = TCPServer((self.ip_address, 0), MirrorHttpHandler)
228 228 _, self.port = self._daemon.server_address
229 229 self._thread = threading.Thread(target=self._daemon.serve_forever)
230 230 self._thread.daemon = True
231 231 self._thread.start()
232 232
233 233 def stop(self):
234 234 self._daemon.shutdown()
235 235 self._thread.join()
236 236 self._daemon = None
237 237 self._thread = None
238 238
239 239 @property
240 240 def uri(self):
241 241 return '{}:{}'.format(self.ip_address, self.port)
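
The HooksHttpClient tests above rely on the MirrorHttpServer defined at the bottom of this module, which binds to a random local port and echoes the POSTed JSON body back. A short sketch of that round trip, assuming it runs inside (or imports from) this test module so MirrorHttpServer is in scope:

from vcsserver import hooks

# The mirror echoes the request body, so the client receives exactly the
# payload its _serialize() produced: {'method': ..., 'extras': ...}.
server = MirrorHttpServer()
try:
    client = hooks.HooksHttpClient(server.uri)
    result = client('post_push', {'first': 1, 'second': 'two'})
    assert result == {'method': 'post_push',
                      'extras': {'first': 1, 'second': 'two'}}
finally:
    server.stop()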
@@ -1,60 +1,58 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2018 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18
19 19
20 20 import time
21 21 import logging
22 22
23 23
24 24 from vcsserver.utils import safe_str
25 25
26 26
27 27 log = logging.getLogger(__name__)
28 28
29 29
30 30 def get_access_path(request):
31 31 environ = request.environ
32 32 return environ.get('PATH_INFO')
33 33
34 34
35 35 class RequestWrapperTween(object):
36 36 def __init__(self, handler, registry):
37 37 self.handler = handler
38 38 self.registry = registry
39 39
40 40 # one-time configuration code goes here
41 41
42 42 def __call__(self, request):
43 43 start = time.time()
44 44 try:
45 45 response = self.handler(request)
46 46 finally:
47 47 end = time.time()
48 48
49 log.info('IP: %s Request to path: `%s` time: %.3fs' % (
50 '127.0.0.1',
51 safe_str(get_access_path(request)), end - start)
52 )
49 log.info('IP: %s Request to path: `%s` time: %.3fs',
50 '127.0.0.1', safe_str(get_access_path(request)), end - start)
53 51
54 52 return response
55 53
56 54
57 55 def includeme(config):
58 56 config.add_tween(
59 57 'vcsserver.tweens.RequestWrapperTween',
60 58 )