release: Merge default into stable for release preparation
marcink
r284:a3f9910f merge stable
@@ -0,0 +1,37 @@
1 This patch does two things: (1) use the right name for `docbook2texi',
2 and (2) make sure `gitman.info' isn't produced since it's broken (duplicate
3 node names).
4
5 diff -ru git-1.8.4-orig/Documentation/Makefile git-1.8.4/Documentation/Makefile
6 --- git-1.8.4-orig/Documentation/Makefile 2013-08-23 21:38:43.000000000 +0200
7 +++ git-1.8.4/Documentation/Makefile 2013-09-30 14:48:51.532890378 +0200
8 @@ -101,7 +101,7 @@
9
10 MAKEINFO = makeinfo
11 INSTALL_INFO = install-info
12 -DOCBOOK2X_TEXI = docbook2x-texi
13 +DOCBOOK2X_TEXI = docbook2texi
14 DBLATEX = dblatex
15 ifndef PERL_PATH
16 PERL_PATH = /usr/bin/perl
17 @@ -205,7 +205,7 @@
18 man5: $(DOC_MAN5)
19 man7: $(DOC_MAN7)
20
21 -info: git.info gitman.info
22 +info: git.info
23
24 pdf: user-manual.pdf
25
26 @@ -221,10 +221,9 @@
27
28 install-info: info
29 $(INSTALL) -d -m 755 $(DESTDIR)$(infodir)
30 - $(INSTALL) -m 644 git.info gitman.info $(DESTDIR)$(infodir)
31 + $(INSTALL) -m 644 git.info $(DESTDIR)$(infodir)
32 if test -r $(DESTDIR)$(infodir)/dir; then \
33 $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) git.info ;\
34 - $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) gitman.info ;\
35 else \
36 echo "No directory found in $(DESTDIR)$(infodir)" >&2 ; \
37 fi
@@ -0,0 +1,94 @@
1 --- a/git-sh-i18n.sh
2 +++ b/git-sh-i18n.sh
3 @@ -15,87 +15,11 @@
4 fi
5 export TEXTDOMAINDIR
6
7 -# First decide what scheme to use...
8 -GIT_INTERNAL_GETTEXT_SH_SCHEME=fallthrough
9 -if test -n "@@USE_GETTEXT_SCHEME@@"
10 -then
11 - GIT_INTERNAL_GETTEXT_SH_SCHEME="@@USE_GETTEXT_SCHEME@@"
12 -elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS"
13 -then
14 - : no probing necessary
15 -elif test -n "$GIT_GETTEXT_POISON"
16 -then
17 - GIT_INTERNAL_GETTEXT_SH_SCHEME=poison
18 -elif type gettext.sh >/dev/null 2>&1
19 -then
20 - # GNU libintl's gettext.sh
21 - GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
22 -elif test "$(gettext -h 2>&1)" = "-h"
23 -then
24 - # gettext binary exists but no gettext.sh. likely to be a gettext
25 - # binary on a Solaris or something that is not GNU libintl and
26 - # lack eval_gettext.
27 - GIT_INTERNAL_GETTEXT_SH_SCHEME=gettext_without_eval_gettext
28 -fi
29 -export GIT_INTERNAL_GETTEXT_SH_SCHEME
30 -
31 -# ... and then follow that decision.
32 -case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in
33 -gnu)
34 - # Use libintl's gettext.sh, or fall back to English if we can't.
35 - . gettext.sh
36 - ;;
37 -gettext_without_eval_gettext)
38 - # Solaris has a gettext(1) but no eval_gettext(1)
39 - eval_gettext () {
40 - gettext "$1" | (
41 - export PATH $(git sh-i18n--envsubst --variables "$1");
42 - git sh-i18n--envsubst "$1"
43 - )
44 - }
45 -
46 - eval_ngettext () {
47 - ngettext "$1" "$2" "$3" | (
48 - export PATH $(git sh-i18n--envsubst --variables "$2");
49 - git sh-i18n--envsubst "$2"
50 - )
51 - }
52 - ;;
53 -poison)
54 - # Emit garbage so that tests that incorrectly rely on translatable
55 - # strings will fail.
56 - gettext () {
57 - printf "%s" "# GETTEXT POISON #"
58 - }
59 -
60 - eval_gettext () {
61 - printf "%s" "# GETTEXT POISON #"
62 - }
63 -
64 - eval_ngettext () {
65 - printf "%s" "# GETTEXT POISON #"
66 - }
67 - ;;
68 -*)
69 - gettext () {
70 - printf "%s" "$1"
71 - }
72 -
73 - eval_gettext () {
74 - printf "%s" "$1" | (
75 - export PATH $(git sh-i18n--envsubst --variables "$1");
76 - git sh-i18n--envsubst "$1"
77 - )
78 - }
79 +# GNU gettext
80 +export GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
81 +export PATH=@gettext@/bin:$PATH
82
83 - eval_ngettext () {
84 - (test "$3" = 1 && printf "%s" "$1" || printf "%s" "$2") | (
85 - export PATH $(git sh-i18n--envsubst --variables "$2");
86 - git sh-i18n--envsubst "$2"
87 - )
88 - }
89 - ;;
90 -esac
91 +. @gettext@/bin/gettext.sh
92
93 # Git-specific wrapper functions
94 gettextln () {
@@ -0,0 +1,26 @@
1 diff --git a/connect.c b/connect.c
2 index fd7ffe1..20cd992 100644
3 --- a/connect.c
4 +++ b/connect.c
5 @@ -768,7 +768,7 @@
6
7 ssh = getenv("GIT_SSH");
8 if (!ssh)
9 - ssh = "ssh";
10 + ssh = "@ssh@";
11 else
12 handle_ssh_variant(ssh, 0,
13 &port_option,
14 diff --git a/git-gui/lib/remote_add.tcl b/git-gui/lib/remote_add.tcl
15 index 50029d0..17b9594 100644
16 --- a/git-gui/lib/remote_add.tcl
17 +++ b/git-gui/lib/remote_add.tcl
18 @@ -139,7 +139,7 @@
19 # Parse the location
20 if { [regexp {(?:git\+)?ssh://([^/]+)(/.+)} $location xx host path]
21 || [regexp {([^:][^:]+):(.+)} $location xx host path]} {
22 - set ssh ssh
23 + set ssh @ssh@
24 if {[info exists env(GIT_SSH)]} {
25 set ssh $env(GIT_SSH)
26 }
@@ -1,6 +1,6 @@
1 1 [bumpversion]
2 current_version = 4.9.1
2 current_version = 4.10.0
3 3 message = release: Bump version {current_version} to {new_version}
4 4
5 5 [bumpversion:file:vcsserver/VERSION]
6 6
@@ -1,16 +1,14 @@
1 1 [DEFAULT]
2 2 done = false
3 3
4 4 [task:bump_version]
5 5 done = true
6 6
7 7 [task:fixes_on_stable]
8 done = true
9 8
10 9 [task:pip2nix_generated]
11 done = true
12 10
13 11 [release]
14 state = prepared
15 version = 4.9.1
12 state = in_progress
13 version = 4.10.0
16 14
@@ -1,158 +1,165 @@
1 1 # Nix environment for the community edition
2 2 #
3 3 # This shall be as lean as possible, just producing the rhodecode-vcsserver
4 4 # derivation. For advanced tweaks to pimp up the development environment we use
5 5 # "shell.nix" so that it does not have to clutter this file.
6 6
7 7 { pkgs ? (import <nixpkgs> {})
8 8 , pythonPackages ? "python27Packages"
9 9 , pythonExternalOverrides ? self: super: {}
10 10 , doCheck ? true
11 11 }:
12 12
13 13 let pkgs_ = pkgs; in
14 14
15 15 let
16 16 pkgs = pkgs_.overridePackages (self: super: {
17 17 # bump GIT version
18 18 git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
19 name = "git-2.9.5";
19 name = "git-2.13.5";
20 20 src = pkgs.fetchurl {
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.9.5.tar.xz";
22 sha256 = "00ir7qmgfszwrhxjzxwixk7wp35gxvvw467gr30bagwsrdza7gm4";
21 url = "https://www.kernel.org/pub/software/scm/git/git-2.13.5.tar.xz";
22 sha256 = "18fi18103n7grshm4ffb0fwsnvbl48sbqy5gqx528vf8maff5j91";
23 23 };
24 24
25 patches = [
26 ./pkgs/git_patches/docbook2texi.patch
27 ./pkgs/git_patches/symlinks-in-bin.patch
28 ./pkgs/git_patches/git-sh-i18n.patch
29 ./pkgs/git_patches/ssh-path.patch
30 ];
31
25 32 });
26 33
27 34 # Override subversion derivation to
28 35 # - activate python bindings
29 36 subversion = let
30 37 subversionWithPython = super.subversion.override {
31 38 httpSupport = true;
32 39 pythonBindings = true;
33 40 python = self.python27Packages.python;
34 41 };
35 42
36 43 in
37 44
38 45 pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
39 46 name = "subversion-1.9.7";
40 47 src = pkgs.fetchurl {
41 48 url = "https://www.apache.org/dist/subversion/subversion-1.9.7.tar.gz";
42 49 sha256 = "0g3cs2h008z8ymgkhbk54jp87bjh7y049rn42igj881yi2f20an7";
43 50 };
44 51
45 52 });
46 53
47 54 });
48 55
49 56 inherit (pkgs.lib) fix extends;
50 57 basePythonPackages = with builtins; if isAttrs pythonPackages
51 58 then pythonPackages
52 59 else getAttr pythonPackages pkgs;
53 60
54 61 elem = builtins.elem;
55 62 basename = path: with pkgs.lib; last (splitString "/" path);
56 63 startsWith = prefix: full: let
57 64 actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
58 65 in actualPrefix == prefix;
59 66
60 67 src-filter = path: type: with pkgs.lib;
61 68 let
62 69 ext = last (splitString "." path);
63 70 in
64 71 !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
65 72 "node_modules" "build" "data" "tmp"] &&
66 73 !elem ext ["egg-info" "pyc"] &&
67 74 !startsWith "result" path;
68 75
69 76 rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;
70 77
71 78 pythonGeneratedPackages = self: basePythonPackages.override (a: {
72 79 inherit self;
73 80 }) // (scopedImport {
74 81 self = self;
75 82 super = basePythonPackages;
76 83 inherit pkgs;
77 84 inherit (pkgs) fetchurl fetchgit;
78 85 } ./pkgs/python-packages.nix);
79 86
80 87 pythonOverrides = import ./pkgs/python-packages-overrides.nix {
81 88 inherit basePythonPackages pkgs;
82 89 };
83 90
84 91 version = builtins.readFile ./vcsserver/VERSION;
85 92
86 93 pythonLocalOverrides = self: super: {
87 94 rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
88 95 inherit doCheck version;
89 96
90 97 name = "rhodecode-vcsserver-${version}";
91 98 releaseName = "RhodeCodeVCSServer-${version}";
92 99 src = rhodecode-vcsserver-src;
93 100 dontStrip = true; # prevent strip, we don't need it.
94 101
95 102 propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
96 103 pkgs.git
97 104 pkgs.subversion
98 105 ]);
99 106
100 107 # TODO: johbo: Make a nicer way to expose the parts. Maybe
101 108 # pkgs/default.nix?
102 109 passthru = {
103 110 pythonPackages = self;
104 111 };
105 112
106 113 # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
107 114 preCheck = ''
108 115 export PATH="$out/bin:$PATH"
109 116 '';
110 117
111 118 # put custom attrs here
112 119 checkPhase = ''
113 120 runHook preCheck
114 121 PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
115 122 runHook postCheck
116 123 '';
117 124
118 125 postInstall = ''
119 126 echo "Writing meta information for rccontrol to nix-support/rccontrol"
120 127 mkdir -p $out/nix-support/rccontrol
121 128 cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
122 129 echo "DONE: Meta information for rccontrol written"
123 130
124 131 # python based programs need to be wrapped
125 132 ln -s ${self.pyramid}/bin/* $out/bin/
126 133 ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
127 134
128 135 # Symlink version control utilities
129 136 #
130 137 # We ensure that always the correct version is available as a symlink.
131 138 # So that users calling them via the profile path will always use the
132 139 # correct version.
133 140 ln -s ${pkgs.git}/bin/git $out/bin
134 141 ln -s ${self.mercurial}/bin/hg $out/bin
135 142 ln -s ${pkgs.subversion}/bin/svn* $out/bin
136 143
137 144 for file in $out/bin/*;
138 145 do
139 146 wrapProgram $file \
140 147 --set PATH $PATH \
141 148 --set PYTHONPATH $PYTHONPATH \
142 149 --set PYTHONHASHSEED random
143 150 done
144 151
145 152 '';
146 153
147 154 });
148 155 };
149 156
150 157 # Apply all overrides and fix the final package set
151 158 myPythonPackages =
152 159 (fix
153 160 (extends pythonExternalOverrides
154 161 (extends pythonLocalOverrides
155 162 (extends pythonOverrides
156 163 pythonGeneratedPackages))));
157 164
158 165 in myPythonPackages.rhodecode-vcsserver
@@ -1,799 +1,877 @@
1 1 # Generated by pip2nix 0.4.0
2 2 # See https://github.com/johbo/pip2nix
3 3
4 4 {
5 5 Beaker = super.buildPythonPackage {
6 name = "Beaker-1.7.0";
6 name = "Beaker-1.9.0";
7 7 buildInputs = with self; [];
8 8 doCheck = false;
9 propagatedBuildInputs = with self; [];
9 propagatedBuildInputs = with self; [funcsigs];
10 10 src = fetchurl {
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
12 md5 = "386be3f7fe427358881eee4622b428b3";
11 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
12 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
13 13 };
14 14 meta = {
15 15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 16 };
17 17 };
18 18 Jinja2 = super.buildPythonPackage {
19 19 name = "Jinja2-2.8";
20 20 buildInputs = with self; [];
21 21 doCheck = false;
22 22 propagatedBuildInputs = with self; [MarkupSafe];
23 23 src = fetchurl {
24 24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
25 25 md5 = "edb51693fe22c53cee5403775c71a99e";
26 26 };
27 27 meta = {
28 28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 29 };
30 30 };
31 31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.6";
32 name = "Mako-1.0.7";
33 33 buildInputs = with self; [];
34 34 doCheck = false;
35 35 propagatedBuildInputs = with self; [MarkupSafe];
36 36 src = fetchurl {
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
37 url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
38 md5 = "5836cc997b1b773ef389bf6629c30e65";
39 39 };
40 40 meta = {
41 41 license = [ pkgs.lib.licenses.mit ];
42 42 };
43 43 };
44 44 MarkupSafe = super.buildPythonPackage {
45 45 name = "MarkupSafe-0.23";
46 46 buildInputs = with self; [];
47 47 doCheck = false;
48 48 propagatedBuildInputs = with self; [];
49 49 src = fetchurl {
50 50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
51 51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
52 52 };
53 53 meta = {
54 54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 55 };
56 56 };
57 57 PasteDeploy = super.buildPythonPackage {
58 58 name = "PasteDeploy-1.5.2";
59 59 buildInputs = with self; [];
60 60 doCheck = false;
61 61 propagatedBuildInputs = with self; [];
62 62 src = fetchurl {
63 63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 64 md5 = "352b7205c78c8de4987578d19431af3b";
65 65 };
66 66 meta = {
67 67 license = [ pkgs.lib.licenses.mit ];
68 68 };
69 69 };
70 70 WebOb = super.buildPythonPackage {
71 name = "WebOb-1.3.1";
71 name = "WebOb-1.7.3";
72 72 buildInputs = with self; [];
73 73 doCheck = false;
74 74 propagatedBuildInputs = with self; [];
75 75 src = fetchurl {
76 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
77 md5 = "20918251c5726956ba8fef22d1556177";
76 url = "https://pypi.python.org/packages/46/87/2f96d8d43b2078fae6e1d33fa86b95c228cebed060f4e3c7576cc44ea83b/WebOb-1.7.3.tar.gz";
77 md5 = "350028baffc508e3d23c078118e35316";
78 78 };
79 79 meta = {
80 80 license = [ pkgs.lib.licenses.mit ];
81 81 };
82 82 };
83 83 WebTest = super.buildPythonPackage {
84 name = "WebTest-1.4.3";
84 name = "WebTest-2.0.27";
85 85 buildInputs = with self; [];
86 86 doCheck = false;
87 propagatedBuildInputs = with self; [WebOb];
87 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
88 88 src = fetchurl {
89 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
90 md5 = "631ce728bed92c681a4020a36adbc353";
89 url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
90 md5 = "54e6515ac71c51b6fc90179483c749ad";
91 91 };
92 92 meta = {
93 93 license = [ pkgs.lib.licenses.mit ];
94 94 };
95 95 };
96 96 backports.shutil-get-terminal-size = super.buildPythonPackage {
97 97 name = "backports.shutil-get-terminal-size-1.0.0";
98 98 buildInputs = with self; [];
99 99 doCheck = false;
100 100 propagatedBuildInputs = with self; [];
101 101 src = fetchurl {
102 102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
103 103 md5 = "03267762480bd86b50580dc19dff3c66";
104 104 };
105 105 meta = {
106 106 license = [ pkgs.lib.licenses.mit ];
107 107 };
108 108 };
109 beautifulsoup4 = super.buildPythonPackage {
110 name = "beautifulsoup4-4.6.0";
111 buildInputs = with self; [];
112 doCheck = false;
113 propagatedBuildInputs = with self; [];
114 src = fetchurl {
115 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
116 md5 = "c17714d0f91a23b708a592cb3c697728";
117 };
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
120 };
121 };
109 122 configobj = super.buildPythonPackage {
110 123 name = "configobj-5.0.6";
111 124 buildInputs = with self; [];
112 125 doCheck = false;
113 126 propagatedBuildInputs = with self; [six];
114 127 src = fetchurl {
115 128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
116 129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
117 130 };
118 131 meta = {
119 132 license = [ pkgs.lib.licenses.bsdOriginal ];
120 133 };
121 134 };
122 135 cov-core = super.buildPythonPackage {
123 136 name = "cov-core-1.15.0";
124 137 buildInputs = with self; [];
125 138 doCheck = false;
126 139 propagatedBuildInputs = with self; [coverage];
127 140 src = fetchurl {
128 141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
129 142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
130 143 };
131 144 meta = {
132 145 license = [ pkgs.lib.licenses.mit ];
133 146 };
134 147 };
135 148 coverage = super.buildPythonPackage {
136 149 name = "coverage-3.7.1";
137 150 buildInputs = with self; [];
138 151 doCheck = false;
139 152 propagatedBuildInputs = with self; [];
140 153 src = fetchurl {
141 154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
142 155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
143 156 };
144 157 meta = {
145 158 license = [ pkgs.lib.licenses.bsdOriginal ];
146 159 };
147 160 };
148 161 decorator = super.buildPythonPackage {
149 162 name = "decorator-4.0.11";
150 163 buildInputs = with self; [];
151 164 doCheck = false;
152 165 propagatedBuildInputs = with self; [];
153 166 src = fetchurl {
154 167 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
155 168 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
156 169 };
157 170 meta = {
158 171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
159 172 };
160 173 };
161 174 dulwich = super.buildPythonPackage {
162 175 name = "dulwich-0.13.0";
163 176 buildInputs = with self; [];
164 177 doCheck = false;
165 178 propagatedBuildInputs = with self; [];
166 179 src = fetchurl {
167 180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
168 181 md5 = "6dede0626657c2bd08f48ca1221eea91";
169 182 };
170 183 meta = {
171 184 license = [ pkgs.lib.licenses.gpl2Plus ];
172 185 };
173 186 };
174 187 enum34 = super.buildPythonPackage {
175 188 name = "enum34-1.1.6";
176 189 buildInputs = with self; [];
177 190 doCheck = false;
178 191 propagatedBuildInputs = with self; [];
179 192 src = fetchurl {
180 193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
181 194 md5 = "5f13a0841a61f7fc295c514490d120d0";
182 195 };
183 196 meta = {
184 197 license = [ pkgs.lib.licenses.bsdOriginal ];
185 198 };
186 199 };
200 funcsigs = super.buildPythonPackage {
201 name = "funcsigs-1.0.2";
202 buildInputs = with self; [];
203 doCheck = false;
204 propagatedBuildInputs = with self; [];
205 src = fetchurl {
206 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
207 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
208 };
209 meta = {
210 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
211 };
212 };
187 213 gevent = super.buildPythonPackage {
188 name = "gevent-1.1.2";
214 name = "gevent-1.2.2";
189 215 buildInputs = with self; [];
190 216 doCheck = false;
191 217 propagatedBuildInputs = with self; [greenlet];
192 218 src = fetchurl {
193 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
194 md5 = "bb32a2f852a4997138014d5007215c6e";
219 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
220 md5 = "7f0baf355384fe5ff2ecf66853422554";
195 221 };
196 222 meta = {
197 223 license = [ pkgs.lib.licenses.mit ];
198 224 };
199 225 };
200 226 gprof2dot = super.buildPythonPackage {
201 227 name = "gprof2dot-2016.10.13";
202 228 buildInputs = with self; [];
203 229 doCheck = false;
204 230 propagatedBuildInputs = with self; [];
205 231 src = fetchurl {
206 232 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
207 233 md5 = "0125401f15fd2afe1df686a76c64a4fd";
208 234 };
209 235 meta = {
210 236 license = [ { fullName = "LGPL"; } ];
211 237 };
212 238 };
213 239 greenlet = super.buildPythonPackage {
214 name = "greenlet-0.4.10";
240 name = "greenlet-0.4.12";
215 241 buildInputs = with self; [];
216 242 doCheck = false;
217 243 propagatedBuildInputs = with self; [];
218 244 src = fetchurl {
219 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
220 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
245 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
246 md5 = "e8637647d58a26c4a1f51ca393e53c00";
221 247 };
222 248 meta = {
223 249 license = [ pkgs.lib.licenses.mit ];
224 250 };
225 251 };
226 252 gunicorn = super.buildPythonPackage {
227 name = "gunicorn-19.6.0";
253 name = "gunicorn-19.7.1";
228 254 buildInputs = with self; [];
229 255 doCheck = false;
230 256 propagatedBuildInputs = with self; [];
231 257 src = fetchurl {
232 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
233 md5 = "338e5e8a83ea0f0625f768dba4597530";
258 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
259 md5 = "174d3c3cd670a5be0404d84c484e590c";
234 260 };
235 261 meta = {
236 262 license = [ pkgs.lib.licenses.mit ];
237 263 };
238 264 };
239 265 hg-evolve = super.buildPythonPackage {
240 266 name = "hg-evolve-6.6.0";
241 267 buildInputs = with self; [];
242 268 doCheck = false;
243 269 propagatedBuildInputs = with self; [];
244 270 src = fetchurl {
245 271 url = "https://pypi.python.org/packages/c5/04/3557c97eaa320b5a6769edade64a299cd2710f5f3b818f64991ab6c8c08f/hg-evolve-6.6.0.tar.gz";
246 272 md5 = "06b9a9c8e8137bbf0c4fbf940c009725";
247 273 };
248 274 meta = {
249 275 license = [ { fullName = "GPLv2+"; } ];
250 276 };
251 277 };
252 278 hgsubversion = super.buildPythonPackage {
253 name = "hgsubversion-1.8.6";
279 name = "hgsubversion-1.8.7";
254 280 buildInputs = with self; [];
255 281 doCheck = false;
256 282 propagatedBuildInputs = with self; [mercurial subvertpy];
257 283 src = fetchurl {
258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
259 md5 = "9310cb266031cf8d0779885782a84a5b";
284 url = "https://pypi.python.org/packages/1c/b8/ff4d2e0ec486f9765b410f09728c02a010e7485d68d6154968074498a403/hgsubversion-1.8.7.tar.gz";
285 md5 = "289f1c36c13bd6a3435a9be390a77bdc";
260 286 };
261 287 meta = {
262 288 license = [ pkgs.lib.licenses.gpl1 ];
263 289 };
264 290 };
291 hupper = super.buildPythonPackage {
292 name = "hupper-1.0";
293 buildInputs = with self; [];
294 doCheck = false;
295 propagatedBuildInputs = with self; [];
296 src = fetchurl {
297 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
298 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
299 };
300 meta = {
301 license = [ pkgs.lib.licenses.mit ];
302 };
303 };
265 304 infrae.cache = super.buildPythonPackage {
266 305 name = "infrae.cache-1.0.1";
267 306 buildInputs = with self; [];
268 307 doCheck = false;
269 308 propagatedBuildInputs = with self; [Beaker repoze.lru];
270 309 src = fetchurl {
271 310 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
272 311 md5 = "b09076a766747e6ed2a755cc62088e32";
273 312 };
274 313 meta = {
275 314 license = [ pkgs.lib.licenses.zpt21 ];
276 315 };
277 316 };
278 317 ipdb = super.buildPythonPackage {
279 name = "ipdb-0.10.1";
318 name = "ipdb-0.10.3";
280 319 buildInputs = with self; [];
281 320 doCheck = false;
282 propagatedBuildInputs = with self; [ipython setuptools];
321 propagatedBuildInputs = with self; [setuptools ipython];
283 322 src = fetchurl {
284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
323 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
324 md5 = "def1f6ac075d54bdee07e6501263d4fa";
286 325 };
287 326 meta = {
288 327 license = [ pkgs.lib.licenses.bsdOriginal ];
289 328 };
290 329 };
291 330 ipython = super.buildPythonPackage {
292 331 name = "ipython-5.1.0";
293 332 buildInputs = with self; [];
294 333 doCheck = false;
295 334 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
296 335 src = fetchurl {
297 336 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
298 337 md5 = "47c8122420f65b58784cb4b9b4af35e3";
299 338 };
300 339 meta = {
301 340 license = [ pkgs.lib.licenses.bsdOriginal ];
302 341 };
303 342 };
304 343 ipython-genutils = super.buildPythonPackage {
305 344 name = "ipython-genutils-0.2.0";
306 345 buildInputs = with self; [];
307 346 doCheck = false;
308 347 propagatedBuildInputs = with self; [];
309 348 src = fetchurl {
310 349 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
311 350 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
312 351 };
313 352 meta = {
314 353 license = [ pkgs.lib.licenses.bsdOriginal ];
315 354 };
316 355 };
317 356 mercurial = super.buildPythonPackage {
318 357 name = "mercurial-4.2.3";
319 358 buildInputs = with self; [];
320 359 doCheck = false;
321 360 propagatedBuildInputs = with self; [];
322 361 src = fetchurl {
323 362 url = "https://www.mercurial-scm.org/release/mercurial-4.2.3.tar.gz";
324 363 md5 = "a24a8fab7c2ad2c65e945b1b35d94e3b";
325 364 };
326 365 meta = {
327 366 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
328 367 };
329 368 };
330 369 mock = super.buildPythonPackage {
331 370 name = "mock-1.0.1";
332 371 buildInputs = with self; [];
333 372 doCheck = false;
334 373 propagatedBuildInputs = with self; [];
335 374 src = fetchurl {
336 375 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
337 376 md5 = "869f08d003c289a97c1a6610faf5e913";
338 377 };
339 378 meta = {
340 379 license = [ pkgs.lib.licenses.bsdOriginal ];
341 380 };
342 381 };
343 382 msgpack-python = super.buildPythonPackage {
344 383 name = "msgpack-python-0.4.8";
345 384 buildInputs = with self; [];
346 385 doCheck = false;
347 386 propagatedBuildInputs = with self; [];
348 387 src = fetchurl {
349 388 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
350 389 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
351 390 };
352 391 meta = {
353 392 license = [ pkgs.lib.licenses.asl20 ];
354 393 };
355 394 };
356 395 pathlib2 = super.buildPythonPackage {
357 name = "pathlib2-2.1.0";
396 name = "pathlib2-2.3.0";
358 397 buildInputs = with self; [];
359 398 doCheck = false;
360 propagatedBuildInputs = with self; [six];
399 propagatedBuildInputs = with self; [six scandir];
361 400 src = fetchurl {
362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
401 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
402 md5 = "89c90409d11fd5947966b6a30a47d18c";
364 403 };
365 404 meta = {
366 405 license = [ pkgs.lib.licenses.mit ];
367 406 };
368 407 };
369 408 pexpect = super.buildPythonPackage {
370 409 name = "pexpect-4.2.1";
371 410 buildInputs = with self; [];
372 411 doCheck = false;
373 412 propagatedBuildInputs = with self; [ptyprocess];
374 413 src = fetchurl {
375 414 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
376 415 md5 = "3694410001a99dff83f0b500a1ca1c95";
377 416 };
378 417 meta = {
379 418 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
380 419 };
381 420 };
382 421 pickleshare = super.buildPythonPackage {
383 422 name = "pickleshare-0.7.4";
384 423 buildInputs = with self; [];
385 424 doCheck = false;
386 425 propagatedBuildInputs = with self; [pathlib2];
387 426 src = fetchurl {
388 427 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
389 428 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
390 429 };
391 430 meta = {
392 431 license = [ pkgs.lib.licenses.mit ];
393 432 };
394 433 };
434 plaster = super.buildPythonPackage {
435 name = "plaster-0.5";
436 buildInputs = with self; [];
437 doCheck = false;
438 propagatedBuildInputs = with self; [setuptools];
439 src = fetchurl {
440 url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
441 md5 = "c59345a67a860cfcaa1bd6a81451399d";
442 };
443 meta = {
444 license = [ pkgs.lib.licenses.mit ];
445 };
446 };
447 plaster-pastedeploy = super.buildPythonPackage {
448 name = "plaster-pastedeploy-0.4.1";
449 buildInputs = with self; [];
450 doCheck = false;
451 propagatedBuildInputs = with self; [PasteDeploy plaster];
452 src = fetchurl {
453 url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
454 md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
455 };
456 meta = {
457 license = [ pkgs.lib.licenses.mit ];
458 };
459 };
395 460 prompt-toolkit = super.buildPythonPackage {
396 name = "prompt-toolkit-1.0.14";
461 name = "prompt-toolkit-1.0.15";
397 462 buildInputs = with self; [];
398 463 doCheck = false;
399 464 propagatedBuildInputs = with self; [six wcwidth];
400 465 src = fetchurl {
401 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
402 md5 = "f24061ae133ed32c6b764e92bd48c496";
466 url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
467 md5 = "8fe70295006dbc8afedd43e5eba99032";
403 468 };
404 469 meta = {
405 470 license = [ pkgs.lib.licenses.bsdOriginal ];
406 471 };
407 472 };
408 473 ptyprocess = super.buildPythonPackage {
409 name = "ptyprocess-0.5.1";
474 name = "ptyprocess-0.5.2";
410 475 buildInputs = with self; [];
411 476 doCheck = false;
412 477 propagatedBuildInputs = with self; [];
413 478 src = fetchurl {
414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
479 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
480 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
416 481 };
417 482 meta = {
418 483 license = [ ];
419 484 };
420 485 };
421 486 py = super.buildPythonPackage {
422 name = "py-1.4.31";
487 name = "py-1.4.34";
423 488 buildInputs = with self; [];
424 489 doCheck = false;
425 490 propagatedBuildInputs = with self; [];
426 491 src = fetchurl {
427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
492 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
493 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
429 494 };
430 495 meta = {
431 496 license = [ pkgs.lib.licenses.mit ];
432 497 };
433 498 };
434 499 pygments = super.buildPythonPackage {
435 500 name = "pygments-2.2.0";
436 501 buildInputs = with self; [];
437 502 doCheck = false;
438 503 propagatedBuildInputs = with self; [];
439 504 src = fetchurl {
440 505 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
441 506 md5 = "13037baca42f16917cbd5ad2fab50844";
442 507 };
443 508 meta = {
444 509 license = [ pkgs.lib.licenses.bsdOriginal ];
445 510 };
446 511 };
447 512 pyramid = super.buildPythonPackage {
448 name = "pyramid-1.7.4";
513 name = "pyramid-1.9.1";
449 514 buildInputs = with self; [];
450 515 doCheck = false;
451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
516 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
452 517 src = fetchurl {
453 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
454 md5 = "6ef1dfdcff9136d04490410757c4c446";
518 url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
519 md5 = "0163e19c58c2d12976a3b6fdb57e052d";
455 520 };
456 521 meta = {
457 522 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
458 523 };
459 524 };
460 525 pyramid-jinja2 = super.buildPythonPackage {
461 526 name = "pyramid-jinja2-2.5";
462 527 buildInputs = with self; [];
463 528 doCheck = false;
464 529 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
465 530 src = fetchurl {
466 531 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
467 532 md5 = "07cb6547204ac5e6f0b22a954ccee928";
468 533 };
469 534 meta = {
470 535 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
471 536 };
472 537 };
473 538 pyramid-mako = super.buildPythonPackage {
474 539 name = "pyramid-mako-1.0.2";
475 540 buildInputs = with self; [];
476 541 doCheck = false;
477 542 propagatedBuildInputs = with self; [pyramid Mako];
478 543 src = fetchurl {
479 544 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
480 545 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
481 546 };
482 547 meta = {
483 548 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
484 549 };
485 550 };
486 551 pytest = super.buildPythonPackage {
487 name = "pytest-3.0.5";
552 name = "pytest-3.1.2";
488 553 buildInputs = with self; [];
489 554 doCheck = false;
490 propagatedBuildInputs = with self; [py];
555 propagatedBuildInputs = with self; [py setuptools];
491 556 src = fetchurl {
492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
557 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
558 md5 = "c4d179f89043cc925e1c169d03128e02";
494 559 };
495 560 meta = {
496 561 license = [ pkgs.lib.licenses.mit ];
497 562 };
498 563 };
499 564 pytest-catchlog = super.buildPythonPackage {
500 565 name = "pytest-catchlog-1.2.2";
501 566 buildInputs = with self; [];
502 567 doCheck = false;
503 568 propagatedBuildInputs = with self; [py pytest];
504 569 src = fetchurl {
505 570 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
506 571 md5 = "09d890c54c7456c818102b7ff8c182c8";
507 572 };
508 573 meta = {
509 574 license = [ pkgs.lib.licenses.mit ];
510 575 };
511 576 };
512 577 pytest-cov = super.buildPythonPackage {
513 name = "pytest-cov-2.4.0";
578 name = "pytest-cov-2.5.1";
514 579 buildInputs = with self; [];
515 580 doCheck = false;
516 581 propagatedBuildInputs = with self; [pytest coverage];
517 582 src = fetchurl {
518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
583 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
584 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
520 585 };
521 586 meta = {
522 587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
523 588 };
524 589 };
525 590 pytest-profiling = super.buildPythonPackage {
526 name = "pytest-profiling-1.2.2";
591 name = "pytest-profiling-1.2.6";
527 592 buildInputs = with self; [];
528 593 doCheck = false;
529 594 propagatedBuildInputs = with self; [six pytest gprof2dot];
530 595 src = fetchurl {
531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
596 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
597 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
533 598 };
534 599 meta = {
535 600 license = [ pkgs.lib.licenses.mit ];
536 601 };
537 602 };
538 603 pytest-runner = super.buildPythonPackage {
539 name = "pytest-runner-2.9";
604 name = "pytest-runner-2.11.1";
540 605 buildInputs = with self; [];
541 606 doCheck = false;
542 607 propagatedBuildInputs = with self; [];
543 608 src = fetchurl {
544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
609 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
610 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
546 611 };
547 612 meta = {
548 613 license = [ pkgs.lib.licenses.mit ];
549 614 };
550 615 };
551 616 pytest-sugar = super.buildPythonPackage {
552 name = "pytest-sugar-0.7.1";
617 name = "pytest-sugar-0.8.0";
553 618 buildInputs = with self; [];
554 619 doCheck = false;
555 620 propagatedBuildInputs = with self; [pytest termcolor];
556 621 src = fetchurl {
557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
622 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
623 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
559 624 };
560 625 meta = {
561 626 license = [ pkgs.lib.licenses.bsdOriginal ];
562 627 };
563 628 };
564 629 pytest-timeout = super.buildPythonPackage {
565 630 name = "pytest-timeout-1.2.0";
566 631 buildInputs = with self; [];
567 632 doCheck = false;
568 633 propagatedBuildInputs = with self; [pytest];
569 634 src = fetchurl {
570 635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
571 636 md5 = "83607d91aa163562c7ee835da57d061d";
572 637 };
573 638 meta = {
574 639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
575 640 };
576 641 };
577 642 repoze.lru = super.buildPythonPackage {
578 643 name = "repoze.lru-0.6";
579 644 buildInputs = with self; [];
580 645 doCheck = false;
581 646 propagatedBuildInputs = with self; [];
582 647 src = fetchurl {
583 648 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
584 649 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
585 650 };
586 651 meta = {
587 652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
588 653 };
589 654 };
590 655 rhodecode-vcsserver = super.buildPythonPackage {
591 name = "rhodecode-vcsserver-4.9.1";
656 name = "rhodecode-vcsserver-4.10.0";
592 657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
593 658 doCheck = true;
594 659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
595 660 src = ./.;
596 661 meta = {
597 662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
598 663 };
599 664 };
665 scandir = super.buildPythonPackage {
666 name = "scandir-1.5";
667 buildInputs = with self; [];
668 doCheck = false;
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
671 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
672 md5 = "a2713043de681bba6b084be42e7a8a44";
673 };
674 meta = {
675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 };
677 };
600 678 setuptools = super.buildPythonPackage {
601 679 name = "setuptools-30.1.0";
602 680 buildInputs = with self; [];
603 681 doCheck = false;
604 682 propagatedBuildInputs = with self; [];
605 683 src = fetchurl {
606 684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
607 685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
608 686 };
609 687 meta = {
610 688 license = [ pkgs.lib.licenses.mit ];
611 689 };
612 690 };
613 691 simplegeneric = super.buildPythonPackage {
614 692 name = "simplegeneric-0.8.1";
615 693 buildInputs = with self; [];
616 694 doCheck = false;
617 695 propagatedBuildInputs = with self; [];
618 696 src = fetchurl {
619 697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
620 698 md5 = "f9c1fab00fd981be588fc32759f474e3";
621 699 };
622 700 meta = {
623 701 license = [ pkgs.lib.licenses.zpt21 ];
624 702 };
625 703 };
626 704 simplejson = super.buildPythonPackage {
627 name = "simplejson-3.7.2";
705 name = "simplejson-3.11.1";
628 706 buildInputs = with self; [];
629 707 doCheck = false;
630 708 propagatedBuildInputs = with self; [];
631 709 src = fetchurl {
632 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
633 md5 = "a5fc7d05d4cb38492285553def5d4b46";
710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
634 712 };
635 713 meta = {
636 714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
637 715 };
638 716 };
639 717 six = super.buildPythonPackage {
640 718 name = "six-1.9.0";
641 719 buildInputs = with self; [];
642 720 doCheck = false;
643 721 propagatedBuildInputs = with self; [];
644 722 src = fetchurl {
645 723 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
646 724 md5 = "476881ef4012262dfc8adc645ee786c4";
647 725 };
648 726 meta = {
649 727 license = [ pkgs.lib.licenses.mit ];
650 728 };
651 729 };
652 730 subprocess32 = super.buildPythonPackage {
653 name = "subprocess32-3.2.6";
731 name = "subprocess32-3.2.7";
654 732 buildInputs = with self; [];
655 733 doCheck = false;
656 734 propagatedBuildInputs = with self; [];
657 735 src = fetchurl {
658 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
659 md5 = "754c5ab9f533e764f931136974b618f1";
736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
737 md5 = "824c801e479d3e916879aae3e9c15e16";
660 738 };
661 739 meta = {
662 740 license = [ pkgs.lib.licenses.psfl ];
663 741 };
664 742 };
665 743 subvertpy = super.buildPythonPackage {
666 744 name = "subvertpy-0.9.3";
667 745 buildInputs = with self; [];
668 746 doCheck = false;
669 747 propagatedBuildInputs = with self; [];
670 748 src = fetchurl {
671 749 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
672 750 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
673 751 };
674 752 meta = {
675 753 license = [ pkgs.lib.licenses.lgpl21Plus ];
676 754 };
677 755 };
678 756 termcolor = super.buildPythonPackage {
679 757 name = "termcolor-1.1.0";
680 758 buildInputs = with self; [];
681 759 doCheck = false;
682 760 propagatedBuildInputs = with self; [];
683 761 src = fetchurl {
684 762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
685 763 md5 = "043e89644f8909d462fbbfa511c768df";
686 764 };
687 765 meta = {
688 766 license = [ pkgs.lib.licenses.mit ];
689 767 };
690 768 };
691 769 traitlets = super.buildPythonPackage {
692 770 name = "traitlets-4.3.2";
693 771 buildInputs = with self; [];
694 772 doCheck = false;
695 773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
696 774 src = fetchurl {
697 775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
698 776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
699 777 };
700 778 meta = {
701 779 license = [ pkgs.lib.licenses.bsdOriginal ];
702 780 };
703 781 };
704 782 translationstring = super.buildPythonPackage {
705 783 name = "translationstring-1.3";
706 784 buildInputs = with self; [];
707 785 doCheck = false;
708 786 propagatedBuildInputs = with self; [];
709 787 src = fetchurl {
710 788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
711 789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
712 790 };
713 791 meta = {
714 792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
715 793 };
716 794 };
717 795 venusian = super.buildPythonPackage {
718 name = "venusian-1.0";
796 name = "venusian-1.1.0";
719 797 buildInputs = with self; [];
720 798 doCheck = false;
721 799 propagatedBuildInputs = with self; [];
722 800 src = fetchurl {
723 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
724 md5 = "dccf2eafb7113759d60c86faf5538756";
801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
725 803 };
726 804 meta = {
727 805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
728 806 };
729 807 };
730 808 waitress = super.buildPythonPackage {
731 name = "waitress-1.0.1";
809 name = "waitress-1.0.2";
732 810 buildInputs = with self; [];
733 811 doCheck = false;
734 812 propagatedBuildInputs = with self; [];
735 813 src = fetchurl {
736 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
737 md5 = "dda92358a7569669086155923a46e57c";
814 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
815 md5 = "b968f39e95d609f6194c6e50425d4bb7";
738 816 };
739 817 meta = {
740 818 license = [ pkgs.lib.licenses.zpt21 ];
741 819 };
742 820 };
743 821 wcwidth = super.buildPythonPackage {
744 822 name = "wcwidth-0.1.7";
745 823 buildInputs = with self; [];
746 824 doCheck = false;
747 825 propagatedBuildInputs = with self; [];
748 826 src = fetchurl {
749 827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
750 828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
751 829 };
752 830 meta = {
753 831 license = [ pkgs.lib.licenses.mit ];
754 832 };
755 833 };
756 834 wheel = super.buildPythonPackage {
757 835 name = "wheel-0.29.0";
758 836 buildInputs = with self; [];
759 837 doCheck = false;
760 838 propagatedBuildInputs = with self; [];
761 839 src = fetchurl {
762 840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
763 841 md5 = "555a67e4507cedee23a0deb9651e452f";
764 842 };
765 843 meta = {
766 844 license = [ pkgs.lib.licenses.mit ];
767 845 };
768 846 };
769 847 zope.deprecation = super.buildPythonPackage {
770 848 name = "zope.deprecation-4.1.2";
771 849 buildInputs = with self; [];
772 850 doCheck = false;
773 851 propagatedBuildInputs = with self; [setuptools];
774 852 src = fetchurl {
775 853 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
776 854 md5 = "e9a663ded58f4f9f7881beb56cae2782";
777 855 };
778 856 meta = {
779 857 license = [ pkgs.lib.licenses.zpt21 ];
780 858 };
781 859 };
782 860 zope.interface = super.buildPythonPackage {
783 861 name = "zope.interface-4.1.3";
784 862 buildInputs = with self; [];
785 863 doCheck = false;
786 864 propagatedBuildInputs = with self; [setuptools];
787 865 src = fetchurl {
788 866 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
789 867 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
790 868 };
791 869 meta = {
792 870 license = [ pkgs.lib.licenses.zpt21 ];
793 871 };
794 872 };
795 873
796 874 ### Test requirements
797 875
798 876
799 877 }
@@ -1,40 +1,41 @@
1 1 ## core
2 2 setuptools==30.1.0
3 3
4 Beaker==1.7.0
4 Beaker==1.9.0
5 5 configobj==5.0.6
6 6 decorator==4.0.11
7 7 dulwich==0.13.0
8 hgsubversion==1.8.6
8 hgsubversion==1.8.7
9 9 hg-evolve==6.6.0
10 10 infrae.cache==1.0.1
11 11 mercurial==4.2.3
12 12 msgpack-python==0.4.8
13 13 pyramid-jinja2==2.5
14 pyramid==1.7.4
14 pyramid==1.9.1
15 15 pyramid-mako==1.0.2
16 16 repoze.lru==0.6
17 simplejson==3.7.2
18 subprocess32==3.2.6
17 simplejson==3.11.1
18 subprocess32==3.2.7
19 19
20 20 # Custom subvertpy that is not available on pypi.
21 21 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
22 22
23 23 six==1.9.0
24 24 translationstring==1.3
25 WebOb==1.3.1
25 WebOb==1.7.3
26 26 wheel==0.29.0
27 27 zope.deprecation==4.1.2
28 28 zope.interface==4.1.3
29 29
30 ## http servers
31 gevent==1.2.2
32 greenlet==0.4.12
33 gunicorn==19.7.1
34 waitress==1.0.2
35
30 36 ## debug
31 ipdb==0.10.1
37 ipdb==0.10.3
32 38 ipython==5.1.0
33 # http servers
34 gevent==1.1.2
35 greenlet==0.4.10
36 gunicorn==19.6.0
37 waitress==1.0.1
38 39
39 40 ## test related requirements
40 41 -r requirements_test.txt
@@ -1,15 +1,15 @@
1 1 # test related requirements
2 pytest==3.0.5
3 py==1.4.31
4 pytest-cov==2.4.0
5 pytest-sugar==0.7.1
6 pytest-runner==2.9.0
2 pytest==3.1.2
3 py==1.4.34
4 pytest-cov==2.5.1
5 pytest-sugar==0.8.0
6 pytest-runner==2.11.1
7 7 pytest-catchlog==1.2.2
8 pytest-profiling==1.2.2
8 pytest-profiling==1.2.6
9 9 gprof2dot==2016.10.13
10 10 pytest-timeout==1.2.0
11 11
12 12 mock==1.0.1
13 WebTest==1.4.3
13 WebTest==2.0.27
14 14 cov-core==1.15.0
15 15 coverage==3.7.1
@@ -1,1 +1,1 @@
1 4.9.1 No newline at end of file
1 4.10.0 No newline at end of file
@@ -1,34 +1,54 @@
1 1 """
2 2 Implementation of :class:`EchoApp`.
3 3
4 4 This WSGI application will just echo back the data which it receives.
5 5 """
6 6
7 7 import logging
8 8
9 9
10 10 log = logging.getLogger(__name__)
11 11
12 12
13 13 class EchoApp(object):
14 14
15 15 def __init__(self, repo_path, repo_name, config):
16 16 self._repo_path = repo_path
17 17 log.info("EchoApp initialized for %s", repo_path)
18 18
19 19 def __call__(self, environ, start_response):
20 20 log.debug("EchoApp called for %s", self._repo_path)
21 21 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
22 22 environ['wsgi.input'].read()
23 23 status = '200 OK'
24 headers = []
24 headers = [('Content-Type', 'text/plain')]
25 25 start_response(status, headers)
26 26 return ["ECHO"]
27 27
28 28
29 class EchoAppStream(object):
30
31 def __init__(self, repo_path, repo_name, config):
32 self._repo_path = repo_path
33 log.info("EchoApp initialized for %s", repo_path)
34
35 def __call__(self, environ, start_response):
36 log.debug("EchoApp called for %s", self._repo_path)
37 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
38 environ['wsgi.input'].read()
39 status = '200 OK'
40 headers = [('Content-Type', 'text/plain')]
41 start_response(status, headers)
42
43 def generator():
44 for _ in xrange(1000000):
45 yield "ECHO"
46 return generator()
47
48
29 49 def create_app():
30 50 """
31 51 Allows to run this app directly in a WSGI server.
32 52 """
33 53 stub_config = {}
34 54 return EchoApp('stub_path', 'stub_name', stub_config)
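The stub above can be exercised directly in a local WSGI server. The following is a minimal sketch, not part of the changeset, which assumes create_app() from this echo module is already in scope; the host and port are arbitrary illustration values:

    from wsgiref.simple_server import make_server

    # create_app() is the helper defined at the end of the echo module above;
    # it wires up EchoApp with stub path/name/config values.
    app = create_app()
    server = make_server('127.0.0.1', 8080, app)  # address chosen for illustration only
    server.serve_forever()

The new EchoAppStream variant behaves the same way, except that its __call__ returns a generator, so the response body is produced as many small "ECHO" chunks rather than a single list element.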
@@ -1,644 +1,645 @@
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import logging
19 19 import os
20 20 import posixpath as vcspath
21 21 import re
22 22 import stat
23 23 import urllib
24 24 import urllib2
25 25 from functools import wraps
26 26
27 27 from dulwich import index, objects
28 28 from dulwich.client import HttpGitClient, LocalGitClient
29 29 from dulwich.errors import (
30 30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 31 MissingCommitError, ObjectMissing, HangupException,
32 32 UnexpectedCommandError)
33 33 from dulwich.repo import Repo as DulwichRepo, Tag
34 34 from dulwich.server import update_server_info
35 35
36 36 from vcsserver import exceptions, settings, subprocessio
37 37 from vcsserver.utils import safe_str
38 38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 39 from vcsserver.hgcompat import (
40 40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 41 from vcsserver.git_lfs.lib import LFSOidStore
42 42
43 43 DIR_STAT = stat.S_IFDIR
44 44 FILE_MODE = stat.S_IFMT
45 45 GIT_LINK = objects.S_IFGITLINK
46 46
47 47 log = logging.getLogger(__name__)
48 48
49 49
50 50 def reraise_safe_exceptions(func):
51 51 """Converts Dulwich exceptions to something neutral."""
52 52 @wraps(func)
53 53 def wrapper(*args, **kwargs):
54 54 try:
55 55 return func(*args, **kwargs)
56 56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 57 ObjectMissing) as e:
58 58 raise exceptions.LookupException(e.message)
59 59 except (HangupException, UnexpectedCommandError) as e:
60 60 raise exceptions.VcsException(e.message)
61 61 except Exception as e:
62 62 # NOTE(marcink): because of how dulwich handles some exceptions
63 63 # (KeyError on empty repos), we cannot track this and catch all
64 64 # exceptions, it's an exceptions from other handlers
65 65 #if not hasattr(e, '_vcs_kind'):
66 66 #log.exception("Unhandled exception in git remote call")
67 67 #raise_from_original(exceptions.UnhandledException)
68 68 raise
69 69 return wrapper
70 70
71 71
72 72 class Repo(DulwichRepo):
73 73 """
74 74 A wrapper for dulwich Repo class.
75 75
76 76 Since dulwich sometimes keeps .idx file descriptors open, this can lead to
77 77 a "Too many open files" error. We need to close all opened file descriptors
78 78 once the repo object is destroyed.
79 79
80 80 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 81 to 0.12.0 +
82 82 """
83 83 def __del__(self):
84 84 if hasattr(self, 'object_store'):
85 85 self.close()
86 86
87 87
88 88 class GitFactory(RepoFactory):
89 89
90 90 def _create_repo(self, wire, create):
91 91 repo_path = str_to_dulwich(wire['path'])
92 92 return Repo(repo_path)
93 93
94 94
95 95 class GitRemote(object):
96 96
97 97 def __init__(self, factory):
98 98 self._factory = factory
99 99
100 100 self._bulk_methods = {
101 101 "author": self.commit_attribute,
102 102 "date": self.get_object_attrs,
103 103 "message": self.commit_attribute,
104 104 "parents": self.commit_attribute,
105 105 "_commit": self.revision,
106 106 }
107 107
108 108 def _wire_to_config(self, wire):
109 109 if 'config' in wire:
110 110 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 111 return {}
112 112
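As a quick illustration of the flattening done by `_wire_to_config` above, the standalone sketch below applies the same transformation to a made-up `wire` dict (the section/option/value triples are invented for the example):

# standalone sketch; mirrors the dict comprehension in _wire_to_config
wire = {'config': [
    ('vcs_git_lfs', 'enabled', 'true'),
    ('vcs_git_lfs', 'store_location', '/var/lib/rc/lfs_store'),
]}
flat = dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
# flat == {'vcs_git_lfs_enabled': 'true',
#          'vcs_git_lfs_store_location': '/var/lib/rc/lfs_store'}
print(flat)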
113 113 def _assign_ref(self, wire, ref, commit_id):
114 114 repo = self._factory.repo(wire)
115 115 repo[ref] = commit_id
116 116
117 117 @reraise_safe_exceptions
118 118 def add_object(self, wire, content):
119 119 repo = self._factory.repo(wire)
120 120 blob = objects.Blob()
121 121 blob.set_raw_string(content)
122 122 repo.object_store.add_object(blob)
123 123 return blob.id
124 124
125 125 @reraise_safe_exceptions
126 126 def assert_correct_path(self, wire):
127 127 try:
128 128 self._factory.repo(wire)
129 129 except NotGitRepository as e:
130 130 # Exception can contain unicode which we convert
131 131 raise exceptions.AbortException(repr(e))
132 132
133 133 @reraise_safe_exceptions
134 134 def bare(self, wire):
135 135 repo = self._factory.repo(wire)
136 136 return repo.bare
137 137
138 138 @reraise_safe_exceptions
139 139 def blob_as_pretty_string(self, wire, sha):
140 140 repo = self._factory.repo(wire)
141 141 return repo[sha].as_pretty_string()
142 142
143 143 @reraise_safe_exceptions
144 144 def blob_raw_length(self, wire, sha):
145 145 repo = self._factory.repo(wire)
146 146 blob = repo[sha]
147 147 return blob.raw_length()
148 148
149 149 def _parse_lfs_pointer(self, raw_content):
150 150
151 151 spec_string = 'version https://git-lfs.github.com/spec'
152 152 if raw_content and raw_content.startswith(spec_string):
153 153 pattern = re.compile(r"""
154 154 (?:\n)?
155 155 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 156 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 157 ^size[ ](?P<oid_size>[0-9]+)\n
158 158 (?:\n)?
159 159 """, re.VERBOSE | re.MULTILINE)
160 160 match = pattern.match(raw_content)
161 161 if match:
162 162 return match.groupdict()
163 163
164 164 return {}
165 165
166 166 @reraise_safe_exceptions
167 167 def is_large_file(self, wire, sha):
168 168 repo = self._factory.repo(wire)
169 169 blob = repo[sha]
170 170 return self._parse_lfs_pointer(blob.as_raw_string())
171 171
172 172 @reraise_safe_exceptions
173 173 def in_largefiles_store(self, wire, oid):
174 174 repo = self._factory.repo(wire)
175 175 conf = self._wire_to_config(wire)
176 176
177 177 store_location = conf.get('vcs_git_lfs_store_location')
178 178 if store_location:
179 179 repo_name = repo.path
180 180 store = LFSOidStore(
181 181 oid=oid, repo=repo_name, store_location=store_location)
182 182 return store.has_oid()
183 183
184 184 return False
185 185
186 186 @reraise_safe_exceptions
187 187 def store_path(self, wire, oid):
188 188 repo = self._factory.repo(wire)
189 189 conf = self._wire_to_config(wire)
190 190
191 191 store_location = conf.get('vcs_git_lfs_store_location')
192 192 if store_location:
193 193 repo_name = repo.path
194 194 store = LFSOidStore(
195 195 oid=oid, repo=repo_name, store_location=store_location)
196 196 return store.oid_path
197 197 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198 198
199 199 @reraise_safe_exceptions
200 200 def bulk_request(self, wire, rev, pre_load):
201 201 result = {}
202 202 for attr in pre_load:
203 203 try:
204 204 method = self._bulk_methods[attr]
205 205 args = [wire, rev]
206 206 if attr == "date":
207 207 args.extend(["commit_time", "commit_timezone"])
208 208 elif attr in ["author", "message", "parents"]:
209 209 args.append(attr)
210 210 result[attr] = method(*args)
211 211 except KeyError:
212 212 raise exceptions.VcsException(
213 213 "Unknown bulk attribute: %s" % attr)
214 214 return result
215 215
216 216 def _build_opener(self, url):
217 217 handlers = []
218 218 url_obj = url_parser(url)
219 219 _, authinfo = url_obj.authinfo()
220 220
221 221 if authinfo:
222 222 # create a password manager
223 223 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
224 224 passmgr.add_password(*authinfo)
225 225
226 226 handlers.extend((httpbasicauthhandler(passmgr),
227 227 httpdigestauthhandler(passmgr)))
228 228
229 229 return urllib2.build_opener(*handlers)
230 230
231 231 @reraise_safe_exceptions
232 232 def check_url(self, url, config):
233 233 url_obj = url_parser(url)
234 234 test_uri, _ = url_obj.authinfo()
235 235 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
236 236 url_obj.query = obfuscate_qs(url_obj.query)
237 237 cleaned_uri = str(url_obj)
238 238 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
239 239
240 240 if not test_uri.endswith('info/refs'):
241 241 test_uri = test_uri.rstrip('/') + '/info/refs'
242 242
243 243 o = self._build_opener(url)
244 244 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
245 245
246 246 q = {"service": 'git-upload-pack'}
247 247 qs = '?%s' % urllib.urlencode(q)
248 248 cu = "%s%s" % (test_uri, qs)
249 249 req = urllib2.Request(cu, None, {})
250 250
251 251 try:
252 252 log.debug("Trying to open URL %s", cleaned_uri)
253 253 resp = o.open(req)
254 254 if resp.code != 200:
255 255 raise exceptions.URLError('Return Code is not 200')
256 256 except Exception as e:
257 257 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
258 258 # means it cannot be cloned
259 259 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
260 260
261 261 # now detect if it's a proper git repo
262 262 gitdata = resp.read()
263 263 if 'service=git-upload-pack' in gitdata:
264 264 pass
265 265 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
266 266 # old-style git can return some other format!
267 267 pass
268 268 else:
269 269 raise exceptions.URLError(
270 270 "url [%s] does not look like an git" % (cleaned_uri,))
271 271
272 272 return True
273 273
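To make the probe in `check_url` concrete, this standalone sketch builds the same `info/refs` discovery URL for a made-up remote; a smart-HTTP git server answers it with a `service=git-upload-pack` advertisement:

# standalone sketch (URL is made up); mirrors the query built in check_url above
test_uri = 'https://code.example.com/repos/project.git'
if not test_uri.endswith('info/refs'):
    test_uri = test_uri.rstrip('/') + '/info/refs'
probe_url = '%s?%s' % (test_uri, 'service=git-upload-pack')
# -> https://code.example.com/repos/project.git/info/refs?service=git-upload-pack
print(probe_url)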
274 274 @reraise_safe_exceptions
275 275 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
276 276 remote_refs = self.fetch(wire, url, apply_refs=False)
277 277 repo = self._factory.repo(wire)
278 278 if isinstance(valid_refs, list):
279 279 valid_refs = tuple(valid_refs)
280 280
281 281 for k in remote_refs:
282 282 # only parse heads/tags and skip so-called deferred tags
283 283 if k.startswith(valid_refs) and not k.endswith(deferred):
284 284 repo[k] = remote_refs[k]
285 285
286 286 if update_after_clone:
287 287 # we want to checkout HEAD
288 288 repo["HEAD"] = remote_refs["HEAD"]
289 289 index.build_index_from_tree(repo.path, repo.index_path(),
290 290 repo.object_store, repo["HEAD"].tree)
291 291
292 292 # TODO: this is quite complex, check if that can be simplified
293 293 @reraise_safe_exceptions
294 294 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
295 295 repo = self._factory.repo(wire)
296 296 object_store = repo.object_store
297 297
298 298 # Create tree and populates it with blobs
299 299 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
300 300
301 301 for node in updated:
302 302 # Compute subdirs if needed
303 303 dirpath, nodename = vcspath.split(node['path'])
304 304 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
305 305 parent = commit_tree
306 306 ancestors = [('', parent)]
307 307
308 308 # Tries to dig for the deepest existing tree
309 309 while dirnames:
310 310 curdir = dirnames.pop(0)
311 311 try:
312 312 dir_id = parent[curdir][1]
313 313 except KeyError:
314 314 # put curdir back into dirnames and stop
315 315 dirnames.insert(0, curdir)
316 316 break
317 317 else:
318 318 # If found, updates parent
319 319 parent = repo[dir_id]
320 320 ancestors.append((curdir, parent))
321 321 # Now parent is deepest existing tree and we need to create
322 322 # subtrees for dirnames (in reverse order)
323 323 # [this only applies for nodes from added]
324 324 new_trees = []
325 325
326 326 blob = objects.Blob.from_string(node['content'])
327 327
328 328 if dirnames:
329 329 # If there are trees which should be created we need to build
330 330 # them now (in reverse order)
331 331 reversed_dirnames = list(reversed(dirnames))
332 332 curtree = objects.Tree()
333 333 curtree[node['node_path']] = node['mode'], blob.id
334 334 new_trees.append(curtree)
335 335 for dirname in reversed_dirnames[:-1]:
336 336 newtree = objects.Tree()
337 337 newtree[dirname] = (DIR_STAT, curtree.id)
338 338 new_trees.append(newtree)
339 339 curtree = newtree
340 340 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
341 341 else:
342 342 parent.add(
343 343 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
344 344
345 345 new_trees.append(parent)
346 346 # Update ancestors
347 347 reversed_ancestors = reversed(
348 348 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
349 349 for parent, tree, path in reversed_ancestors:
350 350 parent[path] = (DIR_STAT, tree.id)
351 351 object_store.add_object(tree)
352 352
353 353 object_store.add_object(blob)
354 354 for tree in new_trees:
355 355 object_store.add_object(tree)
356 356
357 357 for node_path in removed:
358 358 paths = node_path.split('/')
359 359 tree = commit_tree
360 360 trees = [tree]
361 361 # Traverse deep into the forest...
362 362 for path in paths:
363 363 try:
364 364 obj = repo[tree[path][1]]
365 365 if isinstance(obj, objects.Tree):
366 366 trees.append(obj)
367 367 tree = obj
368 368 except KeyError:
369 369 break
370 370 # Cut down the blob and all rotten trees on the way back...
371 371 for path, tree in reversed(zip(paths, trees)):
372 372 del tree[path]
373 373 if tree:
374 374 # This tree still has elements - don't remove it or any
375 375 # of its parents
376 376 break
377 377
378 378 object_store.add_object(commit_tree)
379 379
380 380 # Create commit
381 381 commit = objects.Commit()
382 382 commit.tree = commit_tree.id
383 383 for k, v in commit_data.iteritems():
384 384 setattr(commit, k, v)
385 385 object_store.add_object(commit)
386 386
387 387 ref = 'refs/heads/%s' % branch
388 388 repo.refs[ref] = commit.id
389 389
390 390 return commit.id
391 391
392 392 @reraise_safe_exceptions
393 393 def fetch(self, wire, url, apply_refs=True, refs=None):
394 394 if url != 'default' and '://' not in url:
395 395 client = LocalGitClient(url)
396 396 else:
397 397 url_obj = url_parser(url)
398 398 o = self._build_opener(url)
399 399 url, _ = url_obj.authinfo()
400 400 client = HttpGitClient(base_url=url, opener=o)
401 401 repo = self._factory.repo(wire)
402 402
403 403 determine_wants = repo.object_store.determine_wants_all
404 404 if refs:
405 405 def determine_wants_requested(references):
406 406 return [references[r] for r in references if r in refs]
407 407 determine_wants = determine_wants_requested
408 408
409 409 try:
410 410 remote_refs = client.fetch(
411 411 path=url, target=repo, determine_wants=determine_wants)
412 412 except NotGitRepository as e:
413 413 log.warning(
414 414 'Trying to fetch from "%s" failed, not a Git repository.', url)
415 415 # Exception can contain unicode which we convert
416 416 raise exceptions.AbortException(repr(e))
417 417
418 418 # mikhail: client.fetch() returns all the remote refs, but fetches only
419 419 # refs filtered by `determine_wants` function. We need to filter result
420 420 # as well
421 421 if refs:
422 422 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
423 423
424 424 if apply_refs:
425 425 # TODO: johbo: Needs proper test coverage with a git repository
426 426 # that contains a tag object, so that we would end up with
427 427 # a peeled ref at this point.
428 428 PEELED_REF_MARKER = '^{}'
429 429 for k in remote_refs:
430 430 if k.endswith(PEELED_REF_MARKER):
431 431 log.info("Skipping peeled reference %s", k)
432 432 continue
433 433 repo[k] = remote_refs[k]
434 434
435 435 if refs:
436 436 # mikhail: explicitly set the head to the last ref.
437 437 repo['HEAD'] = remote_refs[refs[-1]]
438 438
439 439 # TODO: mikhail: should we return remote_refs here to be
440 440 # consistent?
441 441 else:
442 442 return remote_refs
443 443
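The peeled-ref handling in `fetch` above can be illustrated without a real repository: annotated tags show up twice in the advertised refs, once as the tag object and once peeled with a `^{}` suffix, and only the unpeeled entries are applied (the sha values below are fabricated):

# standalone sketch of the PEELED_REF_MARKER filtering done in fetch
PEELED_REF_MARKER = '^{}'
remote_refs = {
    'refs/heads/master': 'sha-of-commit',
    'refs/tags/v1.0': 'sha-of-tag-object',
    'refs/tags/v1.0^{}': 'sha-of-tagged-commit',
}
applied = dict((k, v) for k, v in remote_refs.items()
               if not k.endswith(PEELED_REF_MARKER))
# keeps 'refs/heads/master' and 'refs/tags/v1.0' only
print(sorted(applied))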
444 444 @reraise_safe_exceptions
445 445 def get_remote_refs(self, wire, url):
446 446 repo = Repo(url)
447 447 return repo.get_refs()
448 448
449 449 @reraise_safe_exceptions
450 450 def get_description(self, wire):
451 451 repo = self._factory.repo(wire)
452 452 return repo.get_description()
453 453
454 454 @reraise_safe_exceptions
455 455 def get_file_history(self, wire, file_path, commit_id, limit):
456 456 repo = self._factory.repo(wire)
457 457 include = [commit_id]
458 458 paths = [file_path]
459 459
460 460 walker = repo.get_walker(include, paths=paths, max_entries=limit)
461 461 return [x.commit.id for x in walker]
462 462
463 463 @reraise_safe_exceptions
464 464 def get_missing_revs(self, wire, rev1, rev2, path2):
465 465 repo = self._factory.repo(wire)
466 466 LocalGitClient(thin_packs=False).fetch(path2, repo)
467 467
468 468 wire_remote = wire.copy()
469 469 wire_remote['path'] = path2
470 470 repo_remote = self._factory.repo(wire_remote)
471 471 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
472 472
473 473 revs = [
474 474 x.commit.id
475 475 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
476 476 return revs
477 477
478 478 @reraise_safe_exceptions
479 479 def get_object(self, wire, sha):
480 480 repo = self._factory.repo(wire)
481 481 obj = repo.get_object(sha)
482 482 commit_id = obj.id
483 483
484 484 if isinstance(obj, Tag):
485 485 commit_id = obj.object[1]
486 486
487 487 return {
488 488 'id': obj.id,
489 489 'type': obj.type_name,
490 490 'commit_id': commit_id
491 491 }
492 492
493 493 @reraise_safe_exceptions
494 494 def get_object_attrs(self, wire, sha, *attrs):
495 495 repo = self._factory.repo(wire)
496 496 obj = repo.get_object(sha)
497 497 return list(getattr(obj, a) for a in attrs)
498 498
499 499 @reraise_safe_exceptions
500 500 def get_refs(self, wire):
501 501 repo = self._factory.repo(wire)
502 502 result = {}
503 503 for ref, sha in repo.refs.as_dict().items():
504 504 peeled_sha = repo.get_peeled(ref)
505 505 result[ref] = peeled_sha
506 506 return result
507 507
508 508 @reraise_safe_exceptions
509 509 def get_refs_path(self, wire):
510 510 repo = self._factory.repo(wire)
511 511 return repo.refs.path
512 512
513 513 @reraise_safe_exceptions
514 514 def head(self, wire):
515 515 repo = self._factory.repo(wire)
516 516 return repo.head()
517 517
518 518 @reraise_safe_exceptions
519 519 def init(self, wire):
520 520 repo_path = str_to_dulwich(wire['path'])
521 521 self.repo = Repo.init(repo_path)
522 522
523 523 @reraise_safe_exceptions
524 524 def init_bare(self, wire):
525 525 repo_path = str_to_dulwich(wire['path'])
526 526 self.repo = Repo.init_bare(repo_path)
527 527
528 528 @reraise_safe_exceptions
529 529 def revision(self, wire, rev):
530 530 repo = self._factory.repo(wire)
531 531 obj = repo[rev]
532 532 obj_data = {
533 533 'id': obj.id,
534 534 }
535 535 try:
536 536 obj_data['tree'] = obj.tree
537 537 except AttributeError:
538 538 pass
539 539 return obj_data
540 540
541 541 @reraise_safe_exceptions
542 542 def commit_attribute(self, wire, rev, attr):
543 543 repo = self._factory.repo(wire)
544 544 obj = repo[rev]
545 545 return getattr(obj, attr)
546 546
547 547 @reraise_safe_exceptions
548 548 def set_refs(self, wire, key, value):
549 549 repo = self._factory.repo(wire)
550 550 repo.refs[key] = value
551 551
552 552 @reraise_safe_exceptions
553 553 def remove_ref(self, wire, key):
554 554 repo = self._factory.repo(wire)
555 555 del repo.refs[key]
556 556
557 557 @reraise_safe_exceptions
558 558 def tree_changes(self, wire, source_id, target_id):
559 559 repo = self._factory.repo(wire)
560 560 source = repo[source_id].tree if source_id else None
561 561 target = repo[target_id].tree
562 562 result = repo.object_store.tree_changes(source, target)
563 563 return list(result)
564 564
565 565 @reraise_safe_exceptions
566 566 def tree_items(self, wire, tree_id):
567 567 repo = self._factory.repo(wire)
568 568 tree = repo[tree_id]
569 569
570 570 result = []
571 571 for item in tree.iteritems():
572 572 item_sha = item.sha
573 573 item_mode = item.mode
574 574
575 575 if FILE_MODE(item_mode) == GIT_LINK:
576 576 item_type = "link"
577 577 else:
578 578 item_type = repo[item_sha].type_name
579 579
580 580 result.append((item.path, item_mode, item_sha, item_type))
581 581 return result
582 582
583 583 @reraise_safe_exceptions
584 584 def update_server_info(self, wire):
585 585 repo = self._factory.repo(wire)
586 586 update_server_info(repo)
587 587
588 588 @reraise_safe_exceptions
589 589 def discover_git_version(self):
590 590 stdout, _ = self.run_git_command(
591 591 {}, ['--version'], _bare=True, _safe=True)
592 592 prefix = 'git version'
593 593 if stdout.startswith(prefix):
594 594 stdout = stdout[len(prefix):]
595 595 return stdout.strip()
596 596
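A minimal illustration of the version-string handling in `discover_git_version`: the output of `git --version` carries a `git version` prefix, which is stripped before returning (the version number below is just an example):

stdout = 'git version 2.7.4\n'   # example output of `git --version`
prefix = 'git version'
if stdout.startswith(prefix):
    stdout = stdout[len(prefix):]
print(stdout.strip())            # -> '2.7.4'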
597 597 @reraise_safe_exceptions
598 598 def run_git_command(self, wire, cmd, **opts):
599 599 path = wire.get('path', None)
600 600
601 601 if path and os.path.isdir(path):
602 602 opts['cwd'] = path
603 603
604 604 if '_bare' in opts:
605 605 _copts = []
606 606 del opts['_bare']
607 607 else:
608 608 _copts = ['-c', 'core.quotepath=false', ]
609 609 safe_call = False
610 610 if '_safe' in opts:
611 611 # no exc on failure
612 612 del opts['_safe']
613 613 safe_call = True
614 614
615 615 gitenv = os.environ.copy()
616 616 gitenv.update(opts.pop('extra_env', {}))
617 617 # need to clean up/unset GIT_DIR!
618 618 if 'GIT_DIR' in gitenv:
619 619 del gitenv['GIT_DIR']
620 620 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
621 621
622 622 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
623 623
624 624 try:
625 625 _opts = {'env': gitenv, 'shell': False}
626 626 _opts.update(opts)
627 627 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
628 628
629 629 return ''.join(p), ''.join(p.error)
630 630 except (EnvironmentError, OSError) as err:
631 cmd = ' '.join(cmd) # human friendly CMD
631 632 tb_err = ("Couldn't run git command (%s).\n"
632 633 "Original error was:%s\n" % (cmd, err))
633 634 log.exception(tb_err)
634 635 if safe_call:
635 636 return '', err
636 637 else:
637 638 raise exceptions.VcsException(tb_err)
638 639
639 640
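The command assembly in `run_git_command` can be sketched in isolation: unless `_bare` is passed, `-c core.quotepath=false` is injected right after the executable (the executable path and arguments below are placeholders):

GIT_EXECUTABLE = '/usr/bin/git'          # stand-in for settings.GIT_EXECUTABLE
cmd = ['log', '--oneline', '-n', '5']    # arguments a caller might pass
opts = {}                                # no _bare flag -> extra -c options added

_copts = [] if '_bare' in opts else ['-c', 'core.quotepath=false']
full_cmd = [GIT_EXECUTABLE] + _copts + cmd
# -> ['/usr/bin/git', '-c', 'core.quotepath=false', 'log', '--oneline', '-n', '5']
print(full_cmd)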
640 641 def str_to_dulwich(value):
641 642 """
642 643 Dulwich 0.10.1a requires `unicode` objects to be passed in.
643 644 """
644 645 return value.decode(settings.WIRE_ENCODING)
@@ -1,746 +1,749 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import io
19 19 import logging
20 20 import stat
21 21 import urllib
22 22 import urllib2
23 23
24 24 from hgext import largefiles, rebase
25 25 from hgext.strip import strip as hgext_strip
26 26 from mercurial import commands
27 27 from mercurial import unionrepo
28 28 from mercurial import verify
29 29
30 30 from vcsserver import exceptions
31 31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 32 from vcsserver.hgcompat import (
33 33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
37 InterventionRequired, RequirementError)
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 RepoLookupError, InterventionRequired, RequirementError)
38 38
39 39 log = logging.getLogger(__name__)
40 40
41 41
42 42 def make_ui_from_config(repo_config):
43 43 baseui = ui.ui()
44 44
45 45 # clean the baseui object
46 46 baseui._ocfg = hgconfig.config()
47 47 baseui._ucfg = hgconfig.config()
48 48 baseui._tcfg = hgconfig.config()
49 49
50 50 for section, option, value in repo_config:
51 51 baseui.setconfig(section, option, value)
52 52
53 53 # make our hgweb quiet so it doesn't print output
54 54 baseui.setconfig('ui', 'quiet', 'true')
55 55
56 56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 57 # signal in a non-main thread, thus generating a ValueError.
58 58 baseui.setconfig('worker', 'numcpus', 1)
59 59
60 60 # If there is no config for the largefiles extension, we explicitly disable
61 61 # it here. This overrides settings from the repository's hgrc file. Recent
62 62 # mercurial versions enable largefiles in hgrc on clone from a largefile
63 63 # repo.
64 64 if not baseui.hasconfig('extensions', 'largefiles'):
65 65 log.debug('Explicitly disable largefiles extension for repo.')
66 66 baseui.setconfig('extensions', 'largefiles', '!')
67 67
68 68 return baseui
69 69
70 70
71 71 def reraise_safe_exceptions(func):
72 72 """Decorator for converting mercurial exceptions to something neutral."""
73 73 def wrapper(*args, **kwargs):
74 74 try:
75 75 return func(*args, **kwargs)
76 76 except (Abort, InterventionRequired):
77 77 raise_from_original(exceptions.AbortException)
78 78 except RepoLookupError:
79 79 raise_from_original(exceptions.LookupException)
80 80 except RequirementError:
81 81 raise_from_original(exceptions.RequirementException)
82 82 except RepoError:
83 83 raise_from_original(exceptions.VcsException)
84 84 except LookupError:
85 85 raise_from_original(exceptions.LookupException)
86 86 except Exception as e:
87 87 if not hasattr(e, '_vcs_kind'):
88 88 log.exception("Unhandled exception in hg remote call")
89 89 raise_from_original(exceptions.UnhandledException)
90 90 raise
91 91 return wrapper
92 92
93 93
94 94 class MercurialFactory(RepoFactory):
95 95
96 96 def _create_config(self, config, hooks=True):
97 97 if not hooks:
98 98 hooks_to_clean = frozenset((
99 99 'changegroup.repo_size', 'preoutgoing.pre_pull',
100 100 'outgoing.pull_logger', 'prechangegroup.pre_push'))
101 101 new_config = []
102 102 for section, option, value in config:
103 103 if section == 'hooks' and option in hooks_to_clean:
104 104 continue
105 105 new_config.append((section, option, value))
106 106 config = new_config
107 107
108 108 baseui = make_ui_from_config(config)
109 109 return baseui
110 110
111 111 def _create_repo(self, wire, create):
112 112 baseui = self._create_config(wire["config"])
113 113 return localrepository(baseui, wire["path"], create)
114 114
115 115
116 116 class HgRemote(object):
117 117
118 118 def __init__(self, factory):
119 119 self._factory = factory
120 120
121 121 self._bulk_methods = {
122 122 "affected_files": self.ctx_files,
123 123 "author": self.ctx_user,
124 124 "branch": self.ctx_branch,
125 125 "children": self.ctx_children,
126 126 "date": self.ctx_date,
127 127 "message": self.ctx_description,
128 128 "parents": self.ctx_parents,
129 129 "status": self.ctx_status,
130 "obsolete": self.ctx_obsolete,
131 "phase": self.ctx_phase,
132 "hidden": self.ctx_hidden,
130 133 "_file_paths": self.ctx_list,
131 134 }
132 135
133 136 @reraise_safe_exceptions
134 137 def discover_hg_version(self):
135 138 from mercurial import util
136 139 return util.version()
137 140
138 141 @reraise_safe_exceptions
139 142 def archive_repo(self, archive_path, mtime, file_info, kind):
140 143 if kind == "tgz":
141 144 archiver = archival.tarit(archive_path, mtime, "gz")
142 145 elif kind == "tbz2":
143 146 archiver = archival.tarit(archive_path, mtime, "bz2")
144 147 elif kind == 'zip':
145 148 archiver = archival.zipit(archive_path, mtime)
146 149 else:
147 150 raise exceptions.ArchiveException(
148 151 'Remote does not support: "%s".' % kind)
149 152
150 153 for f_path, f_mode, f_is_link, f_content in file_info:
151 154 archiver.addfile(f_path, f_mode, f_is_link, f_content)
152 155 archiver.done()
153 156
154 157 @reraise_safe_exceptions
155 158 def bookmarks(self, wire):
156 159 repo = self._factory.repo(wire)
157 160 return dict(repo._bookmarks)
158 161
159 162 @reraise_safe_exceptions
160 163 def branches(self, wire, normal, closed):
161 164 repo = self._factory.repo(wire)
162 165 iter_branches = repo.branchmap().iterbranches()
163 166 bt = {}
164 167 for branch_name, _heads, tip, is_closed in iter_branches:
165 168 if normal and not is_closed:
166 169 bt[branch_name] = tip
167 170 if closed and is_closed:
168 171 bt[branch_name] = tip
169 172
170 173 return bt
171 174
172 175 @reraise_safe_exceptions
173 176 def bulk_request(self, wire, rev, pre_load):
174 177 result = {}
175 178 for attr in pre_load:
176 179 try:
177 180 method = self._bulk_methods[attr]
178 181 result[attr] = method(wire, rev)
179 182 except KeyError:
180 183 raise exceptions.VcsException(
181 184 'Unknown bulk attribute: "%s"' % attr)
182 185 return result
183 186
184 187 @reraise_safe_exceptions
185 188 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
186 189 baseui = self._factory._create_config(wire["config"], hooks=hooks)
187 190 clone(baseui, source, dest, noupdate=not update_after_clone)
188 191
189 192 @reraise_safe_exceptions
190 193 def commitctx(
191 194 self, wire, message, parents, commit_time, commit_timezone,
192 195 user, files, extra, removed, updated):
193 196
194 197 def _filectxfn(_repo, memctx, path):
195 198 """
196 199 Marks given path as added/changed/removed in a given _repo. This is
197 200 for internal mercurial commit function.
198 201 """
199 202
200 203 # check if this path is removed
201 204 if path in removed:
202 205 # returning None is a way to mark node for removal
203 206 return None
204 207
205 208 # check if this path is added
206 209 for node in updated:
207 210 if node['path'] == path:
208 211 return memfilectx(
209 212 _repo,
210 213 path=node['path'],
211 214 data=node['content'],
212 215 islink=False,
213 216 isexec=bool(node['mode'] & stat.S_IXUSR),
214 217 copied=False,
215 218 memctx=memctx)
216 219
217 220 raise exceptions.AbortException(
218 221 "Given path haven't been marked as added, "
219 222 "changed or removed (%s)" % path)
220 223
221 224 repo = self._factory.repo(wire)
222 225
223 226 commit_ctx = memctx(
224 227 repo=repo,
225 228 parents=parents,
226 229 text=message,
227 230 files=files,
228 231 filectxfn=_filectxfn,
229 232 user=user,
230 233 date=(commit_time, commit_timezone),
231 234 extra=extra)
232 235
233 236 n = repo.commitctx(commit_ctx)
234 237 new_id = hex(n)
235 238
236 239 return new_id
237 240
238 241 @reraise_safe_exceptions
239 242 def ctx_branch(self, wire, revision):
240 243 repo = self._factory.repo(wire)
241 244 ctx = repo[revision]
242 245 return ctx.branch()
243 246
244 247 @reraise_safe_exceptions
245 248 def ctx_children(self, wire, revision):
246 249 repo = self._factory.repo(wire)
247 250 ctx = repo[revision]
248 251 return [child.rev() for child in ctx.children()]
249 252
250 253 @reraise_safe_exceptions
251 254 def ctx_date(self, wire, revision):
252 255 repo = self._factory.repo(wire)
253 256 ctx = repo[revision]
254 257 return ctx.date()
255 258
256 259 @reraise_safe_exceptions
257 260 def ctx_description(self, wire, revision):
258 261 repo = self._factory.repo(wire)
259 262 ctx = repo[revision]
260 263 return ctx.description()
261 264
262 265 @reraise_safe_exceptions
263 266 def ctx_diff(
264 267 self, wire, revision, git=True, ignore_whitespace=True, context=3):
265 268 repo = self._factory.repo(wire)
266 269 ctx = repo[revision]
267 270 result = ctx.diff(
268 271 git=git, ignore_whitespace=ignore_whitespace, context=context)
269 272 return list(result)
270 273
271 274 @reraise_safe_exceptions
272 275 def ctx_files(self, wire, revision):
273 276 repo = self._factory.repo(wire)
274 277 ctx = repo[revision]
275 278 return ctx.files()
276 279
277 280 @reraise_safe_exceptions
278 281 def ctx_list(self, path, revision):
279 282 repo = self._factory.repo(path)
280 283 ctx = repo[revision]
281 284 return list(ctx)
282 285
283 286 @reraise_safe_exceptions
284 287 def ctx_parents(self, wire, revision):
285 288 repo = self._factory.repo(wire)
286 289 ctx = repo[revision]
287 290 return [parent.rev() for parent in ctx.parents()]
288 291
289 292 @reraise_safe_exceptions
290 293 def ctx_phase(self, wire, revision):
291 294 repo = self._factory.repo(wire)
292 295 ctx = repo[revision]
293 296 # public=0, draft=1, secret=2
294 297 return ctx.phase()
295 298
296 299 @reraise_safe_exceptions
297 300 def ctx_obsolete(self, wire, revision):
298 301 repo = self._factory.repo(wire)
299 302 ctx = repo[revision]
300 303 return ctx.obsolete()
301 304
302 305 @reraise_safe_exceptions
303 306 def ctx_hidden(self, wire, revision):
304 307 repo = self._factory.repo(wire)
305 308 ctx = repo[revision]
306 309 return ctx.hidden()
307 310
308 311 @reraise_safe_exceptions
309 312 def ctx_substate(self, wire, revision):
310 313 repo = self._factory.repo(wire)
311 314 ctx = repo[revision]
312 315 return ctx.substate
313 316
314 317 @reraise_safe_exceptions
315 318 def ctx_status(self, wire, revision):
316 319 repo = self._factory.repo(wire)
317 320 ctx = repo[revision]
318 321 status = repo[ctx.p1().node()].status(other=ctx.node())
319 322 # object of status (odd, custom named tuple in mercurial) is not
320 323 # correctly serializable, we make it a list, as the underlying
321 324 # API expects this to be a list
322 325 return list(status)
323 326
324 327 @reraise_safe_exceptions
325 328 def ctx_user(self, wire, revision):
326 329 repo = self._factory.repo(wire)
327 330 ctx = repo[revision]
328 331 return ctx.user()
329 332
330 333 @reraise_safe_exceptions
331 334 def check_url(self, url, config):
332 335 _proto = None
333 336 if '+' in url[:url.find('://')]:
334 337 _proto = url[0:url.find('+')]
335 338 url = url[url.find('+') + 1:]
336 339 handlers = []
337 340 url_obj = url_parser(url)
338 341 test_uri, authinfo = url_obj.authinfo()
339 342 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
340 343 url_obj.query = obfuscate_qs(url_obj.query)
341 344
342 345 cleaned_uri = str(url_obj)
343 346 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
344 347
345 348 if authinfo:
346 349 # create a password manager
347 350 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
348 351 passmgr.add_password(*authinfo)
349 352
350 353 handlers.extend((httpbasicauthhandler(passmgr),
351 354 httpdigestauthhandler(passmgr)))
352 355
353 356 o = urllib2.build_opener(*handlers)
354 357 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
355 358 ('Accept', 'application/mercurial-0.1')]
356 359
357 360 q = {"cmd": 'between'}
358 361 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
359 362 qs = '?%s' % urllib.urlencode(q)
360 363 cu = "%s%s" % (test_uri, qs)
361 364 req = urllib2.Request(cu, None, {})
362 365
363 366 try:
364 367 log.debug("Trying to open URL %s", cleaned_uri)
365 368 resp = o.open(req)
366 369 if resp.code != 200:
367 370 raise exceptions.URLError('Return Code is not 200')
368 371 except Exception as e:
369 372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
370 373 # means it cannot be cloned
371 374 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
372 375
373 376 # now check if it's a proper hg repo, but don't do it for svn
374 377 try:
375 378 if _proto == 'svn':
376 379 pass
377 380 else:
378 381 # check for pure hg repos
379 382 log.debug(
380 383 "Verifying if URL is a Mercurial repository: %s",
381 384 cleaned_uri)
382 385 httppeer(make_ui_from_config(config), url).lookup('tip')
383 386 except Exception as e:
384 387 log.warning("URL is not a valid Mercurial repository: %s",
385 388 cleaned_uri)
386 389 raise exceptions.URLError(
387 390 "url [%s] does not look like an hg repo org_exc: %s"
388 391 % (cleaned_uri, e))
389 392
390 393 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
391 394 return True
392 395
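The mercurial probe above issues a wire-protocol `between` command with a null revision pair; the standalone sketch below builds the same query string for a made-up URL:

# standalone sketch (URL is made up); mirrors the probe built in check_url above
test_uri = 'https://hg.example.com/project'
q = {'cmd': 'between', 'pairs': '%s-%s' % ('0' * 40, '0' * 40)}
query = '&'.join('%s=%s' % (k, v) for k, v in sorted(q.items()))
probe_url = '%s?%s' % (test_uri, query)
# -> https://hg.example.com/project?cmd=between&pairs=000...0-000...0
print(probe_url)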
393 396 @reraise_safe_exceptions
394 397 def diff(
395 398 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
396 399 context):
397 400 repo = self._factory.repo(wire)
398 401
399 402 if file_filter:
400 403 match_filter = match(file_filter[0], '', [file_filter[1]])
401 404 else:
402 405 match_filter = file_filter
403 406 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
404 407
405 408 try:
406 409 return "".join(patch.diff(
407 410 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
408 411 except RepoLookupError:
409 412 raise exceptions.LookupException()
410 413
411 414 @reraise_safe_exceptions
412 415 def file_history(self, wire, revision, path, limit):
413 416 repo = self._factory.repo(wire)
414 417
415 418 ctx = repo[revision]
416 419 fctx = ctx.filectx(path)
417 420
418 421 def history_iter():
419 422 limit_rev = fctx.rev()
420 423 for obj in reversed(list(fctx.filelog())):
421 424 obj = fctx.filectx(obj)
422 425 if limit_rev >= obj.rev():
423 426 yield obj
424 427
425 428 history = []
426 429 for cnt, obj in enumerate(history_iter()):
427 430 if limit and cnt >= limit:
428 431 break
429 432 history.append(hex(obj.node()))
430 433
431 434 return [x for x in history]
432 435
433 436 @reraise_safe_exceptions
434 437 def file_history_untill(self, wire, revision, path, limit):
435 438 repo = self._factory.repo(wire)
436 439 ctx = repo[revision]
437 440 fctx = ctx.filectx(path)
438 441
439 442 file_log = list(fctx.filelog())
440 443 if limit:
441 444 # Limit to the last n items
442 445 file_log = file_log[-limit:]
443 446
444 447 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
445 448
446 449 @reraise_safe_exceptions
447 450 def fctx_annotate(self, wire, revision, path):
448 451 repo = self._factory.repo(wire)
449 452 ctx = repo[revision]
450 453 fctx = ctx.filectx(path)
451 454
452 455 result = []
453 456 for i, annotate_data in enumerate(fctx.annotate()):
454 457 ln_no = i + 1
455 458 node_info, content = annotate_data
456 459 sha = hex(node_info[0].node())
457 460 result.append((ln_no, sha, content))
458 461 return result
459 462
460 463 @reraise_safe_exceptions
461 464 def fctx_data(self, wire, revision, path):
462 465 repo = self._factory.repo(wire)
463 466 ctx = repo[revision]
464 467 fctx = ctx.filectx(path)
465 468 return fctx.data()
466 469
467 470 @reraise_safe_exceptions
468 471 def fctx_flags(self, wire, revision, path):
469 472 repo = self._factory.repo(wire)
470 473 ctx = repo[revision]
471 474 fctx = ctx.filectx(path)
472 475 return fctx.flags()
473 476
474 477 @reraise_safe_exceptions
475 478 def fctx_size(self, wire, revision, path):
476 479 repo = self._factory.repo(wire)
477 480 ctx = repo[revision]
478 481 fctx = ctx.filectx(path)
479 482 return fctx.size()
480 483
481 484 @reraise_safe_exceptions
482 485 def get_all_commit_ids(self, wire, name):
483 486 repo = self._factory.repo(wire)
484 487 revs = repo.filtered(name).changelog.index
485 488 return map(lambda x: hex(x[7]), revs)[:-1]
486 489
487 490 @reraise_safe_exceptions
488 491 def get_config_value(self, wire, section, name, untrusted=False):
489 492 repo = self._factory.repo(wire)
490 493 return repo.ui.config(section, name, untrusted=untrusted)
491 494
492 495 @reraise_safe_exceptions
493 496 def get_config_bool(self, wire, section, name, untrusted=False):
494 497 repo = self._factory.repo(wire)
495 498 return repo.ui.configbool(section, name, untrusted=untrusted)
496 499
497 500 @reraise_safe_exceptions
498 501 def get_config_list(self, wire, section, name, untrusted=False):
499 502 repo = self._factory.repo(wire)
500 503 return repo.ui.configlist(section, name, untrusted=untrusted)
501 504
502 505 @reraise_safe_exceptions
503 506 def is_large_file(self, wire, path):
504 507 return largefiles.lfutil.isstandin(path)
505 508
506 509 @reraise_safe_exceptions
507 510 def in_largefiles_store(self, wire, sha):
508 511 repo = self._factory.repo(wire)
509 512 return largefiles.lfutil.instore(repo, sha)
510 513
511 514 @reraise_safe_exceptions
512 515 def in_user_cache(self, wire, sha):
513 516 repo = self._factory.repo(wire)
514 517 return largefiles.lfutil.inusercache(repo.ui, sha)
515 518
516 519 @reraise_safe_exceptions
517 520 def store_path(self, wire, sha):
518 521 repo = self._factory.repo(wire)
519 522 return largefiles.lfutil.storepath(repo, sha)
520 523
521 524 @reraise_safe_exceptions
522 525 def link(self, wire, sha, path):
523 526 repo = self._factory.repo(wire)
524 527 largefiles.lfutil.link(
525 528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
526 529
527 530 @reraise_safe_exceptions
528 531 def localrepository(self, wire, create=False):
529 532 self._factory.repo(wire, create=create)
530 533
531 534 @reraise_safe_exceptions
532 535 def lookup(self, wire, revision, both):
533 536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
534 537 if isinstance(revision, float):
535 538 revision = long(revision)
536 539 repo = self._factory.repo(wire)
537 540 try:
538 541 ctx = repo[revision]
539 542 except RepoLookupError:
540 543 raise exceptions.LookupException(revision)
541 544 except LookupError as e:
542 545 raise exceptions.LookupException(e.name)
543 546
544 547 if not both:
545 548 return ctx.hex()
546 549
547 550 ctx = repo[ctx.hex()]
548 551 return ctx.hex(), ctx.rev()
549 552
550 553 @reraise_safe_exceptions
551 554 def pull(self, wire, url, commit_ids=None):
552 555 repo = self._factory.repo(wire)
553 556 remote = peer(repo, {}, url)
554 557 if commit_ids:
555 558 commit_ids = [bin(commit_id) for commit_id in commit_ids]
556 559
557 560 return exchange.pull(
558 561 repo, remote, heads=commit_ids, force=None).cgresult
559 562
560 563 @reraise_safe_exceptions
561 564 def revision(self, wire, rev):
562 565 repo = self._factory.repo(wire)
563 566 ctx = repo[rev]
564 567 return ctx.rev()
565 568
566 569 @reraise_safe_exceptions
567 570 def rev_range(self, wire, filter):
568 571 repo = self._factory.repo(wire)
569 572 revisions = [rev for rev in revrange(repo, filter)]
570 573 return revisions
571 574
572 575 @reraise_safe_exceptions
573 576 def rev_range_hash(self, wire, node):
574 577 repo = self._factory.repo(wire)
575 578
576 579 def get_revs(repo, rev_opt):
577 580 if rev_opt:
578 581 revs = revrange(repo, rev_opt)
579 582 if len(revs) == 0:
580 583 return (nullrev, nullrev)
581 584 return max(revs), min(revs)
582 585 else:
583 586 return len(repo) - 1, 0
584 587
585 588 stop, start = get_revs(repo, [node + ':'])
586 589 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
587 590 return revs
588 591
589 592 @reraise_safe_exceptions
590 593 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
591 594 other_path = kwargs.pop('other_path', None)
592 595
593 596 # case when we want to compare two independent repositories
594 597 if other_path and other_path != wire["path"]:
595 598 baseui = self._factory._create_config(wire["config"])
596 599 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
597 600 else:
598 601 repo = self._factory.repo(wire)
599 602 return list(repo.revs(rev_spec, *args))
600 603
601 604 @reraise_safe_exceptions
602 605 def strip(self, wire, revision, update, backup):
603 606 repo = self._factory.repo(wire)
604 607 ctx = repo[revision]
605 608 hgext_strip(
606 609 repo.baseui, repo, ctx.node(), update=update, backup=backup)
607 610
608 611 @reraise_safe_exceptions
609 612 def verify(self, wire,):
610 613 repo = self._factory.repo(wire)
611 614 baseui = self._factory._create_config(wire['config'])
612 615 baseui.setconfig('ui', 'quiet', 'false')
613 616 output = io.BytesIO()
614 617
615 618 def write(data, **unused_kwargs):
616 619 output.write(data)
617 620 baseui.write = write
618 621
619 622 repo.ui = baseui
620 623 verify.verify(repo)
621 624 return output.getvalue()
622 625
623 626 @reraise_safe_exceptions
624 627 def tag(self, wire, name, revision, message, local, user,
625 628 tag_time, tag_timezone):
626 629 repo = self._factory.repo(wire)
627 630 ctx = repo[revision]
628 631 node = ctx.node()
629 632
630 633 date = (tag_time, tag_timezone)
631 634 try:
632 repo.tag(name, node, message, local, user, date)
635 hg_tag.tag(repo, name, node, message, local, user, date)
633 636 except Abort as e:
634 637 log.exception("Tag operation aborted")
635 638 # Exception can contain unicode which we convert
636 639 raise exceptions.AbortException(repr(e))
637 640
638 641 @reraise_safe_exceptions
639 642 def tags(self, wire):
640 643 repo = self._factory.repo(wire)
641 644 return repo.tags()
642 645
643 646 @reraise_safe_exceptions
644 647 def update(self, wire, node=None, clean=False):
645 648 repo = self._factory.repo(wire)
646 649 baseui = self._factory._create_config(wire['config'])
647 650 commands.update(baseui, repo, node=node, clean=clean)
648 651
649 652 @reraise_safe_exceptions
650 653 def identify(self, wire):
651 654 repo = self._factory.repo(wire)
652 655 baseui = self._factory._create_config(wire['config'])
653 656 output = io.BytesIO()
654 657 baseui.write = output.write
655 658 # This is required to get a full node id
656 659 baseui.debugflag = True
657 660 commands.identify(baseui, repo, id=True)
658 661
659 662 return output.getvalue()
660 663
661 664 @reraise_safe_exceptions
662 665 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
663 666 hooks=True):
664 667 repo = self._factory.repo(wire)
665 668 baseui = self._factory._create_config(wire['config'], hooks=hooks)
666 669
667 670 # Mercurial internally has a lot of logic that checks ONLY whether an
668 671 # option is defined, so we only pass options that are actually set
669 672 opts = {}
670 673 if bookmark:
671 674 opts['bookmark'] = bookmark
672 675 if branch:
673 676 opts['branch'] = branch
674 677 if revision:
675 678 opts['rev'] = revision
676 679
677 680 commands.pull(baseui, repo, source, **opts)
678 681
679 682 @reraise_safe_exceptions
680 683 def heads(self, wire, branch=None):
681 684 repo = self._factory.repo(wire)
682 685 baseui = self._factory._create_config(wire['config'])
683 686 output = io.BytesIO()
684 687
685 688 def write(data, **unused_kwargs):
686 689 output.write(data)
687 690
688 691 baseui.write = write
689 692 if branch:
690 693 args = [branch]
691 694 else:
692 695 args = []
693 696 commands.heads(baseui, repo, template='{node} ', *args)
694 697
695 698 return output.getvalue()
696 699
697 700 @reraise_safe_exceptions
698 701 def ancestor(self, wire, revision1, revision2):
699 702 repo = self._factory.repo(wire)
700 703 changelog = repo.changelog
701 704 lookup = repo.lookup
702 705 a = changelog.ancestor(lookup(revision1), lookup(revision2))
703 706 return hex(a)
704 707
705 708 @reraise_safe_exceptions
706 709 def push(self, wire, revisions, dest_path, hooks=True,
707 710 push_branches=False):
708 711 repo = self._factory.repo(wire)
709 712 baseui = self._factory._create_config(wire['config'], hooks=hooks)
710 713 commands.push(baseui, repo, dest=dest_path, rev=revisions,
711 714 new_branch=push_branches)
712 715
713 716 @reraise_safe_exceptions
714 717 def merge(self, wire, revision):
715 718 repo = self._factory.repo(wire)
716 719 baseui = self._factory._create_config(wire['config'])
717 720 repo.ui.setconfig('ui', 'merge', 'internal:dump')
718 721
719 722 # In case sub repositories are used, mercurial prompts the user in
720 723 # case of merge conflicts or different sub repository sources. By
721 724 # setting the interactive flag to `False` mercurial doesn't prompt the
722 725 # user but instead uses a default value.
723 726 repo.ui.setconfig('ui', 'interactive', False)
724 727
725 728 commands.merge(baseui, repo, rev=revision)
726 729
727 730 @reraise_safe_exceptions
728 def commit(self, wire, message, username):
731 def commit(self, wire, message, username, close_branch=False):
729 732 repo = self._factory.repo(wire)
730 733 baseui = self._factory._create_config(wire['config'])
731 734 repo.ui.setconfig('ui', 'username', username)
732 commands.commit(baseui, repo, message=message)
735 commands.commit(baseui, repo, message=message, close_branch=close_branch)
733 736
734 737 @reraise_safe_exceptions
735 738 def rebase(self, wire, source=None, dest=None, abort=False):
736 739 repo = self._factory.repo(wire)
737 740 baseui = self._factory._create_config(wire['config'])
738 741 repo.ui.setconfig('ui', 'merge', 'internal:dump')
739 742 rebase.rebase(
740 743 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
741 744
742 745 @reraise_safe_exceptions
743 746 def bookmark(self, wire, bookmark, revision=None):
744 747 repo = self._factory.repo(wire)
745 748 baseui = self._factory._create_config(wire['config'])
746 749 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
@@ -1,62 +1,63 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 """
19 19 Mercurial libs compatibility
20 20 """
21 21
22 22 import mercurial
23 23 import mercurial.demandimport
24 24 # patch demandimport, due to a bug in mercurial where it always triggers
25 25 # demandimport.enable()
26 26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
27 27
28 28 from mercurial import ui
29 29 from mercurial import patch
30 30 from mercurial import config
31 31 from mercurial import extensions
32 32 from mercurial import scmutil
33 33 from mercurial import archival
34 34 from mercurial import discovery
35 35 from mercurial import unionrepo
36 36 from mercurial import localrepo
37 37 from mercurial import merge as hg_merge
38 38 from mercurial import subrepo
39 from mercurial import tags as hg_tag
39 40
40 41 from mercurial.commands import clone, nullid, pull
41 42 from mercurial.context import memctx, memfilectx
42 43 from mercurial.error import (
43 44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
44 45 RequirementError)
45 46 from mercurial.hgweb import hgweb_mod
46 47 from mercurial.localrepo import localrepository
47 48 from mercurial.match import match
48 49 from mercurial.mdiff import diffopts
49 50 from mercurial.node import bin, hex
50 51 from mercurial.encoding import tolocal
51 52 from mercurial.discovery import findcommonoutgoing
52 53 from mercurial.hg import peer
53 54 from mercurial.httppeer import httppeer
54 55 from mercurial.util import url as hg_url
55 56 from mercurial.scmutil import revrange
56 57 from mercurial.node import nullrev
57 58 from mercurial import exchange
58 59 from hgext import largefiles
59 60
60 61 # those auth handlers are patched for a python 2.6.5 bug causing
61 62 # infinite looping when given invalid resources
62 63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
@@ -1,426 +1,475 b''
1 1 # -*- coding: utf-8 -*-
2 2
3 3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 4 # Copyright (C) 2014-2017 RhodeCode GmbH
5 5 #
6 6 # This program is free software; you can redistribute it and/or modify
7 7 # it under the terms of the GNU General Public License as published by
8 8 # the Free Software Foundation; either version 3 of the License, or
9 9 # (at your option) any later version.
10 10 #
11 11 # This program is distributed in the hope that it will be useful,
12 12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 14 # GNU General Public License for more details.
15 15 #
16 16 # You should have received a copy of the GNU General Public License
17 17 # along with this program; if not, write to the Free Software Foundation,
18 18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19 19
20 20 import io
21 import os
21 22 import sys
22 23 import json
23 24 import logging
24 25 import collections
25 26 import importlib
26 27 import subprocess
27 28
28 29 from httplib import HTTPConnection
29 30
30 31
31 32 import mercurial.scmutil
32 33 import mercurial.node
33 34 import simplejson as json
34 35
35 36 from vcsserver import exceptions
36 37
37 38 log = logging.getLogger(__name__)
38 39
39 40
40 41 class HooksHttpClient(object):
41 42 connection = None
42 43
43 44 def __init__(self, hooks_uri):
44 45 self.hooks_uri = hooks_uri
45 46
46 47 def __call__(self, method, extras):
47 48 connection = HTTPConnection(self.hooks_uri)
48 49 body = self._serialize(method, extras)
49 50 connection.request('POST', '/', body)
50 51 response = connection.getresponse()
51 52 return json.loads(response.read())
52 53
53 54 def _serialize(self, hook_name, extras):
54 55 data = {
55 56 'method': hook_name,
56 57 'extras': extras
57 58 }
58 59 return json.dumps(data)
59 60
60 61
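For reference, the request body produced by `HooksHttpClient._serialize` is plain JSON with a `method` and an `extras` key; the hook name and extras below are invented for the example:

import json

def serialize(hook_name, extras):
    # same shape as HooksHttpClient._serialize builds
    return json.dumps({'method': hook_name, 'extras': extras})

body = serialize('post_push', {'username': 'dev', 'repository': 'project'})
# e.g. '{"method": "post_push", "extras": {"username": "dev", ...}}' (key order may vary)
print(body)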
61 62 class HooksDummyClient(object):
62 63 def __init__(self, hooks_module):
63 64 self._hooks_module = importlib.import_module(hooks_module)
64 65
65 66 def __call__(self, hook_name, extras):
66 67 with self._hooks_module.Hooks() as hooks:
67 68 return getattr(hooks, hook_name)(extras)
68 69
69 70
70 71 class RemoteMessageWriter(object):
71 72 """Writer base class."""
72 73 def write(self, message):
73 74 raise NotImplementedError()
74 75
75 76
76 77 class HgMessageWriter(RemoteMessageWriter):
77 78 """Writer that knows how to send messages to mercurial clients."""
78 79
79 80 def __init__(self, ui):
80 81 self.ui = ui
81 82
82 83 def write(self, message):
83 84 # TODO: Check why the quiet flag is set by default.
84 85 old = self.ui.quiet
85 86 self.ui.quiet = False
86 87 self.ui.status(message.encode('utf-8'))
87 88 self.ui.quiet = old
88 89
89 90
90 91 class GitMessageWriter(RemoteMessageWriter):
91 92 """Writer that knows how to send messages to git clients."""
92 93
93 94 def __init__(self, stdout=None):
94 95 self.stdout = stdout or sys.stdout
95 96
96 97 def write(self, message):
97 98 self.stdout.write(message.encode('utf-8'))
98 99
99 100
100 101 def _handle_exception(result):
101 102 exception_class = result.get('exception')
102 103 exception_traceback = result.get('exception_traceback')
103 104
104 105 if exception_traceback:
105 106 log.error('Got traceback from remote call:%s', exception_traceback)
106 107
107 108 if exception_class == 'HTTPLockedRC':
108 109 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 110 elif exception_class == 'RepositoryError':
110 111 raise exceptions.VcsException(*result['exception_args'])
111 112 elif exception_class:
112 113 raise Exception('Got remote exception "%s" with args "%s"' %
113 114 (exception_class, result['exception_args']))
114 115
115 116
116 117 def _get_hooks_client(extras):
117 118 if 'hooks_uri' in extras:
118 119 protocol = extras.get('hooks_protocol')
119 120 return HooksHttpClient(extras['hooks_uri'])
120 121 else:
121 122 return HooksDummyClient(extras['hooks_module'])
122 123
123 124
124 125 def _call_hook(hook_name, extras, writer):
125 126 hooks = _get_hooks_client(extras)
126 127 result = hooks(hook_name, extras)
128 log.debug('Hooks got result: %s', result)
127 129 writer.write(result['output'])
128 130 _handle_exception(result)
129 131
130 132 return result['status']
131 133
132 134
133 135 def _extras_from_ui(ui):
134 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
136 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
137 if not hook_data:
138 # maybe it's inside environ ?
139 hook_data = os.environ.get('RC_SCM_DATA')
140 extras = json.loads(hook_data)
135 141 return extras
136 142
137 143
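The environment fallback added to `_extras_from_ui` above can be exercised standalone: if the `rhodecode.RC_SCM_DATA` ui setting is missing, the JSON blob is read from the `RC_SCM_DATA` environment variable instead (the payload here is made up):

import os
import json

hook_data = None                       # simulate ui.config(...) returning nothing
os.environ['RC_SCM_DATA'] = json.dumps({'username': 'dev', 'SSH': True})
if not hook_data:
    # same fallback as in _extras_from_ui: maybe it's inside environ
    hook_data = os.environ.get('RC_SCM_DATA')
extras = json.loads(hook_data)
print(extras['username'])              # -> 'dev'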
138 def repo_size(ui, repo, **kwargs):
139 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
140
141
142 def pre_pull(ui, repo, **kwargs):
143 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
144
145
146 def post_pull(ui, repo, **kwargs):
147 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
148
149
150 144 def _rev_range_hash(repo, node):
151 145
152 146 commits = []
153 147 for rev in xrange(repo[node], len(repo)):
154 148 ctx = repo[rev]
155 149 commit_id = mercurial.node.hex(ctx.node())
156 150 branch = ctx.branch()
157 151 commits.append((commit_id, branch))
158 152
159 153 return commits
160 154
161 155
156 def repo_size(ui, repo, **kwargs):
157 extras = _extras_from_ui(ui)
158 return _call_hook('repo_size', extras, HgMessageWriter(ui))
159
160
161 def pre_pull(ui, repo, **kwargs):
162 extras = _extras_from_ui(ui)
163 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
164
165
166 def pre_pull_ssh(ui, repo, **kwargs):
167 if _extras_from_ui(ui).get('SSH'):
168 return pre_pull(ui, repo, **kwargs)
169 return 0
170
171
172 def post_pull(ui, repo, **kwargs):
173 extras = _extras_from_ui(ui)
174 return _call_hook('post_pull', extras, HgMessageWriter(ui))
175
176
177 def post_pull_ssh(ui, repo, **kwargs):
178 if _extras_from_ui(ui).get('SSH'):
179 return post_pull(ui, repo, **kwargs)
180 return 0
181
182
162 183 def pre_push(ui, repo, node=None, **kwargs):
163 184 extras = _extras_from_ui(ui)
164 185
165 186 rev_data = []
166 187 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
167 188 branches = collections.defaultdict(list)
168 189 for commit_id, branch in _rev_range_hash(repo, node):
169 190 branches[branch].append(commit_id)
170 191
171 192 for branch, commits in branches.iteritems():
172 193 old_rev = kwargs.get('node_last') or commits[0]
173 194 rev_data.append({
174 195 'old_rev': old_rev,
175 196 'new_rev': commits[-1],
176 197 'ref': '',
177 198 'type': 'branch',
178 199 'name': branch,
179 200 })
180 201
181 202 extras['commit_ids'] = rev_data
182 203 return _call_hook('pre_push', extras, HgMessageWriter(ui))
183 204
184 205
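The branch grouping in `pre_push` can be shown with toy data: the (commit_id, branch) pairs coming out of `_rev_range_hash` are grouped per branch, and each branch contributes one old/new rev span (the hashes are fabricated and shortened):

import collections

# (commit_id, branch) pairs as _rev_range_hash would return them
commits = [('aaa1', 'default'), ('aaa2', 'default'), ('bbb1', 'stable')]

branches = collections.defaultdict(list)
for commit_id, branch in commits:
    branches[branch].append(commit_id)

rev_data = []
for branch, commit_ids in branches.items():
    rev_data.append({
        'old_rev': commit_ids[0],      # pre_push prefers kwargs['node_last'] if set
        'new_rev': commit_ids[-1],
        'ref': '',
        'type': 'branch',
        'name': branch,
    })
# -> one entry for 'default' (aaa1 -> aaa2) and one for 'stable' (bbb1 -> bbb1)
print(rev_data)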
206 def pre_push_ssh(ui, repo, node=None, **kwargs):
207 if _extras_from_ui(ui).get('SSH'):
208 return pre_push(ui, repo, node, **kwargs)
209
210 return 0
211
212
213 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
214 extras = _extras_from_ui(ui)
215 if extras.get('SSH'):
216 permission = extras['SSH_PERMISSIONS']
217
218 if 'repository.write' == permission or 'repository.admin' == permission:
219 return 0
220
221 # non-zero ret code
222 return 1
223
224 return 0
225
226
185 227 def post_push(ui, repo, node, **kwargs):
186 228 extras = _extras_from_ui(ui)
187 229
188 230 commit_ids = []
189 231 branches = []
190 232 bookmarks = []
191 233 tags = []
192 234
193 235 for commit_id, branch in _rev_range_hash(repo, node):
194 236 commit_ids.append(commit_id)
195 237 if branch not in branches:
196 238 branches.append(branch)
197 239
198 240 if hasattr(ui, '_rc_pushkey_branches'):
199 241 bookmarks = ui._rc_pushkey_branches
200 242
201 243 extras['commit_ids'] = commit_ids
202 244 extras['new_refs'] = {
203 245 'branches': branches,
204 246 'bookmarks': bookmarks,
205 247 'tags': tags
206 248 }
207 249
208 250 return _call_hook('post_push', extras, HgMessageWriter(ui))
209 251
210 252
253 def post_push_ssh(ui, repo, node, **kwargs):
254 if _extras_from_ui(ui).get('SSH'):
255 return post_push(ui, repo, node, **kwargs)
256 return 0
257
258
211 259 def key_push(ui, repo, **kwargs):
212 260 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
213 261 # store new bookmarks in our UI object propagated later to post_push
214 262 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
215 263 return
216 264
265
217 266 # backward compat
218 267 log_pull_action = post_pull
219 268
220 269 # backward compat
221 270 log_push_action = post_push
222 271
223 272
224 273 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
225 274 """
226 275 Old hook name: keep here for backward compatibility.
227 276
228 277 This is only required when the installed git hooks are not upgraded.
229 278 """
230 279 pass
231 280
232 281
233 282 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
234 283 """
235 284 Old hook name: keep here for backward compatibility.
236 285
237 286 This is only required when the installed git hooks are not upgraded.
238 287 """
239 288 pass
240 289
241 290
242 291 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
243 292
244 293
245 294 def git_pre_pull(extras):
246 295 """
247 296 Pre pull hook.
248 297
249 298 :param extras: dictionary containing the keys defined in simplevcs
250 299 :type extras: dict
251 300
252 301 :return: status code of the hook. 0 for success.
253 302 :rtype: int
254 303 """
255 304 if 'pull' not in extras['hooks']:
256 305 return HookResponse(0, '')
257 306
258 307 stdout = io.BytesIO()
259 308 try:
260 309 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
261 310 except Exception as error:
262 311 status = 128
263 312 stdout.write('ERROR: %s\n' % str(error))
264 313
265 314 return HookResponse(status, stdout.getvalue())
266 315
267 316
268 317 def git_post_pull(extras):
269 318 """
270 319 Post pull hook.
271 320
272 321 :param extras: dictionary containing the keys defined in simplevcs
273 322 :type extras: dict
274 323
275 324 :return: status code of the hook. 0 for success.
276 325 :rtype: int
277 326 """
278 327 if 'pull' not in extras['hooks']:
279 328 return HookResponse(0, '')
280 329
281 330 stdout = io.BytesIO()
282 331 try:
283 332 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
284 333 except Exception as error:
285 334 status = 128
286 335 stdout.write('ERROR: %s\n' % error)
287 336
288 337 return HookResponse(status, stdout.getvalue())
289 338
290 339
291 340 def _parse_git_ref_lines(revision_lines):
292 341 rev_data = []
293 342 for revision_line in revision_lines or []:
294 343 old_rev, new_rev, ref = revision_line.strip().split(' ')
295 344 ref_data = ref.split('/', 2)
296 345 if ref_data[1] in ('tags', 'heads'):
297 346 rev_data.append({
298 347 'old_rev': old_rev,
299 348 'new_rev': new_rev,
300 349 'ref': ref,
301 350 'type': ref_data[1],
302 351 'name': ref_data[2],
303 352 })
304 353 return rev_data
305 354
306 355
307 356 def git_pre_receive(unused_repo_path, revision_lines, env):
308 357 """
309 358 Pre push hook.
310 359
311 360 :param extras: dictionary containing the keys defined in simplevcs
312 361 :type extras: dict
313 362
314 363 :return: status code of the hook. 0 for success.
315 364 :rtype: int
316 365 """
317 366 extras = json.loads(env['RC_SCM_DATA'])
318 367 rev_data = _parse_git_ref_lines(revision_lines)
319 368 if 'push' not in extras['hooks']:
320 369 return 0
321 370 extras['commit_ids'] = rev_data
322 371 return _call_hook('pre_push', extras, GitMessageWriter())
323 372
324 373
325 374 def _run_command(arguments):
326 375 """
327 376 Run the specified command and return the stdout.
328 377
329 378 :param arguments: sequence of program arguments (including the program name)
330 379 :type arguments: list[str]
331 380 """
332 381 # TODO(skreft): refactor this method and all the other similar ones.
333 382 # Probably this should be using subprocessio.
334 383 process = subprocess.Popen(
335 384 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
336 385 stdout, stderr = process.communicate()
337 386
338 387 if process.returncode != 0:
339 388 raise Exception(
340 389 'Command %s exited with exit code %s: stderr:%s' % (
341 390 arguments, process.returncode, stderr))
342 391
343 392 return stdout
344 393
345 394
346 395 def git_post_receive(unused_repo_path, revision_lines, env):
347 396 """
348 397 Post push hook.
349 398
350 399 :param extras: dictionary containing the keys defined in simplevcs
351 400 :type extras: dict
352 401
353 402 :return: status code of the hook. 0 for success.
354 403 :rtype: int
355 404 """
356 405 extras = json.loads(env['RC_SCM_DATA'])
357 406 if 'push' not in extras['hooks']:
358 407 return 0
359 408
360 409 rev_data = _parse_git_ref_lines(revision_lines)
361 410
362 411 git_revs = []
363 412
364 413 # N.B.(skreft): it is ok to just call git, as git before calling a
365 414 # subcommand sets the PATH environment variable so that it points to the
366 415 # correct version of the git executable.
367 416 empty_commit_id = '0' * 40
368 417 branches = []
369 418 tags = []
370 419 for push_ref in rev_data:
371 420 type_ = push_ref['type']
372 421
373 422 if type_ == 'heads':
374 423 if push_ref['old_rev'] == empty_commit_id:
375 424 # starting new branch case
376 425 if push_ref['name'] not in branches:
377 426 branches.append(push_ref['name'])
378 427
379 428 # Fix up head revision if needed
380 429 cmd = ['git', 'show', 'HEAD']
381 430 try:
382 431 _run_command(cmd)
383 432 except Exception:
384 433 cmd = ['git', 'symbolic-ref', 'HEAD',
385 434 'refs/heads/%s' % push_ref['name']]
386 435 print("Setting default branch to %s" % push_ref['name'])
387 436 _run_command(cmd)
388 437
389 438 cmd = ['git', 'for-each-ref', '--format=%(refname)',
390 439 'refs/heads/*']
391 440 heads = _run_command(cmd)
392 441 heads = heads.replace(push_ref['ref'], '')
393 442 heads = ' '.join(head for head in heads.splitlines() if head)
394 443 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
395 444 '--', push_ref['new_rev'], '--not', heads]
396 445 git_revs.extend(_run_command(cmd).splitlines())
397 446 elif push_ref['new_rev'] == empty_commit_id:
398 447 # delete branch case
399 448 git_revs.append('delete_branch=>%s' % push_ref['name'])
400 449 else:
401 450 if push_ref['name'] not in branches:
402 451 branches.append(push_ref['name'])
403 452
404 453 cmd = ['git', 'log',
405 454 '{old_rev}..{new_rev}'.format(**push_ref),
406 455 '--reverse', '--pretty=format:%H']
407 456 git_revs.extend(_run_command(cmd).splitlines())
408 457 elif type_ == 'tags':
409 458 if push_ref['name'] not in tags:
410 459 tags.append(push_ref['name'])
411 460 git_revs.append('tag=>%s' % push_ref['name'])
412 461
413 462 extras['commit_ids'] = git_revs
414 463 extras['new_refs'] = {
415 464 'branches': branches,
416 465 'bookmarks': [],
417 466 'tags': tags,
418 467 }
419 468
420 469 if 'repo_size' in extras['hooks']:
421 470 try:
422 471 _call_hook('repo_size', extras, GitMessageWriter())
423 472 except:
424 473 pass
425 474
426 475 return _call_hook('post_push', extras, GitMessageWriter())
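
Note on the hooks.py hunk above: the new environment fallback in _extras_from_ui() and the *_ssh wrappers only come into play when whatever spawns `hg` for an SSH session exports RC_SCM_DATA itself, since there is no repository ui config to read in that case. The sketch below shows one plausible shape for such a wrapper; only the RC_SCM_DATA variable and the SSH / SSH_PERMISSIONS keys (with values like 'repository.write') come from the diff, while the payload layout, the helper name and the `hg serve --stdio` invocation are assumptions for illustration.

import json
import os
import subprocess

def serve_hg_over_ssh(repo_path, username, permission):
    # Hypothetical SSH entry point: build the hook payload by hand because
    # the Mercurial ui has no [rhodecode] RC_SCM_DATA setting here.
    hook_data = {
        'SSH': True,                    # makes the *_ssh wrappers delegate to the real hooks
        'SSH_PERMISSIONS': permission,  # e.g. 'repository.write' or 'repository.admin'
        'username': username,
        'repository': repo_path,
    }
    env = dict(os.environ)
    # _extras_from_ui() falls back to this variable when the ui config is empty.
    env['RC_SCM_DATA'] = json.dumps(hook_data)
    return subprocess.call(
        ['hg', '-R', repo_path, 'serve', '--stdio'], env=env)

With such a payload, pre_push_ssh_auth() returns 0 only for 'repository.write' or 'repository.admin' and 1 otherwise, exactly as coded in the hunk above.
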
@@ -1,434 +1,466 b''
1 1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 2 # Copyright (C) 2014-2017 RhodeCode GmbH
3 3 #
4 4 # This program is free software; you can redistribute it and/or modify
5 5 # it under the terms of the GNU General Public License as published by
6 6 # the Free Software Foundation; either version 3 of the License, or
7 7 # (at your option) any later version.
8 8 #
9 9 # This program is distributed in the hope that it will be useful,
10 10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 12 # GNU General Public License for more details.
13 13 #
14 14 # You should have received a copy of the GNU General Public License
15 15 # along with this program; if not, write to the Free Software Foundation,
16 16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 17
18 18 import base64
19 19 import locale
20 20 import logging
21 21 import uuid
22 22 import wsgiref.util
23 23 import traceback
24 24 from itertools import chain
25 25
26 import simplejson as json
26 27 import msgpack
27 28 from beaker.cache import CacheManager
28 29 from beaker.util import parse_cache_config_options
29 30 from pyramid.config import Configurator
30 31 from pyramid.wsgi import wsgiapp
31 32
32 33 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
33 34 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
34 35 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
35 36 from vcsserver.echo_stub.echo_app import EchoApp
36 37 from vcsserver.exceptions import HTTPRepoLocked
37 38 from vcsserver.server import VcsServer
38 39
39 40 try:
40 41 from vcsserver.git import GitFactory, GitRemote
41 42 except ImportError:
42 43 GitFactory = None
43 44 GitRemote = None
44 45
45 46 try:
46 47 from vcsserver.hg import MercurialFactory, HgRemote
47 48 except ImportError:
48 49 MercurialFactory = None
49 50 HgRemote = None
50 51
51 52 try:
52 53 from vcsserver.svn import SubversionFactory, SvnRemote
53 54 except ImportError:
54 55 SubversionFactory = None
55 56 SvnRemote = None
56 57
57 58 log = logging.getLogger(__name__)
58 59
59 60
60 61 class VCS(object):
61 62 def __init__(self, locale=None, cache_config=None):
62 63 self.locale = locale
63 64 self.cache_config = cache_config
64 65 self._configure_locale()
65 66 self._initialize_cache()
66 67
67 68 if GitFactory and GitRemote:
68 69 git_repo_cache = self.cache.get_cache_region(
69 70 'git', region='repo_object')
70 71 git_factory = GitFactory(git_repo_cache)
71 72 self._git_remote = GitRemote(git_factory)
72 73 else:
73 74 log.info("Git client import failed")
74 75
75 76 if MercurialFactory and HgRemote:
76 77 hg_repo_cache = self.cache.get_cache_region(
77 78 'hg', region='repo_object')
78 79 hg_factory = MercurialFactory(hg_repo_cache)
79 80 self._hg_remote = HgRemote(hg_factory)
80 81 else:
81 82 log.info("Mercurial client import failed")
82 83
83 84 if SubversionFactory and SvnRemote:
84 85 svn_repo_cache = self.cache.get_cache_region(
85 86 'svn', region='repo_object')
86 87 svn_factory = SubversionFactory(svn_repo_cache)
87 88 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
88 89 else:
89 90 log.info("Subversion client import failed")
90 91
91 92 self._vcsserver = VcsServer()
92 93
93 94 def _initialize_cache(self):
94 95 cache_config = parse_cache_config_options(self.cache_config)
95 96 log.info('Initializing beaker cache: %s' % cache_config)
96 97 self.cache = CacheManager(**cache_config)
97 98
98 99 def _configure_locale(self):
99 100 if self.locale:
100 101 log.info('Setting locale `LC_ALL` to %s' % self.locale)
101 102 else:
102 103 log.info(
103 104 'Configuring locale subsystem based on environment variables')
104 105 try:
105 106 # If self.locale is the empty string, then the locale
106 107 # module will use the environment variables. See the
107 108 # documentation of the package `locale`.
108 109 locale.setlocale(locale.LC_ALL, self.locale)
109 110
110 111 language_code, encoding = locale.getlocale()
111 112 log.info(
112 113 'Locale set to language code "%s" with encoding "%s".',
113 114 language_code, encoding)
114 115 except locale.Error:
115 116 log.exception(
116 117 'Cannot set locale, not configuring the locale system')
117 118
118 119
119 120 class WsgiProxy(object):
120 121 def __init__(self, wsgi):
121 122 self.wsgi = wsgi
122 123
123 124 def __call__(self, environ, start_response):
124 125 input_data = environ['wsgi.input'].read()
125 126 input_data = msgpack.unpackb(input_data)
126 127
127 128 error = None
128 129 try:
129 130 data, status, headers = self.wsgi.handle(
130 131 input_data['environment'], input_data['input_data'],
131 132 *input_data['args'], **input_data['kwargs'])
132 133 except Exception as e:
133 134 data, status, headers = [], None, None
134 135 error = {
135 136 'message': str(e),
136 137 '_vcs_kind': getattr(e, '_vcs_kind', None)
137 138 }
138 139
139 140 start_response(200, {})
140 141 return self._iterator(error, status, headers, data)
141 142
142 143 def _iterator(self, error, status, headers, data):
143 144 initial_data = [
144 145 error,
145 146 status,
146 147 headers,
147 148 ]
148 149
149 150 for d in chain(initial_data, data):
150 151 yield msgpack.packb(d)
151 152
152 153
153 154 class HTTPApplication(object):
154 155 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
155 156
156 157 remote_wsgi = remote_wsgi
157 158 _use_echo_app = False
158 159
159 160 def __init__(self, settings=None, global_config=None):
160 161 self.config = Configurator(settings=settings)
161 162 self.global_config = global_config
162 163
163 164 locale = settings.get('locale', '') or 'en_US.UTF-8'
164 165 vcs = VCS(locale=locale, cache_config=settings)
165 166 self._remotes = {
166 167 'hg': vcs._hg_remote,
167 168 'git': vcs._git_remote,
168 169 'svn': vcs._svn_remote,
169 170 'server': vcs._vcsserver,
170 171 }
171 172 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
172 173 self._use_echo_app = True
173 174 log.warning("Using EchoApp for VCS operations.")
174 175 self.remote_wsgi = remote_wsgi_stub
175 176 self._configure_settings(settings)
176 177 self._configure()
177 178
178 179 def _configure_settings(self, app_settings):
179 180 """
180 181 Configure the settings module.
181 182 """
182 183 git_path = app_settings.get('git_path', None)
183 184 if git_path:
184 185 settings.GIT_EXECUTABLE = git_path
185 186
186 187 def _configure(self):
187 188 self.config.add_renderer(
188 189 name='msgpack',
189 190 factory=self._msgpack_renderer_factory)
190 191
191 192 self.config.add_route('service', '/_service')
192 193 self.config.add_route('status', '/status')
193 194 self.config.add_route('hg_proxy', '/proxy/hg')
194 195 self.config.add_route('git_proxy', '/proxy/git')
195 196 self.config.add_route('vcs', '/{backend}')
196 197 self.config.add_route('stream_git', '/stream/git/*repo_name')
197 198 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
198 199
199 200 self.config.add_view(
200 201 self.status_view, route_name='status', renderer='json')
201 202 self.config.add_view(
202 203 self.service_view, route_name='service', renderer='msgpack')
203 204
204 205 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
205 206 self.config.add_view(self.git_proxy(), route_name='git_proxy')
206 207 self.config.add_view(
207 208 self.vcs_view, route_name='vcs', renderer='msgpack',
208 209 custom_predicates=[self.is_vcs_view])
209 210
210 211 self.config.add_view(self.hg_stream(), route_name='stream_hg')
211 212 self.config.add_view(self.git_stream(), route_name='stream_git')
212 213
213 214 def notfound(request):
214 215 return {'status': '404 NOT FOUND'}
215 216 self.config.add_notfound_view(notfound, renderer='json')
216 217
217 218 self.config.add_view(self.handle_vcs_exception, context=Exception)
218 219
219 220 self.config.add_tween(
220 221 'vcsserver.tweens.RequestWrapperTween',
221 222 )
222 223
223 224 def wsgi_app(self):
224 225 return self.config.make_wsgi_app()
225 226
226 227 def vcs_view(self, request):
227 228 remote = self._remotes[request.matchdict['backend']]
228 229 payload = msgpack.unpackb(request.body, use_list=True)
229 230 method = payload.get('method')
230 231 params = payload.get('params')
231 232 wire = params.get('wire')
232 233 args = params.get('args')
233 234 kwargs = params.get('kwargs')
234 235 if wire:
235 236 try:
236 237 wire['context'] = uuid.UUID(wire['context'])
237 238 except KeyError:
238 239 pass
239 240 args.insert(0, wire)
240 241
241 242 log.debug('method called:%s with kwargs:%s', method, kwargs)
242 243 try:
243 244 resp = getattr(remote, method)(*args, **kwargs)
244 245 except Exception as e:
245 246 tb_info = traceback.format_exc()
246 247
247 248 type_ = e.__class__.__name__
248 249 if type_ not in self.ALLOWED_EXCEPTIONS:
249 250 type_ = None
250 251
251 252 resp = {
252 253 'id': payload.get('id'),
253 254 'error': {
254 255 'message': e.message,
255 256 'traceback': tb_info,
256 257 'type': type_
257 258 }
258 259 }
259 260 try:
260 261 resp['error']['_vcs_kind'] = e._vcs_kind
261 262 except AttributeError:
262 263 pass
263 264 else:
264 265 resp = {
265 266 'id': payload.get('id'),
266 267 'result': resp
267 268 }
268 269
269 270 return resp
270 271
271 272 def status_view(self, request):
272 return {'status': 'OK'}
273 import vcsserver
274 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__}
273 275
274 276 def service_view(self, request):
275 277 import vcsserver
276 278 import ConfigParser as configparser
277 279
278 280 payload = msgpack.unpackb(request.body, use_list=True)
279 281
280 282 try:
281 283 path = self.global_config['__file__']
282 284 config = configparser.ConfigParser()
283 285 config.read(path)
284 286 parsed_ini = config
285 287 if parsed_ini.has_section('server:main'):
286 288 parsed_ini = dict(parsed_ini.items('server:main'))
287 289 except Exception:
288 290 log.exception('Failed to read .ini file for display')
289 291 parsed_ini = {}
290 292
291 293 resp = {
292 294 'id': payload.get('id'),
293 295 'result': dict(
294 296 version=vcsserver.__version__,
295 297 config=parsed_ini,
296 298 payload=payload,
297 299 )
298 300 }
299 301 return resp
300 302
301 303 def _msgpack_renderer_factory(self, info):
302 304 def _render(value, system):
303 305 value = msgpack.packb(value)
304 306 request = system.get('request')
305 307 if request is not None:
306 308 response = request.response
307 309 ct = response.content_type
308 310 if ct == response.default_content_type:
309 311 response.content_type = 'application/x-msgpack'
310 312 return value
311 313 return _render
312 314
315 def set_env_from_config(self, environ, config):
316 dict_conf = {}
317 try:
318 for elem in config:
319 if elem[0] == 'rhodecode':
320 dict_conf = json.loads(elem[2])
321 break
322 except Exception:
323 log.exception('Failed to fetch SCM CONFIG')
324 return
325
326 username = dict_conf.get('username')
327 if username:
328 environ['REMOTE_USER'] = username
329
330 ip = dict_conf.get('ip')
331 if ip:
332 environ['REMOTE_HOST'] = ip
333
313 334 def hg_proxy(self):
314 335 @wsgiapp
315 336 def _hg_proxy(environ, start_response):
316 337 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
317 338 return app(environ, start_response)
318 339 return _hg_proxy
319 340
320 341 def git_proxy(self):
321 342 @wsgiapp
322 343 def _git_proxy(environ, start_response):
323 344 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
324 345 return app(environ, start_response)
325 346 return _git_proxy
326 347
327 348 def hg_stream(self):
328 349 if self._use_echo_app:
329 350 @wsgiapp
330 351 def _hg_stream(environ, start_response):
331 352 app = EchoApp('fake_path', 'fake_name', None)
332 353 return app(environ, start_response)
333 354 return _hg_stream
334 355 else:
335 356 @wsgiapp
336 357 def _hg_stream(environ, start_response):
358 log.debug('http-app: handling hg stream')
337 359 repo_path = environ['HTTP_X_RC_REPO_PATH']
338 360 repo_name = environ['HTTP_X_RC_REPO_NAME']
339 361 packed_config = base64.b64decode(
340 362 environ['HTTP_X_RC_REPO_CONFIG'])
341 363 config = msgpack.unpackb(packed_config)
342 364 app = scm_app.create_hg_wsgi_app(
343 365 repo_path, repo_name, config)
344 366
345 # Consitent path information for hgweb
367 # Consistent path information for hgweb
346 368 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
347 369 environ['REPO_NAME'] = repo_name
370 self.set_env_from_config(environ, config)
371
372 log.debug('http-app: starting app handler '
373 'with %s and process request', app)
348 374 return app(environ, ResponseFilter(start_response))
349 375 return _hg_stream
350 376
351 377 def git_stream(self):
352 378 if self._use_echo_app:
353 379 @wsgiapp
354 380 def _git_stream(environ, start_response):
355 381 app = EchoApp('fake_path', 'fake_name', None)
356 382 return app(environ, start_response)
357 383 return _git_stream
358 384 else:
359 385 @wsgiapp
360 386 def _git_stream(environ, start_response):
387 log.debug('http-app: handling git stream')
361 388 repo_path = environ['HTTP_X_RC_REPO_PATH']
362 389 repo_name = environ['HTTP_X_RC_REPO_NAME']
363 390 packed_config = base64.b64decode(
364 391 environ['HTTP_X_RC_REPO_CONFIG'])
365 392 config = msgpack.unpackb(packed_config)
366 393
367 394 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
395 self.set_env_from_config(environ, config)
396
368 397 content_type = environ.get('CONTENT_TYPE', '')
369 398
370 399 path = environ['PATH_INFO']
371 400 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
372 401 log.debug(
373 402 'LFS: Detecting if request `%s` is LFS server path based '
374 403 'on content type:`%s`, is_lfs:%s',
375 404 path, content_type, is_lfs_request)
376 405
377 406 if not is_lfs_request:
378 407 # fallback detection by path
379 408 if GIT_LFS_PROTO_PAT.match(path):
380 409 is_lfs_request = True
381 410 log.debug(
382 411 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
383 412 path, is_lfs_request)
384 413
385 414 if is_lfs_request:
386 415 app = scm_app.create_git_lfs_wsgi_app(
387 416 repo_path, repo_name, config)
388 417 else:
389 418 app = scm_app.create_git_wsgi_app(
390 419 repo_path, repo_name, config)
420
421 log.debug('http-app: starting app handler '
422 'with %s and process request', app)
391 423 return app(environ, start_response)
392 424
393 425 return _git_stream
394 426
395 427 def is_vcs_view(self, context, request):
396 428 """
397 429 View predicate that returns true if given backend is supported by
398 430 defined remotes.
399 431 """
400 432 backend = request.matchdict.get('backend')
401 433 return backend in self._remotes
402 434
403 435 def handle_vcs_exception(self, exception, request):
404 436 _vcs_kind = getattr(exception, '_vcs_kind', '')
405 437 if _vcs_kind == 'repo_locked':
406 438 # Get custom repo-locked status code if present.
407 439 status_code = request.headers.get('X-RC-Locked-Status-Code')
408 440 return HTTPRepoLocked(
409 441 title=exception.message, status_code=status_code)
410 442
411 443 # Re-raise exception if we can not handle it.
412 444 log.exception(
413 445 'error occurred handling this request for path: %s', request.path)
414 446 raise exception
415 447
416 448
417 449 class ResponseFilter(object):
418 450
419 451 def __init__(self, start_response):
420 452 self._start_response = start_response
421 453
422 454 def __call__(self, status, response_headers, exc_info=None):
423 455 headers = tuple(
424 456 (h, v) for h, v in response_headers
425 457 if not wsgiref.util.is_hop_by_hop(h))
426 458 return self._start_response(status, headers, exc_info)
427 459
428 460
429 461 def main(global_config, **settings):
430 462 if MercurialFactory:
431 463 hgpatches.patch_largefiles_capabilities()
432 464 hgpatches.patch_subrepo_type_mapping()
433 465 app = HTTPApplication(settings=settings, global_config=global_config)
434 466 return app.wsgi_app()
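
The new set_env_from_config() above depends only on the shape of the msgpack-decoded HTTP_X_RC_REPO_CONFIG payload: an iterable of (section, key, value) triples whose 'rhodecode' entry holds a JSON blob with optional 'username' and 'ip' fields, which end up in REMOTE_USER and REMOTE_HOST. A minimal sketch of that contract follows; the key name 'RC_SCM_DATA' is only a placeholder, since the real key is not visible in this changeset.

import json

# (section, key, value) triples as they arrive after base64/msgpack decoding.
config = [
    ('web', 'push_ssl', 'false'),
    ('rhodecode', 'RC_SCM_DATA',
     json.dumps({'username': 'admin', 'ip': '10.0.0.5'})),
]

environ = {'REQUEST_METHOD': 'GET'}

# The same mapping set_env_from_config() applies, reproduced for illustration.
for section, _key, value in config:
    if section == 'rhodecode':
        data = json.loads(value)
        if data.get('username'):
            environ['REMOTE_USER'] = data['username']
        if data.get('ip'):
            environ['REMOTE_HOST'] = data['ip']
        break

assert environ['REMOTE_USER'] == 'admin'
assert environ['REMOTE_HOST'] == '10.0.0.5'
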