release: Merge default into stable for release preparation
marcink
r284:a3f9910f merge stable
@@ -0,0 +1,37 b''
This patch does two things: (1) use the right name for `docbook2texi',
and (2) make sure `gitman.info' isn't produced since it's broken (duplicate
node names).

diff -ru git-1.8.4-orig/Documentation/Makefile git-1.8.4/Documentation/Makefile
--- git-1.8.4-orig/Documentation/Makefile 2013-08-23 21:38:43.000000000 +0200
+++ git-1.8.4/Documentation/Makefile 2013-09-30 14:48:51.532890378 +0200
@@ -101,7 +101,7 @@

MAKEINFO = makeinfo
INSTALL_INFO = install-info
-DOCBOOK2X_TEXI = docbook2x-texi
+DOCBOOK2X_TEXI = docbook2texi
DBLATEX = dblatex
ifndef PERL_PATH
PERL_PATH = /usr/bin/perl
@@ -205,7 +205,7 @@
man5: $(DOC_MAN5)
man7: $(DOC_MAN7)

-info: git.info gitman.info
+info: git.info

pdf: user-manual.pdf

@@ -221,10 +221,9 @@

install-info: info
$(INSTALL) -d -m 755 $(DESTDIR)$(infodir)
- $(INSTALL) -m 644 git.info gitman.info $(DESTDIR)$(infodir)
+ $(INSTALL) -m 644 git.info $(DESTDIR)$(infodir)
if test -r $(DESTDIR)$(infodir)/dir; then \
$(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) git.info ;\
- $(INSTALL_INFO) --info-dir=$(DESTDIR)$(infodir) gitman.info ;\
else \
echo "No directory found in $(DESTDIR)$(infodir)" >&2 ; \
fi
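A quick way to sanity-check the patched Documentation/Makefile is to build only the info target in a git source tree with the patch applied; it should then produce git.info and no gitman.info. A minimal sketch, assuming docbook2texi and makeinfo are on PATH:

    $ make -C Documentation info
    $ ls Documentation/*.info    # expect git.info only, no gitman.info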
@@ -0,0 +1,94 b''
--- a/git-sh-i18n.sh
+++ b/git-sh-i18n.sh
@@ -15,87 +15,11 @@
fi
export TEXTDOMAINDIR

-# First decide what scheme to use...
-GIT_INTERNAL_GETTEXT_SH_SCHEME=fallthrough
-if test -n "@@USE_GETTEXT_SCHEME@@"
-then
- GIT_INTERNAL_GETTEXT_SH_SCHEME="@@USE_GETTEXT_SCHEME@@"
-elif test -n "$GIT_INTERNAL_GETTEXT_TEST_FALLBACKS"
-then
- : no probing necessary
-elif test -n "$GIT_GETTEXT_POISON"
-then
- GIT_INTERNAL_GETTEXT_SH_SCHEME=poison
-elif type gettext.sh >/dev/null 2>&1
-then
- # GNU libintl's gettext.sh
- GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
-elif test "$(gettext -h 2>&1)" = "-h"
-then
- # gettext binary exists but no gettext.sh. likely to be a gettext
- # binary on a Solaris or something that is not GNU libintl and
- # lack eval_gettext.
- GIT_INTERNAL_GETTEXT_SH_SCHEME=gettext_without_eval_gettext
-fi
-export GIT_INTERNAL_GETTEXT_SH_SCHEME
-
-# ... and then follow that decision.
-case "$GIT_INTERNAL_GETTEXT_SH_SCHEME" in
-gnu)
- # Use libintl's gettext.sh, or fall back to English if we can't.
- . gettext.sh
- ;;
-gettext_without_eval_gettext)
- # Solaris has a gettext(1) but no eval_gettext(1)
- eval_gettext () {
- gettext "$1" | (
- export PATH $(git sh-i18n--envsubst --variables "$1");
- git sh-i18n--envsubst "$1"
- )
- }
-
- eval_ngettext () {
- ngettext "$1" "$2" "$3" | (
- export PATH $(git sh-i18n--envsubst --variables "$2");
- git sh-i18n--envsubst "$2"
- )
- }
- ;;
-poison)
- # Emit garbage so that tests that incorrectly rely on translatable
- # strings will fail.
- gettext () {
- printf "%s" "# GETTEXT POISON #"
- }
-
- eval_gettext () {
- printf "%s" "# GETTEXT POISON #"
- }
-
- eval_ngettext () {
- printf "%s" "# GETTEXT POISON #"
- }
- ;;
-*)
- gettext () {
- printf "%s" "$1"
- }
-
- eval_gettext () {
- printf "%s" "$1" | (
- export PATH $(git sh-i18n--envsubst --variables "$1");
- git sh-i18n--envsubst "$1"
- )
- }
+# GNU gettext
+export GIT_INTERNAL_GETTEXT_SH_SCHEME=gnu
+export PATH=@gettext@/bin:$PATH

- eval_ngettext () {
- (test "$3" = 1 && printf "%s" "$1" || printf "%s" "$2") | (
- export PATH $(git sh-i18n--envsubst --variables "$2");
- git sh-i18n--envsubst "$2"
- )
- }
- ;;
-esac
+. @gettext@/bin/gettext.sh

# Git-specific wrapper functions
gettextln () {
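This patch drops git's runtime probing of gettext schemes and hard-codes the GNU scheme, leaving @gettext@ placeholders for the packaging side to fill in with a concrete store path. A minimal sketch of how that substitution could look in a build step; substituteInPlace and the gettext variable are assumptions here, not part of this changeset:

    # hypothetical postPatch step: point git-sh-i18n.sh at a concrete gettext install
    substituteInPlace git-sh-i18n.sh --replace '@gettext@' "${gettext}"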
@@ -0,0 +1,26 b''
diff --git a/connect.c b/connect.c
index fd7ffe1..20cd992 100644
--- a/connect.c
+++ b/connect.c
@@ -768,7 +768,7 @@

ssh = getenv("GIT_SSH");
if (!ssh)
- ssh = "ssh";
+ ssh = "@ssh@";
else
handle_ssh_variant(ssh, 0,
&port_option,
diff --git a/git-gui/lib/remote_add.tcl b/git-gui/lib/remote_add.tcl
index 50029d0..17b9594 100644
--- a/git-gui/lib/remote_add.tcl
+++ b/git-gui/lib/remote_add.tcl
@@ -139,7 +139,7 @@
# Parse the location
if { [regexp {(?:git\+)?ssh://([^/]+)(/.+)} $location xx host path]
|| [regexp {([^:][^:]+):(.+)} $location xx host path]} {
- set ssh ssh
+ set ssh @ssh@
if {[info exists env(GIT_SSH)]} {
set ssh $env(GIT_SSH)
}
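Here the default ssh client name becomes an @ssh@ placeholder, so the build can pin an absolute path instead of relying on ssh being on PATH (GIT_SSH still wins when set). A hedged sketch of the corresponding substitution; the openssh variable is illustrative only:

    # hypothetical postPatch step: hard-wire the ssh client used by git and git-gui
    substituteInPlace connect.c git-gui/lib/remote_add.tcl --replace '@ssh@' "${openssh}/bin/ssh"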
@@ -1,6 +1,6 b''
[bumpversion]
-current_version = 4.9.1
+current_version = 4.10.0
message = release: Bump version {current_version} to {new_version}

[bumpversion:file:vcsserver/VERSION]

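This bumpversion config drives the version bump itself; a rough sketch of the corresponding command, where the explicit --new-version and the part name "minor" are illustrative assumptions rather than part of this changeset:

    $ bumpversion --new-version 4.10.0 minor
    # updates current_version above and vcsserver/VERSION using the message template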
@@ -1,16 +1,14 b''
[DEFAULT]
done = false

[task:bump_version]
done = true

[task:fixes_on_stable]
-done = true

[task:pip2nix_generated]
-done = true

[release]
-state = prepared
+state = in_progress
-version = 4.9.1
+version = 4.10.0

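The pip2nix_generated task in this release-state file refers to regenerating pkgs/python-packages.nix (shown further below). A hedged sketch of that step, assuming pip2nix is configured through a pip2nix.ini in the repository:

    $ pip2nix generate
    # rewrites pkgs/python-packages.nix from the pinned Python requirements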
@@ -1,158 +1,165 b''
# Nix environment for the community edition
#
# This shall be as lean as possible, just producing the rhodecode-vcsserver
# derivation. For advanced tweaks to pimp up the development environment we use
# "shell.nix" so that it does not have to clutter this file.

{ pkgs ? (import <nixpkgs> {})
, pythonPackages ? "python27Packages"
, pythonExternalOverrides ? self: super: {}
, doCheck ? true
}:

let pkgs_ = pkgs; in

let
  pkgs = pkgs_.overridePackages (self: super: {
    # bump GIT version
    git = pkgs.lib.overrideDerivation pkgs_.git (oldAttrs: {
-     name = "git-2.9.5";
+     name = "git-2.13.5";
      src = pkgs.fetchurl {
-       url = "https://www.kernel.org/pub/software/scm/git/git-2.9.5.tar.xz";
-       sha256 = "00ir7qmgfszwrhxjzxwixk7wp35gxvvw467gr30bagwsrdza7gm4";
+       url = "https://www.kernel.org/pub/software/scm/git/git-2.13.5.tar.xz";
+       sha256 = "18fi18103n7grshm4ffb0fwsnvbl48sbqy5gqx528vf8maff5j91";
      };

+     patches = [
+       ./pkgs/git_patches/docbook2texi.patch
+       ./pkgs/git_patches/symlinks-in-bin.patch
+       ./pkgs/git_patches/git-sh-i18n.patch
+       ./pkgs/git_patches/ssh-path.patch
+     ];
+
    });

    # Override subversion derivation to
    # - activate python bindings
    subversion = let
      subversionWithPython = super.subversion.override {
        httpSupport = true;
        pythonBindings = true;
        python = self.python27Packages.python;
      };

    in

    pkgs.lib.overrideDerivation subversionWithPython (oldAttrs: {
      name = "subversion-1.9.7";
      src = pkgs.fetchurl {
        url = "https://www.apache.org/dist/subversion/subversion-1.9.7.tar.gz";
        sha256 = "0g3cs2h008z8ymgkhbk54jp87bjh7y049rn42igj881yi2f20an7";
      };

    });

  });

  inherit (pkgs.lib) fix extends;
  basePythonPackages = with builtins; if isAttrs pythonPackages
    then pythonPackages
    else getAttr pythonPackages pkgs;

  elem = builtins.elem;
  basename = path: with pkgs.lib; last (splitString "/" path);
  startsWith = prefix: full: let
    actualPrefix = builtins.substring 0 (builtins.stringLength prefix) full;
  in actualPrefix == prefix;

  src-filter = path: type: with pkgs.lib;
    let
      ext = last (splitString "." path);
    in
      !elem (basename path) [".hg" ".git" "__pycache__" ".eggs"
                             "node_modules" "build" "data" "tmp"] &&
      !elem ext ["egg-info" "pyc"] &&
      !startsWith "result" path;

  rhodecode-vcsserver-src = builtins.filterSource src-filter ./.;

  pythonGeneratedPackages = self: basePythonPackages.override (a: {
    inherit self;
  }) // (scopedImport {
    self = self;
    super = basePythonPackages;
    inherit pkgs;
    inherit (pkgs) fetchurl fetchgit;
  } ./pkgs/python-packages.nix);

  pythonOverrides = import ./pkgs/python-packages-overrides.nix {
    inherit basePythonPackages pkgs;
  };

  version = builtins.readFile ./vcsserver/VERSION;

  pythonLocalOverrides = self: super: {
    rhodecode-vcsserver = super.rhodecode-vcsserver.override (attrs: {
      inherit doCheck version;

      name = "rhodecode-vcsserver-${version}";
      releaseName = "RhodeCodeVCSServer-${version}";
      src = rhodecode-vcsserver-src;
      dontStrip = true; # prevent strip, we don't need it.

      propagatedBuildInputs = attrs.propagatedBuildInputs ++ ([
        pkgs.git
        pkgs.subversion
      ]);

      # TODO: johbo: Make a nicer way to expose the parts. Maybe
      # pkgs/default.nix?
      passthru = {
        pythonPackages = self;
      };

      # Add VCSServer bin directory to path so that tests can find 'vcsserver'.
      preCheck = ''
        export PATH="$out/bin:$PATH"
      '';

      # put custom attrs here
      checkPhase = ''
        runHook preCheck
        PYTHONHASHSEED=random py.test -p no:sugar -vv --cov-config=.coveragerc --cov=vcsserver --cov-report=term-missing vcsserver
        runHook postCheck
      '';

      postInstall = ''
        echo "Writing meta information for rccontrol to nix-support/rccontrol"
        mkdir -p $out/nix-support/rccontrol
        cp -v vcsserver/VERSION $out/nix-support/rccontrol/version
        echo "DONE: Meta information for rccontrol written"

        # python based programs need to be wrapped
        ln -s ${self.pyramid}/bin/* $out/bin/
        ln -s ${self.gunicorn}/bin/gunicorn $out/bin/

        # Symlink version control utilities
        #
        # We ensure that always the correct version is available as a symlink.
        # So that users calling them via the profile path will always use the
        # correct version.
        ln -s ${pkgs.git}/bin/git $out/bin
        ln -s ${self.mercurial}/bin/hg $out/bin
        ln -s ${pkgs.subversion}/bin/svn* $out/bin

        for file in $out/bin/*;
        do
          wrapProgram $file \
            --set PATH $PATH \
            --set PYTHONPATH $PYTHONPATH \
            --set PYTHONHASHSEED random
        done

      '';

    });
  };

  # Apply all overrides and fix the final package set
  myPythonPackages =
    (fix
    (extends pythonExternalOverrides
    (extends pythonLocalOverrides
    (extends pythonOverrides
    pythonGeneratedPackages))));

in myPythonPackages.rhodecode-vcsserver
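With the git bump and the patch set wired in above, the rhodecode-vcsserver derivation can be built straight from this file; a minimal sketch using standard nix-build arguments, where skipping the test run via doCheck is just an example:

    $ nix-build default.nix
    $ nix-build default.nix --arg doCheck false    # skip the py.test check phase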
@@ -1,799 +1,877 b''
1 # Generated by pip2nix 0.4.0
1 # Generated by pip2nix 0.4.0
2 # See https://github.com/johbo/pip2nix
2 # See https://github.com/johbo/pip2nix
3
3
4 {
4 {
5 Beaker = super.buildPythonPackage {
5 Beaker = super.buildPythonPackage {
6 name = "Beaker-1.7.0";
6 name = "Beaker-1.9.0";
7 buildInputs = with self; [];
7 buildInputs = with self; [];
8 doCheck = false;
8 doCheck = false;
9 propagatedBuildInputs = with self; [];
9 propagatedBuildInputs = with self; [funcsigs];
10 src = fetchurl {
10 src = fetchurl {
11 url = "https://pypi.python.org/packages/97/8e/409d2e7c009b8aa803dc9e6f239f1db7c3cdf578249087a404e7c27a505d/Beaker-1.7.0.tar.gz";
11 url = "https://pypi.python.org/packages/93/b2/12de6937b06e9615dbb3cb3a1c9af17f133f435bdef59f4ad42032b6eb49/Beaker-1.9.0.tar.gz";
12 md5 = "386be3f7fe427358881eee4622b428b3";
12 md5 = "38b3fcdfa24faf97c6cf66991eb54e9c";
13 };
13 };
14 meta = {
14 meta = {
15 license = [ pkgs.lib.licenses.bsdOriginal ];
15 license = [ pkgs.lib.licenses.bsdOriginal ];
16 };
16 };
17 };
17 };
18 Jinja2 = super.buildPythonPackage {
18 Jinja2 = super.buildPythonPackage {
19 name = "Jinja2-2.8";
19 name = "Jinja2-2.8";
20 buildInputs = with self; [];
20 buildInputs = with self; [];
21 doCheck = false;
21 doCheck = false;
22 propagatedBuildInputs = with self; [MarkupSafe];
22 propagatedBuildInputs = with self; [MarkupSafe];
23 src = fetchurl {
23 src = fetchurl {
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
24 url = "https://pypi.python.org/packages/f2/2f/0b98b06a345a761bec91a079ccae392d282690c2d8272e708f4d10829e22/Jinja2-2.8.tar.gz";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
25 md5 = "edb51693fe22c53cee5403775c71a99e";
26 };
26 };
27 meta = {
27 meta = {
28 license = [ pkgs.lib.licenses.bsdOriginal ];
28 license = [ pkgs.lib.licenses.bsdOriginal ];
29 };
29 };
30 };
30 };
31 Mako = super.buildPythonPackage {
31 Mako = super.buildPythonPackage {
32 name = "Mako-1.0.6";
32 name = "Mako-1.0.7";
33 buildInputs = with self; [];
33 buildInputs = with self; [];
34 doCheck = false;
34 doCheck = false;
35 propagatedBuildInputs = with self; [MarkupSafe];
35 propagatedBuildInputs = with self; [MarkupSafe];
36 src = fetchurl {
36 src = fetchurl {
37 url = "https://pypi.python.org/packages/56/4b/cb75836863a6382199aefb3d3809937e21fa4cb0db15a4f4ba0ecc2e7e8e/Mako-1.0.6.tar.gz";
37 url = "https://pypi.python.org/packages/eb/f3/67579bb486517c0d49547f9697e36582cd19dafb5df9e687ed8e22de57fa/Mako-1.0.7.tar.gz";
38 md5 = "a28e22a339080316b2acc352b9ee631c";
38 md5 = "5836cc997b1b773ef389bf6629c30e65";
39 };
39 };
40 meta = {
40 meta = {
41 license = [ pkgs.lib.licenses.mit ];
41 license = [ pkgs.lib.licenses.mit ];
42 };
42 };
43 };
43 };
44 MarkupSafe = super.buildPythonPackage {
44 MarkupSafe = super.buildPythonPackage {
45 name = "MarkupSafe-0.23";
45 name = "MarkupSafe-0.23";
46 buildInputs = with self; [];
46 buildInputs = with self; [];
47 doCheck = false;
47 doCheck = false;
48 propagatedBuildInputs = with self; [];
48 propagatedBuildInputs = with self; [];
49 src = fetchurl {
49 src = fetchurl {
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
50 url = "https://pypi.python.org/packages/c0/41/bae1254e0396c0cc8cf1751cb7d9afc90a602353695af5952530482c963f/MarkupSafe-0.23.tar.gz";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
51 md5 = "f5ab3deee4c37cd6a922fb81e730da6e";
52 };
52 };
53 meta = {
53 meta = {
54 license = [ pkgs.lib.licenses.bsdOriginal ];
54 license = [ pkgs.lib.licenses.bsdOriginal ];
55 };
55 };
56 };
56 };
57 PasteDeploy = super.buildPythonPackage {
57 PasteDeploy = super.buildPythonPackage {
58 name = "PasteDeploy-1.5.2";
58 name = "PasteDeploy-1.5.2";
59 buildInputs = with self; [];
59 buildInputs = with self; [];
60 doCheck = false;
60 doCheck = false;
61 propagatedBuildInputs = with self; [];
61 propagatedBuildInputs = with self; [];
62 src = fetchurl {
62 src = fetchurl {
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
63 url = "https://pypi.python.org/packages/0f/90/8e20cdae206c543ea10793cbf4136eb9a8b3f417e04e40a29d72d9922cbd/PasteDeploy-1.5.2.tar.gz";
64 md5 = "352b7205c78c8de4987578d19431af3b";
64 md5 = "352b7205c78c8de4987578d19431af3b";
65 };
65 };
66 meta = {
66 meta = {
67 license = [ pkgs.lib.licenses.mit ];
67 license = [ pkgs.lib.licenses.mit ];
68 };
68 };
69 };
69 };
70 WebOb = super.buildPythonPackage {
70 WebOb = super.buildPythonPackage {
71 name = "WebOb-1.3.1";
71 name = "WebOb-1.7.3";
72 buildInputs = with self; [];
72 buildInputs = with self; [];
73 doCheck = false;
73 doCheck = false;
74 propagatedBuildInputs = with self; [];
74 propagatedBuildInputs = with self; [];
75 src = fetchurl {
75 src = fetchurl {
76 url = "https://pypi.python.org/packages/16/78/adfc0380b8a0d75b2d543fa7085ba98a573b1ae486d9def88d172b81b9fa/WebOb-1.3.1.tar.gz";
76 url = "https://pypi.python.org/packages/46/87/2f96d8d43b2078fae6e1d33fa86b95c228cebed060f4e3c7576cc44ea83b/WebOb-1.7.3.tar.gz";
77 md5 = "20918251c5726956ba8fef22d1556177";
77 md5 = "350028baffc508e3d23c078118e35316";
78 };
78 };
79 meta = {
79 meta = {
80 license = [ pkgs.lib.licenses.mit ];
80 license = [ pkgs.lib.licenses.mit ];
81 };
81 };
82 };
82 };
83 WebTest = super.buildPythonPackage {
83 WebTest = super.buildPythonPackage {
84 name = "WebTest-1.4.3";
84 name = "WebTest-2.0.27";
85 buildInputs = with self; [];
85 buildInputs = with self; [];
86 doCheck = false;
86 doCheck = false;
87 propagatedBuildInputs = with self; [WebOb];
87 propagatedBuildInputs = with self; [six WebOb waitress beautifulsoup4];
88 src = fetchurl {
88 src = fetchurl {
89 url = "https://pypi.python.org/packages/51/3d/84fd0f628df10b30c7db87895f56d0158e5411206b721ca903cb51bfd948/WebTest-1.4.3.zip";
89 url = "https://pypi.python.org/packages/80/fa/ca3a759985c72e3a124cbca3e1f8a2e931a07ffd31fd45d8f7bf21cb95cf/WebTest-2.0.27.tar.gz";
90 md5 = "631ce728bed92c681a4020a36adbc353";
90 md5 = "54e6515ac71c51b6fc90179483c749ad";
91 };
91 };
92 meta = {
92 meta = {
93 license = [ pkgs.lib.licenses.mit ];
93 license = [ pkgs.lib.licenses.mit ];
94 };
94 };
95 };
95 };
96 backports.shutil-get-terminal-size = super.buildPythonPackage {
96 backports.shutil-get-terminal-size = super.buildPythonPackage {
97 name = "backports.shutil-get-terminal-size-1.0.0";
97 name = "backports.shutil-get-terminal-size-1.0.0";
98 buildInputs = with self; [];
98 buildInputs = with self; [];
99 doCheck = false;
99 doCheck = false;
100 propagatedBuildInputs = with self; [];
100 propagatedBuildInputs = with self; [];
101 src = fetchurl {
101 src = fetchurl {
102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
102 url = "https://pypi.python.org/packages/ec/9c/368086faa9c016efce5da3e0e13ba392c9db79e3ab740b763fe28620b18b/backports.shutil_get_terminal_size-1.0.0.tar.gz";
103 md5 = "03267762480bd86b50580dc19dff3c66";
103 md5 = "03267762480bd86b50580dc19dff3c66";
104 };
104 };
105 meta = {
105 meta = {
106 license = [ pkgs.lib.licenses.mit ];
106 license = [ pkgs.lib.licenses.mit ];
107 };
107 };
108 };
108 };
109 beautifulsoup4 = super.buildPythonPackage {
110 name = "beautifulsoup4-4.6.0";
111 buildInputs = with self; [];
112 doCheck = false;
113 propagatedBuildInputs = with self; [];
114 src = fetchurl {
115 url = "https://pypi.python.org/packages/fa/8d/1d14391fdaed5abada4e0f63543fef49b8331a34ca60c88bd521bcf7f782/beautifulsoup4-4.6.0.tar.gz";
116 md5 = "c17714d0f91a23b708a592cb3c697728";
117 };
118 meta = {
119 license = [ pkgs.lib.licenses.mit ];
120 };
121 };
109 configobj = super.buildPythonPackage {
122 configobj = super.buildPythonPackage {
110 name = "configobj-5.0.6";
123 name = "configobj-5.0.6";
111 buildInputs = with self; [];
124 buildInputs = with self; [];
112 doCheck = false;
125 doCheck = false;
113 propagatedBuildInputs = with self; [six];
126 propagatedBuildInputs = with self; [six];
114 src = fetchurl {
127 src = fetchurl {
115 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
128 url = "https://pypi.python.org/packages/64/61/079eb60459c44929e684fa7d9e2fdca403f67d64dd9dbac27296be2e0fab/configobj-5.0.6.tar.gz";
116 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
129 md5 = "e472a3a1c2a67bb0ec9b5d54c13a47d6";
117 };
130 };
118 meta = {
131 meta = {
119 license = [ pkgs.lib.licenses.bsdOriginal ];
132 license = [ pkgs.lib.licenses.bsdOriginal ];
120 };
133 };
121 };
134 };
122 cov-core = super.buildPythonPackage {
135 cov-core = super.buildPythonPackage {
123 name = "cov-core-1.15.0";
136 name = "cov-core-1.15.0";
124 buildInputs = with self; [];
137 buildInputs = with self; [];
125 doCheck = false;
138 doCheck = false;
126 propagatedBuildInputs = with self; [coverage];
139 propagatedBuildInputs = with self; [coverage];
127 src = fetchurl {
140 src = fetchurl {
128 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
141 url = "https://pypi.python.org/packages/4b/87/13e75a47b4ba1be06f29f6d807ca99638bedc6b57fa491cd3de891ca2923/cov-core-1.15.0.tar.gz";
129 md5 = "f519d4cb4c4e52856afb14af52919fe6";
142 md5 = "f519d4cb4c4e52856afb14af52919fe6";
130 };
143 };
131 meta = {
144 meta = {
132 license = [ pkgs.lib.licenses.mit ];
145 license = [ pkgs.lib.licenses.mit ];
133 };
146 };
134 };
147 };
135 coverage = super.buildPythonPackage {
148 coverage = super.buildPythonPackage {
136 name = "coverage-3.7.1";
149 name = "coverage-3.7.1";
137 buildInputs = with self; [];
150 buildInputs = with self; [];
138 doCheck = false;
151 doCheck = false;
139 propagatedBuildInputs = with self; [];
152 propagatedBuildInputs = with self; [];
140 src = fetchurl {
153 src = fetchurl {
141 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
154 url = "https://pypi.python.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
142 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
155 md5 = "c47b36ceb17eaff3ecfab3bcd347d0df";
143 };
156 };
144 meta = {
157 meta = {
145 license = [ pkgs.lib.licenses.bsdOriginal ];
158 license = [ pkgs.lib.licenses.bsdOriginal ];
146 };
159 };
147 };
160 };
148 decorator = super.buildPythonPackage {
161 decorator = super.buildPythonPackage {
149 name = "decorator-4.0.11";
162 name = "decorator-4.0.11";
150 buildInputs = with self; [];
163 buildInputs = with self; [];
151 doCheck = false;
164 doCheck = false;
152 propagatedBuildInputs = with self; [];
165 propagatedBuildInputs = with self; [];
153 src = fetchurl {
166 src = fetchurl {
154 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
167 url = "https://pypi.python.org/packages/cc/ac/5a16f1fc0506ff72fcc8fd4e858e3a1c231f224ab79bb7c4c9b2094cc570/decorator-4.0.11.tar.gz";
155 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
168 md5 = "73644c8f0bd4983d1b6a34b49adec0ae";
156 };
169 };
157 meta = {
170 meta = {
158 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
171 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "new BSD License"; } ];
159 };
172 };
160 };
173 };
161 dulwich = super.buildPythonPackage {
174 dulwich = super.buildPythonPackage {
162 name = "dulwich-0.13.0";
175 name = "dulwich-0.13.0";
163 buildInputs = with self; [];
176 buildInputs = with self; [];
164 doCheck = false;
177 doCheck = false;
165 propagatedBuildInputs = with self; [];
178 propagatedBuildInputs = with self; [];
166 src = fetchurl {
179 src = fetchurl {
167 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
180 url = "https://pypi.python.org/packages/84/95/732d280eee829dacc954e8109f97b47abcadcca472c2ab013e1635eb4792/dulwich-0.13.0.tar.gz";
168 md5 = "6dede0626657c2bd08f48ca1221eea91";
181 md5 = "6dede0626657c2bd08f48ca1221eea91";
169 };
182 };
170 meta = {
183 meta = {
171 license = [ pkgs.lib.licenses.gpl2Plus ];
184 license = [ pkgs.lib.licenses.gpl2Plus ];
172 };
185 };
173 };
186 };
174 enum34 = super.buildPythonPackage {
187 enum34 = super.buildPythonPackage {
175 name = "enum34-1.1.6";
188 name = "enum34-1.1.6";
176 buildInputs = with self; [];
189 buildInputs = with self; [];
177 doCheck = false;
190 doCheck = false;
178 propagatedBuildInputs = with self; [];
191 propagatedBuildInputs = with self; [];
179 src = fetchurl {
192 src = fetchurl {
180 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
193 url = "https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz";
181 md5 = "5f13a0841a61f7fc295c514490d120d0";
194 md5 = "5f13a0841a61f7fc295c514490d120d0";
182 };
195 };
183 meta = {
196 meta = {
184 license = [ pkgs.lib.licenses.bsdOriginal ];
197 license = [ pkgs.lib.licenses.bsdOriginal ];
185 };
198 };
186 };
199 };
200 funcsigs = super.buildPythonPackage {
201 name = "funcsigs-1.0.2";
202 buildInputs = with self; [];
203 doCheck = false;
204 propagatedBuildInputs = with self; [];
205 src = fetchurl {
206 url = "https://pypi.python.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz";
207 md5 = "7e583285b1fb8a76305d6d68f4ccc14e";
208 };
209 meta = {
210 license = [ { fullName = "ASL"; } pkgs.lib.licenses.asl20 ];
211 };
212 };
187 gevent = super.buildPythonPackage {
213 gevent = super.buildPythonPackage {
188 name = "gevent-1.1.2";
214 name = "gevent-1.2.2";
189 buildInputs = with self; [];
215 buildInputs = with self; [];
190 doCheck = false;
216 doCheck = false;
191 propagatedBuildInputs = with self; [greenlet];
217 propagatedBuildInputs = with self; [greenlet];
192 src = fetchurl {
218 src = fetchurl {
193 url = "https://pypi.python.org/packages/43/8f/cb3224a0e6ab663547f45c10d0651cfd52633fde4283bf68d627084df8cc/gevent-1.1.2.tar.gz";
219 url = "https://pypi.python.org/packages/1b/92/b111f76e54d2be11375b47b213b56687214f258fd9dae703546d30b837be/gevent-1.2.2.tar.gz";
194 md5 = "bb32a2f852a4997138014d5007215c6e";
220 md5 = "7f0baf355384fe5ff2ecf66853422554";
195 };
221 };
196 meta = {
222 meta = {
197 license = [ pkgs.lib.licenses.mit ];
223 license = [ pkgs.lib.licenses.mit ];
198 };
224 };
199 };
225 };
200 gprof2dot = super.buildPythonPackage {
226 gprof2dot = super.buildPythonPackage {
201 name = "gprof2dot-2016.10.13";
227 name = "gprof2dot-2016.10.13";
202 buildInputs = with self; [];
228 buildInputs = with self; [];
203 doCheck = false;
229 doCheck = false;
204 propagatedBuildInputs = with self; [];
230 propagatedBuildInputs = with self; [];
205 src = fetchurl {
231 src = fetchurl {
206 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
232 url = "https://pypi.python.org/packages/a0/e0/73c71baed306f0402a00a94ffc7b2be94ad1296dfcb8b46912655b93154c/gprof2dot-2016.10.13.tar.gz";
207 md5 = "0125401f15fd2afe1df686a76c64a4fd";
233 md5 = "0125401f15fd2afe1df686a76c64a4fd";
208 };
234 };
209 meta = {
235 meta = {
210 license = [ { fullName = "LGPL"; } ];
236 license = [ { fullName = "LGPL"; } ];
211 };
237 };
212 };
238 };
213 greenlet = super.buildPythonPackage {
239 greenlet = super.buildPythonPackage {
214 name = "greenlet-0.4.10";
240 name = "greenlet-0.4.12";
215 buildInputs = with self; [];
241 buildInputs = with self; [];
216 doCheck = false;
242 doCheck = false;
217 propagatedBuildInputs = with self; [];
243 propagatedBuildInputs = with self; [];
218 src = fetchurl {
244 src = fetchurl {
219 url = "https://pypi.python.org/packages/67/62/ca2a95648666eaa2ffeb6a9b3964f21d419ae27f82f2e66b53da5b943fc4/greenlet-0.4.10.zip";
245 url = "https://pypi.python.org/packages/be/76/82af375d98724054b7e273b5d9369346937324f9bcc20980b45b068ef0b0/greenlet-0.4.12.tar.gz";
220 md5 = "bed0c4b3b896702131f4d5c72f87c41d";
246 md5 = "e8637647d58a26c4a1f51ca393e53c00";
221 };
247 };
222 meta = {
248 meta = {
223 license = [ pkgs.lib.licenses.mit ];
249 license = [ pkgs.lib.licenses.mit ];
224 };
250 };
225 };
251 };
226 gunicorn = super.buildPythonPackage {
252 gunicorn = super.buildPythonPackage {
227 name = "gunicorn-19.6.0";
253 name = "gunicorn-19.7.1";
228 buildInputs = with self; [];
254 buildInputs = with self; [];
229 doCheck = false;
255 doCheck = false;
230 propagatedBuildInputs = with self; [];
256 propagatedBuildInputs = with self; [];
231 src = fetchurl {
257 src = fetchurl {
232 url = "https://pypi.python.org/packages/84/ce/7ea5396efad1cef682bbc4068e72a0276341d9d9d0f501da609fab9fcb80/gunicorn-19.6.0.tar.gz";
258 url = "https://pypi.python.org/packages/30/3a/10bb213cede0cc4d13ac2263316c872a64bf4c819000c8ccd801f1d5f822/gunicorn-19.7.1.tar.gz";
233 md5 = "338e5e8a83ea0f0625f768dba4597530";
259 md5 = "174d3c3cd670a5be0404d84c484e590c";
234 };
260 };
235 meta = {
261 meta = {
236 license = [ pkgs.lib.licenses.mit ];
262 license = [ pkgs.lib.licenses.mit ];
237 };
263 };
238 };
264 };
239 hg-evolve = super.buildPythonPackage {
265 hg-evolve = super.buildPythonPackage {
240 name = "hg-evolve-6.6.0";
266 name = "hg-evolve-6.6.0";
241 buildInputs = with self; [];
267 buildInputs = with self; [];
242 doCheck = false;
268 doCheck = false;
243 propagatedBuildInputs = with self; [];
269 propagatedBuildInputs = with self; [];
244 src = fetchurl {
270 src = fetchurl {
245 url = "https://pypi.python.org/packages/c5/04/3557c97eaa320b5a6769edade64a299cd2710f5f3b818f64991ab6c8c08f/hg-evolve-6.6.0.tar.gz";
271 url = "https://pypi.python.org/packages/c5/04/3557c97eaa320b5a6769edade64a299cd2710f5f3b818f64991ab6c8c08f/hg-evolve-6.6.0.tar.gz";
246 md5 = "06b9a9c8e8137bbf0c4fbf940c009725";
272 md5 = "06b9a9c8e8137bbf0c4fbf940c009725";
247 };
273 };
248 meta = {
274 meta = {
249 license = [ { fullName = "GPLv2+"; } ];
275 license = [ { fullName = "GPLv2+"; } ];
250 };
276 };
251 };
277 };
252 hgsubversion = super.buildPythonPackage {
278 hgsubversion = super.buildPythonPackage {
253 name = "hgsubversion-1.8.6";
279 name = "hgsubversion-1.8.7";
254 buildInputs = with self; [];
280 buildInputs = with self; [];
255 doCheck = false;
281 doCheck = false;
256 propagatedBuildInputs = with self; [mercurial subvertpy];
282 propagatedBuildInputs = with self; [mercurial subvertpy];
257 src = fetchurl {
283 src = fetchurl {
258 url = "https://pypi.python.org/packages/ce/97/032e5093ad250e9908cea04395cbddb6902d587f712a79b53b2d778bdfdd/hgsubversion-1.8.6.tar.gz";
284 url = "https://pypi.python.org/packages/1c/b8/ff4d2e0ec486f9765b410f09728c02a010e7485d68d6154968074498a403/hgsubversion-1.8.7.tar.gz";
259 md5 = "9310cb266031cf8d0779885782a84a5b";
285 md5 = "289f1c36c13bd6a3435a9be390a77bdc";
260 };
286 };
261 meta = {
287 meta = {
262 license = [ pkgs.lib.licenses.gpl1 ];
288 license = [ pkgs.lib.licenses.gpl1 ];
263 };
289 };
264 };
290 };
291 hupper = super.buildPythonPackage {
292 name = "hupper-1.0";
293 buildInputs = with self; [];
294 doCheck = false;
295 propagatedBuildInputs = with self; [];
296 src = fetchurl {
297 url = "https://pypi.python.org/packages/2e/07/df892c564dc09bb3cf6f6deb976c26adf9117db75ba218cb4353dbc9d826/hupper-1.0.tar.gz";
298 md5 = "26e77da7d5ac5858f59af050d1a6eb5a";
299 };
300 meta = {
301 license = [ pkgs.lib.licenses.mit ];
302 };
303 };
265 infrae.cache = super.buildPythonPackage {
304 infrae.cache = super.buildPythonPackage {
266 name = "infrae.cache-1.0.1";
305 name = "infrae.cache-1.0.1";
267 buildInputs = with self; [];
306 buildInputs = with self; [];
268 doCheck = false;
307 doCheck = false;
269 propagatedBuildInputs = with self; [Beaker repoze.lru];
308 propagatedBuildInputs = with self; [Beaker repoze.lru];
270 src = fetchurl {
309 src = fetchurl {
271 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
310 url = "https://pypi.python.org/packages/bb/f0/e7d5e984cf6592fd2807dc7bc44a93f9d18e04e6a61f87fdfb2622422d74/infrae.cache-1.0.1.tar.gz";
272 md5 = "b09076a766747e6ed2a755cc62088e32";
311 md5 = "b09076a766747e6ed2a755cc62088e32";
273 };
312 };
274 meta = {
313 meta = {
275 license = [ pkgs.lib.licenses.zpt21 ];
314 license = [ pkgs.lib.licenses.zpt21 ];
276 };
315 };
277 };
316 };
278 ipdb = super.buildPythonPackage {
317 ipdb = super.buildPythonPackage {
279 name = "ipdb-0.10.1";
318 name = "ipdb-0.10.3";
280 buildInputs = with self; [];
319 buildInputs = with self; [];
281 doCheck = false;
320 doCheck = false;
282 propagatedBuildInputs = with self; [ipython setuptools];
321 propagatedBuildInputs = with self; [setuptools ipython];
283 src = fetchurl {
322 src = fetchurl {
284 url = "https://pypi.python.org/packages/eb/0a/0a37dc19572580336ad3813792c0d18c8d7117c2d66fc63c501f13a7a8f8/ipdb-0.10.1.tar.gz";
323 url = "https://pypi.python.org/packages/ad/cc/0e7298e1fbf2efd52667c9354a12aa69fb6f796ce230cca03525051718ef/ipdb-0.10.3.tar.gz";
285 md5 = "4aeab65f633ddc98ebdb5eebf08dc713";
324 md5 = "def1f6ac075d54bdee07e6501263d4fa";
286 };
325 };
287 meta = {
326 meta = {
288 license = [ pkgs.lib.licenses.bsdOriginal ];
327 license = [ pkgs.lib.licenses.bsdOriginal ];
289 };
328 };
290 };
329 };
291 ipython = super.buildPythonPackage {
330 ipython = super.buildPythonPackage {
292 name = "ipython-5.1.0";
331 name = "ipython-5.1.0";
293 buildInputs = with self; [];
332 buildInputs = with self; [];
294 doCheck = false;
333 doCheck = false;
295 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
334 propagatedBuildInputs = with self; [setuptools decorator pickleshare simplegeneric traitlets prompt-toolkit pygments pexpect backports.shutil-get-terminal-size pathlib2 pexpect];
296 src = fetchurl {
335 src = fetchurl {
297 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
336 url = "https://pypi.python.org/packages/89/63/a9292f7cd9d0090a0f995e1167f3f17d5889dcbc9a175261719c513b9848/ipython-5.1.0.tar.gz";
298 md5 = "47c8122420f65b58784cb4b9b4af35e3";
337 md5 = "47c8122420f65b58784cb4b9b4af35e3";
299 };
338 };
300 meta = {
339 meta = {
301 license = [ pkgs.lib.licenses.bsdOriginal ];
340 license = [ pkgs.lib.licenses.bsdOriginal ];
302 };
341 };
303 };
342 };
304 ipython-genutils = super.buildPythonPackage {
343 ipython-genutils = super.buildPythonPackage {
305 name = "ipython-genutils-0.2.0";
344 name = "ipython-genutils-0.2.0";
306 buildInputs = with self; [];
345 buildInputs = with self; [];
307 doCheck = false;
346 doCheck = false;
308 propagatedBuildInputs = with self; [];
347 propagatedBuildInputs = with self; [];
309 src = fetchurl {
348 src = fetchurl {
310 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
349 url = "https://pypi.python.org/packages/e8/69/fbeffffc05236398ebfcfb512b6d2511c622871dca1746361006da310399/ipython_genutils-0.2.0.tar.gz";
311 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
350 md5 = "5a4f9781f78466da0ea1a648f3e1f79f";
312 };
351 };
313 meta = {
352 meta = {
314 license = [ pkgs.lib.licenses.bsdOriginal ];
353 license = [ pkgs.lib.licenses.bsdOriginal ];
315 };
354 };
316 };
355 };
317 mercurial = super.buildPythonPackage {
356 mercurial = super.buildPythonPackage {
318 name = "mercurial-4.2.3";
357 name = "mercurial-4.2.3";
319 buildInputs = with self; [];
358 buildInputs = with self; [];
320 doCheck = false;
359 doCheck = false;
321 propagatedBuildInputs = with self; [];
360 propagatedBuildInputs = with self; [];
322 src = fetchurl {
361 src = fetchurl {
323 url = "https://www.mercurial-scm.org/release/mercurial-4.2.3.tar.gz";
362 url = "https://www.mercurial-scm.org/release/mercurial-4.2.3.tar.gz";
324 md5 = "a24a8fab7c2ad2c65e945b1b35d94e3b";
363 md5 = "a24a8fab7c2ad2c65e945b1b35d94e3b";
325 };
364 };
326 meta = {
365 meta = {
327 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
366 license = [ pkgs.lib.licenses.gpl1 pkgs.lib.licenses.gpl2Plus ];
328 };
367 };
329 };
368 };
330 mock = super.buildPythonPackage {
369 mock = super.buildPythonPackage {
331 name = "mock-1.0.1";
370 name = "mock-1.0.1";
332 buildInputs = with self; [];
371 buildInputs = with self; [];
333 doCheck = false;
372 doCheck = false;
334 propagatedBuildInputs = with self; [];
373 propagatedBuildInputs = with self; [];
335 src = fetchurl {
374 src = fetchurl {
336 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
375 url = "https://pypi.python.org/packages/15/45/30273ee91feb60dabb8fbb2da7868520525f02cf910279b3047182feed80/mock-1.0.1.zip";
337 md5 = "869f08d003c289a97c1a6610faf5e913";
376 md5 = "869f08d003c289a97c1a6610faf5e913";
338 };
377 };
339 meta = {
378 meta = {
340 license = [ pkgs.lib.licenses.bsdOriginal ];
379 license = [ pkgs.lib.licenses.bsdOriginal ];
341 };
380 };
342 };
381 };
343 msgpack-python = super.buildPythonPackage {
382 msgpack-python = super.buildPythonPackage {
344 name = "msgpack-python-0.4.8";
383 name = "msgpack-python-0.4.8";
345 buildInputs = with self; [];
384 buildInputs = with self; [];
346 doCheck = false;
385 doCheck = false;
347 propagatedBuildInputs = with self; [];
386 propagatedBuildInputs = with self; [];
348 src = fetchurl {
387 src = fetchurl {
349 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
388 url = "https://pypi.python.org/packages/21/27/8a1d82041c7a2a51fcc73675875a5f9ea06c2663e02fcfeb708be1d081a0/msgpack-python-0.4.8.tar.gz";
350 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
389 md5 = "dcd854fb41ee7584ebbf35e049e6be98";
351 };
390 };
352 meta = {
391 meta = {
353 license = [ pkgs.lib.licenses.asl20 ];
392 license = [ pkgs.lib.licenses.asl20 ];
354 };
393 };
355 };
394 };
356 pathlib2 = super.buildPythonPackage {
395 pathlib2 = super.buildPythonPackage {
357 name = "pathlib2-2.1.0";
396 name = "pathlib2-2.3.0";
358 buildInputs = with self; [];
397 buildInputs = with self; [];
359 doCheck = false;
398 doCheck = false;
360 propagatedBuildInputs = with self; [six];
399 propagatedBuildInputs = with self; [six scandir];
361 src = fetchurl {
400 src = fetchurl {
362 url = "https://pypi.python.org/packages/c9/27/8448b10d8440c08efeff0794adf7d0ed27adb98372c70c7b38f3947d4749/pathlib2-2.1.0.tar.gz";
401 url = "https://pypi.python.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
363 md5 = "38e4f58b4d69dfcb9edb49a54a8b28d2";
402 md5 = "89c90409d11fd5947966b6a30a47d18c";
364 };
403 };
365 meta = {
404 meta = {
366 license = [ pkgs.lib.licenses.mit ];
405 license = [ pkgs.lib.licenses.mit ];
367 };
406 };
368 };
407 };
369 pexpect = super.buildPythonPackage {
408 pexpect = super.buildPythonPackage {
370 name = "pexpect-4.2.1";
409 name = "pexpect-4.2.1";
371 buildInputs = with self; [];
410 buildInputs = with self; [];
372 doCheck = false;
411 doCheck = false;
373 propagatedBuildInputs = with self; [ptyprocess];
412 propagatedBuildInputs = with self; [ptyprocess];
374 src = fetchurl {
413 src = fetchurl {
375 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
414 url = "https://pypi.python.org/packages/e8/13/d0b0599099d6cd23663043a2a0bb7c61e58c6ba359b2656e6fb000ef5b98/pexpect-4.2.1.tar.gz";
376 md5 = "3694410001a99dff83f0b500a1ca1c95";
415 md5 = "3694410001a99dff83f0b500a1ca1c95";
377 };
416 };
378 meta = {
417 meta = {
379 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
418 license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];
380 };
419 };
381 };
420 };
382 pickleshare = super.buildPythonPackage {
421 pickleshare = super.buildPythonPackage {
383 name = "pickleshare-0.7.4";
422 name = "pickleshare-0.7.4";
384 buildInputs = with self; [];
423 buildInputs = with self; [];
385 doCheck = false;
424 doCheck = false;
386 propagatedBuildInputs = with self; [pathlib2];
425 propagatedBuildInputs = with self; [pathlib2];
387 src = fetchurl {
426 src = fetchurl {
388 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
427 url = "https://pypi.python.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
389 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
428 md5 = "6a9e5dd8dfc023031f6b7b3f824cab12";
390 };
429 };
391 meta = {
430 meta = {
392 license = [ pkgs.lib.licenses.mit ];
431 license = [ pkgs.lib.licenses.mit ];
393 };
432 };
394 };
433 };
434 plaster = super.buildPythonPackage {
435 name = "plaster-0.5";
436 buildInputs = with self; [];
437 doCheck = false;
438 propagatedBuildInputs = with self; [setuptools];
439 src = fetchurl {
440 url = "https://pypi.python.org/packages/99/b3/d7ca1fe31d2b56dba68a238721fda6820770f9c2a3de17a582d4b5b2edcc/plaster-0.5.tar.gz";
441 md5 = "c59345a67a860cfcaa1bd6a81451399d";
442 };
443 meta = {
444 license = [ pkgs.lib.licenses.mit ];
445 };
446 };
447 plaster-pastedeploy = super.buildPythonPackage {
448 name = "plaster-pastedeploy-0.4.1";
449 buildInputs = with self; [];
450 doCheck = false;
451 propagatedBuildInputs = with self; [PasteDeploy plaster];
452 src = fetchurl {
453 url = "https://pypi.python.org/packages/9d/6e/f8be01ed41c94e6c54ac97cf2eb142a702aae0c8cce31c846f785e525b40/plaster_pastedeploy-0.4.1.tar.gz";
454 md5 = "f48d5344b922e56c4978eebf1cd2e0d3";
455 };
456 meta = {
457 license = [ pkgs.lib.licenses.mit ];
458 };
459 };
395 prompt-toolkit = super.buildPythonPackage {
460 prompt-toolkit = super.buildPythonPackage {
396 name = "prompt-toolkit-1.0.14";
461 name = "prompt-toolkit-1.0.15";
397 buildInputs = with self; [];
462 buildInputs = with self; [];
398 doCheck = false;
463 doCheck = false;
399 propagatedBuildInputs = with self; [six wcwidth];
464 propagatedBuildInputs = with self; [six wcwidth];
400 src = fetchurl {
465 src = fetchurl {
401 url = "https://pypi.python.org/packages/55/56/8c39509b614bda53e638b7500f12577d663ac1b868aef53426fc6a26c3f5/prompt_toolkit-1.0.14.tar.gz";
466 url = "https://pypi.python.org/packages/8a/ad/cf6b128866e78ad6d7f1dc5b7f99885fb813393d9860778b2984582e81b5/prompt_toolkit-1.0.15.tar.gz";
402 md5 = "f24061ae133ed32c6b764e92bd48c496";
467 md5 = "8fe70295006dbc8afedd43e5eba99032";
403 };
468 };
404 meta = {
469 meta = {
405 license = [ pkgs.lib.licenses.bsdOriginal ];
470 license = [ pkgs.lib.licenses.bsdOriginal ];
406 };
471 };
407 };
472 };
408 ptyprocess = super.buildPythonPackage {
473 ptyprocess = super.buildPythonPackage {
409 name = "ptyprocess-0.5.1";
474 name = "ptyprocess-0.5.2";
410 buildInputs = with self; [];
475 buildInputs = with self; [];
411 doCheck = false;
476 doCheck = false;
412 propagatedBuildInputs = with self; [];
477 propagatedBuildInputs = with self; [];
413 src = fetchurl {
478 src = fetchurl {
414 url = "https://pypi.python.org/packages/db/d7/b465161910f3d1cef593c5e002bff67e0384898f597f1a7fdc8db4c02bf6/ptyprocess-0.5.1.tar.gz";
479 url = "https://pypi.python.org/packages/51/83/5d07dc35534640b06f9d9f1a1d2bc2513fb9cc7595a1b0e28ae5477056ce/ptyprocess-0.5.2.tar.gz";
415 md5 = "94e537122914cc9ec9c1eadcd36e73a1";
480 md5 = "d3b8febae1b8c53b054bd818d0bb8665";
416 };
481 };
417 meta = {
482 meta = {
418 license = [ ];
483 license = [ ];
419 };
484 };
420 };
485 };
421 py = super.buildPythonPackage {
486 py = super.buildPythonPackage {
422 name = "py-1.4.31";
487 name = "py-1.4.34";
423 buildInputs = with self; [];
488 buildInputs = with self; [];
424 doCheck = false;
489 doCheck = false;
425 propagatedBuildInputs = with self; [];
490 propagatedBuildInputs = with self; [];
426 src = fetchurl {
491 src = fetchurl {
427 url = "https://pypi.python.org/packages/f4/9a/8dfda23f36600dd701c6722316ba8a3ab4b990261f83e7d3ffc6dfedf7ef/py-1.4.31.tar.gz";
492 url = "https://pypi.python.org/packages/68/35/58572278f1c097b403879c1e9369069633d1cbad5239b9057944bb764782/py-1.4.34.tar.gz";
428 md5 = "5d2c63c56dc3f2115ec35c066ecd582b";
493 md5 = "d9c3d8f734b0819ff48e355d77bf1730";
429 };
494 };
430 meta = {
495 meta = {
431 license = [ pkgs.lib.licenses.mit ];
496 license = [ pkgs.lib.licenses.mit ];
432 };
497 };
433 };
498 };
434 pygments = super.buildPythonPackage {
499 pygments = super.buildPythonPackage {
435 name = "pygments-2.2.0";
500 name = "pygments-2.2.0";
436 buildInputs = with self; [];
501 buildInputs = with self; [];
437 doCheck = false;
502 doCheck = false;
438 propagatedBuildInputs = with self; [];
503 propagatedBuildInputs = with self; [];
439 src = fetchurl {
504 src = fetchurl {
440 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
505 url = "https://pypi.python.org/packages/71/2a/2e4e77803a8bd6408a2903340ac498cb0a2181811af7c9ec92cb70b0308a/Pygments-2.2.0.tar.gz";
441 md5 = "13037baca42f16917cbd5ad2fab50844";
506 md5 = "13037baca42f16917cbd5ad2fab50844";
442 };
507 };
443 meta = {
508 meta = {
444 license = [ pkgs.lib.licenses.bsdOriginal ];
509 license = [ pkgs.lib.licenses.bsdOriginal ];
445 };
510 };
446 };
511 };
447 pyramid = super.buildPythonPackage {
512 pyramid = super.buildPythonPackage {
448 name = "pyramid-1.7.4";
513 name = "pyramid-1.9.1";
449 buildInputs = with self; [];
514 buildInputs = with self; [];
450 doCheck = false;
515 doCheck = false;
451 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy];
516 propagatedBuildInputs = with self; [setuptools WebOb repoze.lru zope.interface zope.deprecation venusian translationstring PasteDeploy plaster plaster-pastedeploy hupper];
452 src = fetchurl {
517 src = fetchurl {
453 url = "https://pypi.python.org/packages/33/91/55f5c661f8923902cd1f68d75f2b937c45e7682857356cf18f0be5493899/pyramid-1.7.4.tar.gz";
518 url = "https://pypi.python.org/packages/9a/57/73447be9e7d0512d601e3f0a1fb9d7d1efb941911f49efdfe036d2826507/pyramid-1.9.1.tar.gz";
454 md5 = "6ef1dfdcff9136d04490410757c4c446";
519 md5 = "0163e19c58c2d12976a3b6fdb57e052d";
455 };
520 };
456 meta = {
521 meta = {
457 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
522 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
458 };
523 };
459 };
524 };
460 pyramid-jinja2 = super.buildPythonPackage {
525 pyramid-jinja2 = super.buildPythonPackage {
461 name = "pyramid-jinja2-2.5";
526 name = "pyramid-jinja2-2.5";
462 buildInputs = with self; [];
527 buildInputs = with self; [];
463 doCheck = false;
528 doCheck = false;
464 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
529 propagatedBuildInputs = with self; [pyramid zope.deprecation Jinja2 MarkupSafe];
465 src = fetchurl {
530 src = fetchurl {
466 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
531 url = "https://pypi.python.org/packages/a1/80/595e26ffab7deba7208676b6936b7e5a721875710f982e59899013cae1ed/pyramid_jinja2-2.5.tar.gz";
467 md5 = "07cb6547204ac5e6f0b22a954ccee928";
532 md5 = "07cb6547204ac5e6f0b22a954ccee928";
468 };
533 };
469 meta = {
534 meta = {
470 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
535 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
471 };
536 };
472 };
537 };
473 pyramid-mako = super.buildPythonPackage {
538 pyramid-mako = super.buildPythonPackage {
474 name = "pyramid-mako-1.0.2";
539 name = "pyramid-mako-1.0.2";
475 buildInputs = with self; [];
540 buildInputs = with self; [];
476 doCheck = false;
541 doCheck = false;
477 propagatedBuildInputs = with self; [pyramid Mako];
542 propagatedBuildInputs = with self; [pyramid Mako];
478 src = fetchurl {
543 src = fetchurl {
479 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
544 url = "https://pypi.python.org/packages/f1/92/7e69bcf09676d286a71cb3bbb887b16595b96f9ba7adbdc239ffdd4b1eb9/pyramid_mako-1.0.2.tar.gz";
480 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
545 md5 = "ee25343a97eb76bd90abdc2a774eb48a";
481 };
546 };
482 meta = {
547 meta = {
483 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
548 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
484 };
549 };
485 };
550 };
486 pytest = super.buildPythonPackage {
551 pytest = super.buildPythonPackage {
487 name = "pytest-3.0.5";
552 name = "pytest-3.1.2";
488 buildInputs = with self; [];
553 buildInputs = with self; [];
489 doCheck = false;
554 doCheck = false;
490 propagatedBuildInputs = with self; [py];
555 propagatedBuildInputs = with self; [py setuptools];
491 src = fetchurl {
556 src = fetchurl {
492 url = "https://pypi.python.org/packages/a8/87/b7ca49efe52d2b4169f2bfc49aa5e384173c4619ea8e635f123a0dac5b75/pytest-3.0.5.tar.gz";
557 url = "https://pypi.python.org/packages/72/2b/2d3155e01f45a5a04427857352ee88220ee39550b2bc078f9db3190aea46/pytest-3.1.2.tar.gz";
493 md5 = "cefd527b59332688bf5db4a10aa8a7cb";
558 md5 = "c4d179f89043cc925e1c169d03128e02";
494 };
559 };
495 meta = {
560 meta = {
496 license = [ pkgs.lib.licenses.mit ];
561 license = [ pkgs.lib.licenses.mit ];
497 };
562 };
498 };
563 };
499 pytest-catchlog = super.buildPythonPackage {
564 pytest-catchlog = super.buildPythonPackage {
500 name = "pytest-catchlog-1.2.2";
565 name = "pytest-catchlog-1.2.2";
501 buildInputs = with self; [];
566 buildInputs = with self; [];
502 doCheck = false;
567 doCheck = false;
503 propagatedBuildInputs = with self; [py pytest];
568 propagatedBuildInputs = with self; [py pytest];
504 src = fetchurl {
569 src = fetchurl {
505 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
570 url = "https://pypi.python.org/packages/f2/2b/2faccdb1a978fab9dd0bf31cca9f6847fbe9184a0bdcc3011ac41dd44191/pytest-catchlog-1.2.2.zip";
506 md5 = "09d890c54c7456c818102b7ff8c182c8";
571 md5 = "09d890c54c7456c818102b7ff8c182c8";
507 };
572 };
508 meta = {
573 meta = {
509 license = [ pkgs.lib.licenses.mit ];
574 license = [ pkgs.lib.licenses.mit ];
510 };
575 };
511 };
576 };
512 pytest-cov = super.buildPythonPackage {
577 pytest-cov = super.buildPythonPackage {
513 name = "pytest-cov-2.4.0";
578 name = "pytest-cov-2.5.1";
514 buildInputs = with self; [];
579 buildInputs = with self; [];
515 doCheck = false;
580 doCheck = false;
516 propagatedBuildInputs = with self; [pytest coverage];
581 propagatedBuildInputs = with self; [pytest coverage];
517 src = fetchurl {
582 src = fetchurl {
518 url = "https://pypi.python.org/packages/00/c0/2bfd1fcdb9d407b8ac8185b1cb5ff458105c6b207a9a7f0e13032de9828f/pytest-cov-2.4.0.tar.gz";
583 url = "https://pypi.python.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
519 md5 = "2fda09677d232acc99ec1b3c5831e33f";
584 md5 = "5acf38d4909e19819eb5c1754fbfc0ac";
520 };
585 };
521 meta = {
586 meta = {
522 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
587 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
523 };
588 };
524 };
589 };
525 pytest-profiling = super.buildPythonPackage {
590 pytest-profiling = super.buildPythonPackage {
526 name = "pytest-profiling-1.2.2";
591 name = "pytest-profiling-1.2.6";
527 buildInputs = with self; [];
592 buildInputs = with self; [];
528 doCheck = false;
593 doCheck = false;
529 propagatedBuildInputs = with self; [six pytest gprof2dot];
594 propagatedBuildInputs = with self; [six pytest gprof2dot];
530 src = fetchurl {
595 src = fetchurl {
531 url = "https://pypi.python.org/packages/73/e8/804681323bac0bc45c520ec34185ba8469008942266d0074699b204835c1/pytest-profiling-1.2.2.tar.gz";
596 url = "https://pypi.python.org/packages/f9/0d/df67fb9ce16c2cef201693da956321b1bccfbf9a4ead39748b9f9d1d74cb/pytest-profiling-1.2.6.tar.gz";
532 md5 = "0a16d7dda2d23b91e9730fa4558cf728";
597 md5 = "50eb4c66c3762a2f1a49669bedc0b894";
533 };
598 };
534 meta = {
599 meta = {
535 license = [ pkgs.lib.licenses.mit ];
600 license = [ pkgs.lib.licenses.mit ];
536 };
601 };
537 };
602 };
538 pytest-runner = super.buildPythonPackage {
603 pytest-runner = super.buildPythonPackage {
539 name = "pytest-runner-2.9";
604 name = "pytest-runner-2.11.1";
540 buildInputs = with self; [];
605 buildInputs = with self; [];
541 doCheck = false;
606 doCheck = false;
542 propagatedBuildInputs = with self; [];
607 propagatedBuildInputs = with self; [];
543 src = fetchurl {
608 src = fetchurl {
544 url = "https://pypi.python.org/packages/11/d4/c335ddf94463e451109e3494e909765c3e5205787b772e3b25ee8601b86a/pytest-runner-2.9.tar.gz";
609 url = "https://pypi.python.org/packages/9e/4d/08889e5e27a9f5d6096b9ad257f4dea1faabb03c5ded8f665ead448f5d8a/pytest-runner-2.11.1.tar.gz";
545 md5 = "2212a2e34404b0960b2fdc2c469247b2";
610 md5 = "bdb73eb18eca2727944a2dcf963c5a81";
546 };
611 };
547 meta = {
612 meta = {
548 license = [ pkgs.lib.licenses.mit ];
613 license = [ pkgs.lib.licenses.mit ];
549 };
614 };
550 };
615 };
551 pytest-sugar = super.buildPythonPackage {
616 pytest-sugar = super.buildPythonPackage {
552 name = "pytest-sugar-0.7.1";
617 name = "pytest-sugar-0.8.0";
553 buildInputs = with self; [];
618 buildInputs = with self; [];
554 doCheck = false;
619 doCheck = false;
555 propagatedBuildInputs = with self; [pytest termcolor];
620 propagatedBuildInputs = with self; [pytest termcolor];
556 src = fetchurl {
621 src = fetchurl {
557 url = "https://pypi.python.org/packages/03/97/05d988b4fa870e7373e8ee4582408543b9ca2bd35c3c67b569369c6f9c49/pytest-sugar-0.7.1.tar.gz";
622 url = "https://pypi.python.org/packages/a5/b0/b2773dee078f17773a5bf2dfad49b0be57b6354bbd84bbefe4313e509d87/pytest-sugar-0.8.0.tar.gz";
558 md5 = "7400f7c11f3d572b2c2a3b60352d35fe";
623 md5 = "8cafbdad648068e0e44b8fc5f9faae42";
559 };
624 };
560 meta = {
625 meta = {
561 license = [ pkgs.lib.licenses.bsdOriginal ];
626 license = [ pkgs.lib.licenses.bsdOriginal ];
562 };
627 };
563 };
628 };
564 pytest-timeout = super.buildPythonPackage {
629 pytest-timeout = super.buildPythonPackage {
565 name = "pytest-timeout-1.2.0";
630 name = "pytest-timeout-1.2.0";
566 buildInputs = with self; [];
631 buildInputs = with self; [];
567 doCheck = false;
632 doCheck = false;
568 propagatedBuildInputs = with self; [pytest];
633 propagatedBuildInputs = with self; [pytest];
569 src = fetchurl {
634 src = fetchurl {
570 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
635 url = "https://pypi.python.org/packages/cc/b7/b2a61365ea6b6d2e8881360ae7ed8dad0327ad2df89f2f0be4a02304deb2/pytest-timeout-1.2.0.tar.gz";
571 md5 = "83607d91aa163562c7ee835da57d061d";
636 md5 = "83607d91aa163562c7ee835da57d061d";
572 };
637 };
573 meta = {
638 meta = {
574 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
639 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
575 };
640 };
576 };
641 };
577 repoze.lru = super.buildPythonPackage {
642 repoze.lru = super.buildPythonPackage {
578 name = "repoze.lru-0.6";
643 name = "repoze.lru-0.6";
579 buildInputs = with self; [];
644 buildInputs = with self; [];
580 doCheck = false;
645 doCheck = false;
581 propagatedBuildInputs = with self; [];
646 propagatedBuildInputs = with self; [];
582 src = fetchurl {
647 src = fetchurl {
583 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
648 url = "https://pypi.python.org/packages/6e/1e/aa15cc90217e086dc8769872c8778b409812ff036bf021b15795638939e4/repoze.lru-0.6.tar.gz";
584 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
649 md5 = "2c3b64b17a8e18b405f55d46173e14dd";
585 };
650 };
586 meta = {
651 meta = {
587 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
652 license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
588 };
653 };
589 };
654 };
590 rhodecode-vcsserver = super.buildPythonPackage {
655 rhodecode-vcsserver = super.buildPythonPackage {
591 name = "rhodecode-vcsserver-4.9.1";
656 name = "rhodecode-vcsserver-4.10.0";
592 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
657 buildInputs = with self; [pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage configobj];
593 doCheck = true;
658 doCheck = true;
594 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
659 propagatedBuildInputs = with self; [Beaker configobj decorator dulwich hgsubversion hg-evolve infrae.cache mercurial msgpack-python pyramid pyramid-jinja2 pyramid-mako repoze.lru simplejson subprocess32 subvertpy six translationstring WebOb wheel zope.deprecation zope.interface ipdb ipython gevent greenlet gunicorn waitress pytest py pytest-cov pytest-sugar pytest-runner pytest-catchlog pytest-profiling gprof2dot pytest-timeout mock WebTest cov-core coverage];
595 src = ./.;
660 src = ./.;
596 meta = {
661 meta = {
597 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
662 license = [ { fullName = "GPL V3"; } { fullName = "GNU General Public License v3 or later (GPLv3+)"; } ];
598 };
663 };
599 };
664 };
665 scandir = super.buildPythonPackage {
666 name = "scandir-1.5";
667 buildInputs = with self; [];
668 doCheck = false;
669 propagatedBuildInputs = with self; [];
670 src = fetchurl {
671 url = "https://pypi.python.org/packages/bd/f4/3143e0289faf0883228017dbc6387a66d0b468df646645e29e1eb89ea10e/scandir-1.5.tar.gz";
672 md5 = "a2713043de681bba6b084be42e7a8a44";
673 };
674 meta = {
675 license = [ pkgs.lib.licenses.bsdOriginal { fullName = "New BSD License"; } ];
676 };
677 };
600 setuptools = super.buildPythonPackage {
678 setuptools = super.buildPythonPackage {
601 name = "setuptools-30.1.0";
679 name = "setuptools-30.1.0";
602 buildInputs = with self; [];
680 buildInputs = with self; [];
603 doCheck = false;
681 doCheck = false;
604 propagatedBuildInputs = with self; [];
682 propagatedBuildInputs = with self; [];
605 src = fetchurl {
683 src = fetchurl {
606 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
684 url = "https://pypi.python.org/packages/1e/43/002c8616db9a3e7be23c2556e39b90a32bb40ba0dc652de1999d5334d372/setuptools-30.1.0.tar.gz";
607 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
685 md5 = "cac497f42e5096ac8df29e38d3f81c3e";
608 };
686 };
609 meta = {
687 meta = {
610 license = [ pkgs.lib.licenses.mit ];
688 license = [ pkgs.lib.licenses.mit ];
611 };
689 };
612 };
690 };
613 simplegeneric = super.buildPythonPackage {
691 simplegeneric = super.buildPythonPackage {
614 name = "simplegeneric-0.8.1";
692 name = "simplegeneric-0.8.1";
615 buildInputs = with self; [];
693 buildInputs = with self; [];
616 doCheck = false;
694 doCheck = false;
617 propagatedBuildInputs = with self; [];
695 propagatedBuildInputs = with self; [];
618 src = fetchurl {
696 src = fetchurl {
619 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
697 url = "https://pypi.python.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip";
620 md5 = "f9c1fab00fd981be588fc32759f474e3";
698 md5 = "f9c1fab00fd981be588fc32759f474e3";
621 };
699 };
622 meta = {
700 meta = {
623 license = [ pkgs.lib.licenses.zpt21 ];
701 license = [ pkgs.lib.licenses.zpt21 ];
624 };
702 };
625 };
703 };
626 simplejson = super.buildPythonPackage {
704 simplejson = super.buildPythonPackage {
627 name = "simplejson-3.7.2";
705 name = "simplejson-3.11.1";
628 buildInputs = with self; [];
706 buildInputs = with self; [];
629 doCheck = false;
707 doCheck = false;
630 propagatedBuildInputs = with self; [];
708 propagatedBuildInputs = with self; [];
631 src = fetchurl {
709 src = fetchurl {
632 url = "https://pypi.python.org/packages/6d/89/7f13f099344eea9d6722779a1f165087cb559598107844b1ac5dbd831fb1/simplejson-3.7.2.tar.gz";
710 url = "https://pypi.python.org/packages/08/48/c97b668d6da7d7bebe7ea1817a6f76394b0ec959cb04214ca833c34359df/simplejson-3.11.1.tar.gz";
633 md5 = "a5fc7d05d4cb38492285553def5d4b46";
711 md5 = "6e2f1bd5fb0a926facf5d89d217a7183";
634 };
712 };
635 meta = {
713 meta = {
636 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
714 license = [ { fullName = "Academic Free License (AFL)"; } pkgs.lib.licenses.mit ];
637 };
715 };
638 };
716 };
639 six = super.buildPythonPackage {
717 six = super.buildPythonPackage {
640 name = "six-1.9.0";
718 name = "six-1.9.0";
641 buildInputs = with self; [];
719 buildInputs = with self; [];
642 doCheck = false;
720 doCheck = false;
643 propagatedBuildInputs = with self; [];
721 propagatedBuildInputs = with self; [];
644 src = fetchurl {
722 src = fetchurl {
645 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
723 url = "https://pypi.python.org/packages/16/64/1dc5e5976b17466fd7d712e59cbe9fb1e18bec153109e5ba3ed6c9102f1a/six-1.9.0.tar.gz";
646 md5 = "476881ef4012262dfc8adc645ee786c4";
724 md5 = "476881ef4012262dfc8adc645ee786c4";
647 };
725 };
648 meta = {
726 meta = {
649 license = [ pkgs.lib.licenses.mit ];
727 license = [ pkgs.lib.licenses.mit ];
650 };
728 };
651 };
729 };
652 subprocess32 = super.buildPythonPackage {
730 subprocess32 = super.buildPythonPackage {
653 name = "subprocess32-3.2.6";
731 name = "subprocess32-3.2.7";
654 buildInputs = with self; [];
732 buildInputs = with self; [];
655 doCheck = false;
733 doCheck = false;
656 propagatedBuildInputs = with self; [];
734 propagatedBuildInputs = with self; [];
657 src = fetchurl {
735 src = fetchurl {
658 url = "https://pypi.python.org/packages/28/8d/33ccbff51053f59ae6c357310cac0e79246bbed1d345ecc6188b176d72c3/subprocess32-3.2.6.tar.gz";
736 url = "https://pypi.python.org/packages/b8/2f/49e53b0d0e94611a2dc624a1ad24d41b6d94d0f1b0a078443407ea2214c2/subprocess32-3.2.7.tar.gz";
659 md5 = "754c5ab9f533e764f931136974b618f1";
737 md5 = "824c801e479d3e916879aae3e9c15e16";
660 };
738 };
661 meta = {
739 meta = {
662 license = [ pkgs.lib.licenses.psfl ];
740 license = [ pkgs.lib.licenses.psfl ];
663 };
741 };
664 };
742 };
665 subvertpy = super.buildPythonPackage {
743 subvertpy = super.buildPythonPackage {
666 name = "subvertpy-0.9.3";
744 name = "subvertpy-0.9.3";
667 buildInputs = with self; [];
745 buildInputs = with self; [];
668 doCheck = false;
746 doCheck = false;
669 propagatedBuildInputs = with self; [];
747 propagatedBuildInputs = with self; [];
670 src = fetchurl {
748 src = fetchurl {
671 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
749 url = "https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c";
672 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
750 md5 = "4e49da2fe07608239cc9a80a7bb8f33c";
673 };
751 };
674 meta = {
752 meta = {
675 license = [ pkgs.lib.licenses.lgpl21Plus ];
753 license = [ pkgs.lib.licenses.lgpl21Plus ];
676 };
754 };
677 };
755 };
678 termcolor = super.buildPythonPackage {
756 termcolor = super.buildPythonPackage {
679 name = "termcolor-1.1.0";
757 name = "termcolor-1.1.0";
680 buildInputs = with self; [];
758 buildInputs = with self; [];
681 doCheck = false;
759 doCheck = false;
682 propagatedBuildInputs = with self; [];
760 propagatedBuildInputs = with self; [];
683 src = fetchurl {
761 src = fetchurl {
684 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
762 url = "https://pypi.python.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz";
685 md5 = "043e89644f8909d462fbbfa511c768df";
763 md5 = "043e89644f8909d462fbbfa511c768df";
686 };
764 };
687 meta = {
765 meta = {
688 license = [ pkgs.lib.licenses.mit ];
766 license = [ pkgs.lib.licenses.mit ];
689 };
767 };
690 };
768 };
691 traitlets = super.buildPythonPackage {
769 traitlets = super.buildPythonPackage {
692 name = "traitlets-4.3.2";
770 name = "traitlets-4.3.2";
693 buildInputs = with self; [];
771 buildInputs = with self; [];
694 doCheck = false;
772 doCheck = false;
695 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
773 propagatedBuildInputs = with self; [ipython-genutils six decorator enum34];
696 src = fetchurl {
774 src = fetchurl {
697 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
775 url = "https://pypi.python.org/packages/a5/98/7f5ef2fe9e9e071813aaf9cb91d1a732e0a68b6c44a32b38cb8e14c3f069/traitlets-4.3.2.tar.gz";
698 md5 = "3068663f2f38fd939a9eb3a500ccc154";
776 md5 = "3068663f2f38fd939a9eb3a500ccc154";
699 };
777 };
700 meta = {
778 meta = {
701 license = [ pkgs.lib.licenses.bsdOriginal ];
779 license = [ pkgs.lib.licenses.bsdOriginal ];
702 };
780 };
703 };
781 };
704 translationstring = super.buildPythonPackage {
782 translationstring = super.buildPythonPackage {
705 name = "translationstring-1.3";
783 name = "translationstring-1.3";
706 buildInputs = with self; [];
784 buildInputs = with self; [];
707 doCheck = false;
785 doCheck = false;
708 propagatedBuildInputs = with self; [];
786 propagatedBuildInputs = with self; [];
709 src = fetchurl {
787 src = fetchurl {
710 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
788 url = "https://pypi.python.org/packages/5e/eb/bee578cc150b44c653b63f5ebe258b5d0d812ddac12497e5f80fcad5d0b4/translationstring-1.3.tar.gz";
711 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
789 md5 = "a4b62e0f3c189c783a1685b3027f7c90";
712 };
790 };
713 meta = {
791 meta = {
714 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
792 license = [ { fullName = "BSD-like (http://repoze.org/license.html)"; } ];
715 };
793 };
716 };
794 };
717 venusian = super.buildPythonPackage {
795 venusian = super.buildPythonPackage {
718 name = "venusian-1.0";
796 name = "venusian-1.1.0";
719 buildInputs = with self; [];
797 buildInputs = with self; [];
720 doCheck = false;
798 doCheck = false;
721 propagatedBuildInputs = with self; [];
799 propagatedBuildInputs = with self; [];
722 src = fetchurl {
800 src = fetchurl {
723 url = "https://pypi.python.org/packages/86/20/1948e0dfc4930ddde3da8c33612f6a5717c0b4bc28f591a5c5cf014dd390/venusian-1.0.tar.gz";
801 url = "https://pypi.python.org/packages/38/24/b4b470ab9e0a2e2e9b9030c7735828c8934b4c6b45befd1bb713ec2aeb2d/venusian-1.1.0.tar.gz";
724 md5 = "dccf2eafb7113759d60c86faf5538756";
802 md5 = "56bc5e6756e4bda37bcdb94f74a72b8f";
725 };
803 };
726 meta = {
804 meta = {
727 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
805 license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];
728 };
806 };
729 };
807 };
730 waitress = super.buildPythonPackage {
808 waitress = super.buildPythonPackage {
731 name = "waitress-1.0.1";
809 name = "waitress-1.0.2";
732 buildInputs = with self; [];
810 buildInputs = with self; [];
733 doCheck = false;
811 doCheck = false;
734 propagatedBuildInputs = with self; [];
812 propagatedBuildInputs = with self; [];
735 src = fetchurl {
813 src = fetchurl {
736 url = "https://pypi.python.org/packages/78/7d/84d11b96c3f60164dec3bef4a859a03aeae0231aa93f57fbe0d05fa4ff36/waitress-1.0.1.tar.gz";
814 url = "https://pypi.python.org/packages/cd/f4/400d00863afa1e03618e31fd7e2092479a71b8c9718b00eb1eeb603746c6/waitress-1.0.2.tar.gz";
737 md5 = "dda92358a7569669086155923a46e57c";
815 md5 = "b968f39e95d609f6194c6e50425d4bb7";
738 };
816 };
739 meta = {
817 meta = {
740 license = [ pkgs.lib.licenses.zpt21 ];
818 license = [ pkgs.lib.licenses.zpt21 ];
741 };
819 };
742 };
820 };
743 wcwidth = super.buildPythonPackage {
821 wcwidth = super.buildPythonPackage {
744 name = "wcwidth-0.1.7";
822 name = "wcwidth-0.1.7";
745 buildInputs = with self; [];
823 buildInputs = with self; [];
746 doCheck = false;
824 doCheck = false;
747 propagatedBuildInputs = with self; [];
825 propagatedBuildInputs = with self; [];
748 src = fetchurl {
826 src = fetchurl {
749 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
827 url = "https://pypi.python.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz";
750 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
828 md5 = "b3b6a0a08f0c8a34d1de8cf44150a4ad";
751 };
829 };
752 meta = {
830 meta = {
753 license = [ pkgs.lib.licenses.mit ];
831 license = [ pkgs.lib.licenses.mit ];
754 };
832 };
755 };
833 };
756 wheel = super.buildPythonPackage {
834 wheel = super.buildPythonPackage {
757 name = "wheel-0.29.0";
835 name = "wheel-0.29.0";
758 buildInputs = with self; [];
836 buildInputs = with self; [];
759 doCheck = false;
837 doCheck = false;
760 propagatedBuildInputs = with self; [];
838 propagatedBuildInputs = with self; [];
761 src = fetchurl {
839 src = fetchurl {
762 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
840 url = "https://pypi.python.org/packages/c9/1d/bd19e691fd4cfe908c76c429fe6e4436c9e83583c4414b54f6c85471954a/wheel-0.29.0.tar.gz";
763 md5 = "555a67e4507cedee23a0deb9651e452f";
841 md5 = "555a67e4507cedee23a0deb9651e452f";
764 };
842 };
765 meta = {
843 meta = {
766 license = [ pkgs.lib.licenses.mit ];
844 license = [ pkgs.lib.licenses.mit ];
767 };
845 };
768 };
846 };
769 zope.deprecation = super.buildPythonPackage {
847 zope.deprecation = super.buildPythonPackage {
770 name = "zope.deprecation-4.1.2";
848 name = "zope.deprecation-4.1.2";
771 buildInputs = with self; [];
849 buildInputs = with self; [];
772 doCheck = false;
850 doCheck = false;
773 propagatedBuildInputs = with self; [setuptools];
851 propagatedBuildInputs = with self; [setuptools];
774 src = fetchurl {
852 src = fetchurl {
775 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
853 url = "https://pypi.python.org/packages/c1/d3/3919492d5e57d8dd01b36f30b34fc8404a30577392b1eb817c303499ad20/zope.deprecation-4.1.2.tar.gz";
776 md5 = "e9a663ded58f4f9f7881beb56cae2782";
854 md5 = "e9a663ded58f4f9f7881beb56cae2782";
777 };
855 };
778 meta = {
856 meta = {
779 license = [ pkgs.lib.licenses.zpt21 ];
857 license = [ pkgs.lib.licenses.zpt21 ];
780 };
858 };
781 };
859 };
782 zope.interface = super.buildPythonPackage {
860 zope.interface = super.buildPythonPackage {
783 name = "zope.interface-4.1.3";
861 name = "zope.interface-4.1.3";
784 buildInputs = with self; [];
862 buildInputs = with self; [];
785 doCheck = false;
863 doCheck = false;
786 propagatedBuildInputs = with self; [setuptools];
864 propagatedBuildInputs = with self; [setuptools];
787 src = fetchurl {
865 src = fetchurl {
788 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
866 url = "https://pypi.python.org/packages/9d/81/2509ca3c6f59080123c1a8a97125eb48414022618cec0e64eb1313727bfe/zope.interface-4.1.3.tar.gz";
789 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
867 md5 = "9ae3d24c0c7415deb249dd1a132f0f79";
790 };
868 };
791 meta = {
869 meta = {
792 license = [ pkgs.lib.licenses.zpt21 ];
870 license = [ pkgs.lib.licenses.zpt21 ];
793 };
871 };
794 };
872 };
795
873
796 ### Test requirements
874 ### Test requirements
797
875
798
876
799 }
877 }
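
Each derivation above pins its source tarball by URL and md5. After bumping a version, a pin can be re-checked by recomputing the digest locally; the following is a minimal, hedged Python 2 sketch (URL and md5 copied verbatim from the pytest-cov entry above; any other entry works the same way):

import hashlib
import urllib2

# URL and expected md5 copied from the pytest-cov-2.5.1 entry above.
URL = ("https://pypi.python.org/packages/24/b4/"
       "7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/"
       "pytest-cov-2.5.1.tar.gz")
EXPECTED_MD5 = "5acf38d4909e19819eb5c1754fbfc0ac"

data = urllib2.urlopen(URL).read()
digest = hashlib.md5(data).hexdigest()
print("md5 ok" if digest == EXPECTED_MD5 else "md5 MISMATCH: %s" % digest)
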
@@ -1,40 +1,41 b''
1 ## core
1 ## core
2 setuptools==30.1.0
2 setuptools==30.1.0
3
3
4 Beaker==1.7.0
4 Beaker==1.9.0
5 configobj==5.0.6
5 configobj==5.0.6
6 decorator==4.0.11
6 decorator==4.0.11
7 dulwich==0.13.0
7 dulwich==0.13.0
8 hgsubversion==1.8.6
8 hgsubversion==1.8.7
9 hg-evolve==6.6.0
9 hg-evolve==6.6.0
10 infrae.cache==1.0.1
10 infrae.cache==1.0.1
11 mercurial==4.2.3
11 mercurial==4.2.3
12 msgpack-python==0.4.8
12 msgpack-python==0.4.8
13 pyramid-jinja2==2.5
13 pyramid-jinja2==2.5
14 pyramid==1.7.4
14 pyramid==1.9.1
15 pyramid-mako==1.0.2
15 pyramid-mako==1.0.2
16 repoze.lru==0.6
16 repoze.lru==0.6
17 simplejson==3.7.2
17 simplejson==3.11.1
18 subprocess32==3.2.6
18 subprocess32==3.2.7
19
19
20 # Custom subvertpy that is not available on pypi.
20 # Custom subvertpy that is not available on pypi.
21 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
21 https://code.rhodecode.com/upstream/subvertpy/archive/subvertpy-0.9.3.tar.gz?md5=4e49da2fe07608239cc9a80a7bb8f33c#egg=subvertpy==0.9.3
22
22
23 six==1.9.0
23 six==1.9.0
24 translationstring==1.3
24 translationstring==1.3
25 WebOb==1.3.1
25 WebOb==1.7.3
26 wheel==0.29.0
26 wheel==0.29.0
27 zope.deprecation==4.1.2
27 zope.deprecation==4.1.2
28 zope.interface==4.1.3
28 zope.interface==4.1.3
29
29
30 ## http servers
31 gevent==1.2.2
32 greenlet==0.4.12
33 gunicorn==19.7.1
34 waitress==1.0.2
35
30 ## debug
36 ## debug
31 ipdb==0.10.1
37 ipdb==0.10.3
32 ipython==5.1.0
38 ipython==5.1.0
33 # http servers
34 gevent==1.1.2
35 greenlet==0.4.10
36 gunicorn==19.6.0
37 waitress==1.0.1
38
39
39 ## test related requirements
40 ## test related requirements
40 -r requirements_test.txt
41 -r requirements_test.txt
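
Once an environment is built from the pins above, the installed versions can be asserted at runtime with pkg_resources; a minimal sketch, using three requirement strings copied from this file (any subset works the same way):

import pkg_resources

# Raises VersionConflict or DistributionNotFound if the installed
# environment has drifted from the pinned versions above.
for pin in ("Beaker==1.9.0", "pyramid==1.9.1", "WebOb==1.7.3"):
    pkg_resources.require(pin)
print("pinned core dependencies are satisfied")
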
@@ -1,15 +1,15 b''
1 # test related requirements
1 # test related requirements
2 pytest==3.0.5
2 pytest==3.1.2
3 py==1.4.31
3 py==1.4.34
4 pytest-cov==2.4.0
4 pytest-cov==2.5.1
5 pytest-sugar==0.7.1
5 pytest-sugar==0.8.0
6 pytest-runner==2.9.0
6 pytest-runner==2.11.1
7 pytest-catchlog==1.2.2
7 pytest-catchlog==1.2.2
8 pytest-profiling==1.2.2
8 pytest-profiling==1.2.6
9 gprof2dot==2016.10.13
9 gprof2dot==2016.10.13
10 pytest-timeout==1.2.0
10 pytest-timeout==1.2.0
11
11
12 mock==1.0.1
12 mock==1.0.1
13 WebTest==1.4.3
13 WebTest==2.0.27
14 cov-core==1.15.0
14 cov-core==1.15.0
15 coverage==3.7.1
15 coverage==3.7.1
@@ -1,1 +1,1 b''
1 4.9.1 No newline at end of file
1 4.10.0 No newline at end of file
@@ -1,34 +1,54 b''
1 """
1 """
2 Implementation of :class:`EchoApp`.
2 Implementation of :class:`EchoApp`.
3
3
4 This WSGI application will just echo back the data which it receives.
4 This WSGI application will just echo back the data which it receives.
5 """
5 """
6
6
7 import logging
7 import logging
8
8
9
9
10 log = logging.getLogger(__name__)
10 log = logging.getLogger(__name__)
11
11
12
12
13 class EchoApp(object):
13 class EchoApp(object):
14
14
15 def __init__(self, repo_path, repo_name, config):
15 def __init__(self, repo_path, repo_name, config):
16 self._repo_path = repo_path
16 self._repo_path = repo_path
17 log.info("EchoApp initialized for %s", repo_path)
17 log.info("EchoApp initialized for %s", repo_path)
18
18
19 def __call__(self, environ, start_response):
19 def __call__(self, environ, start_response):
20 log.debug("EchoApp called for %s", self._repo_path)
20 log.debug("EchoApp called for %s", self._repo_path)
21 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
21 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
22 environ['wsgi.input'].read()
22 environ['wsgi.input'].read()
23 status = '200 OK'
23 status = '200 OK'
24 headers = []
24 headers = [('Content-Type', 'text/plain')]
25 start_response(status, headers)
25 start_response(status, headers)
26 return ["ECHO"]
26 return ["ECHO"]
27
27
28
28
29 class EchoAppStream(object):
30
31 def __init__(self, repo_path, repo_name, config):
32 self._repo_path = repo_path
33 log.info("EchoApp initialized for %s", repo_path)
34
35 def __call__(self, environ, start_response):
36 log.debug("EchoApp called for %s", self._repo_path)
37 log.debug("Content-Length: %s", environ.get('CONTENT_LENGTH'))
38 environ['wsgi.input'].read()
39 status = '200 OK'
40 headers = [('Content-Type', 'text/plain')]
41 start_response(status, headers)
42
43 def generator():
44 for _ in xrange(1000000):
45 yield "ECHO"
46 return generator()
47
48
29 def create_app():
49 def create_app():
30 """
50 """
31 Allows running this app directly in a WSGI server.
51 Allows running this app directly in a WSGI server.
32 """
52 """
33 stub_config = {}
53 stub_config = {}
34 return EchoApp('stub_path', 'stub_name', stub_config)
54 return EchoApp('stub_path', 'stub_name', stub_config)
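
Since waitress is pinned above (waitress==1.0.2), the echo application can also be served stand-alone for quick manual testing. A minimal sketch, assuming it is run from inside this module where create_app() is defined; host and port are illustrative values, not taken from the diff:

from waitress import serve

if __name__ == '__main__':
    # create_app() returns EchoApp('stub_path', 'stub_name', {}) as defined above.
    serve(create_app(), host='127.0.0.1', port=8080)
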
@@ -1,644 +1,645 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import logging
18 import logging
19 import os
19 import os
20 import posixpath as vcspath
20 import posixpath as vcspath
21 import re
21 import re
22 import stat
22 import stat
23 import urllib
23 import urllib
24 import urllib2
24 import urllib2
25 from functools import wraps
25 from functools import wraps
26
26
27 from dulwich import index, objects
27 from dulwich import index, objects
28 from dulwich.client import HttpGitClient, LocalGitClient
28 from dulwich.client import HttpGitClient, LocalGitClient
29 from dulwich.errors import (
29 from dulwich.errors import (
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
30 NotGitRepository, ChecksumMismatch, WrongObjectException,
31 MissingCommitError, ObjectMissing, HangupException,
31 MissingCommitError, ObjectMissing, HangupException,
32 UnexpectedCommandError)
32 UnexpectedCommandError)
33 from dulwich.repo import Repo as DulwichRepo, Tag
33 from dulwich.repo import Repo as DulwichRepo, Tag
34 from dulwich.server import update_server_info
34 from dulwich.server import update_server_info
35
35
36 from vcsserver import exceptions, settings, subprocessio
36 from vcsserver import exceptions, settings, subprocessio
37 from vcsserver.utils import safe_str
37 from vcsserver.utils import safe_str
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
38 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
39 from vcsserver.hgcompat import (
39 from vcsserver.hgcompat import (
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
40 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler)
41 from vcsserver.git_lfs.lib import LFSOidStore
41 from vcsserver.git_lfs.lib import LFSOidStore
42
42
43 DIR_STAT = stat.S_IFDIR
43 DIR_STAT = stat.S_IFDIR
44 FILE_MODE = stat.S_IFMT
44 FILE_MODE = stat.S_IFMT
45 GIT_LINK = objects.S_IFGITLINK
45 GIT_LINK = objects.S_IFGITLINK
46
46
47 log = logging.getLogger(__name__)
47 log = logging.getLogger(__name__)
48
48
49
49
50 def reraise_safe_exceptions(func):
50 def reraise_safe_exceptions(func):
51 """Converts Dulwich exceptions to something neutral."""
51 """Converts Dulwich exceptions to something neutral."""
52 @wraps(func)
52 @wraps(func)
53 def wrapper(*args, **kwargs):
53 def wrapper(*args, **kwargs):
54 try:
54 try:
55 return func(*args, **kwargs)
55 return func(*args, **kwargs)
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
56 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
57 ObjectMissing) as e:
57 ObjectMissing) as e:
58 raise exceptions.LookupException(e.message)
58 raise exceptions.LookupException(e.message)
59 except (HangupException, UnexpectedCommandError) as e:
59 except (HangupException, UnexpectedCommandError) as e:
60 raise exceptions.VcsException(e.message)
60 raise exceptions.VcsException(e.message)
61 except Exception as e:
61 except Exception as e:
62 # NOTE(marcink): because of how dulwich handles some exceptions
62 # NOTE(marcink): because of how dulwich handles some exceptions
63 # (KeyError on empty repos), we cannot track this and catch all
63 # (KeyError on empty repos), we cannot track this and catch all
64 # exceptions; they may be exceptions from other handlers
64 # exceptions; they may be exceptions from other handlers
65 #if not hasattr(e, '_vcs_kind'):
65 #if not hasattr(e, '_vcs_kind'):
66 #log.exception("Unhandled exception in git remote call")
66 #log.exception("Unhandled exception in git remote call")
67 #raise_from_original(exceptions.UnhandledException)
67 #raise_from_original(exceptions.UnhandledException)
68 raise
68 raise
69 return wrapper
69 return wrapper
70
70
71
71
72 class Repo(DulwichRepo):
72 class Repo(DulwichRepo):
73 """
73 """
74 A wrapper for dulwich Repo class.
74 A wrapper for dulwich Repo class.
75
75
76 Since dulwich sometimes keeps .idx file descriptors open, this leads to a
76 Since dulwich sometimes keeps .idx file descriptors open, this leads to a
77 "Too many open files" error. We need to close all opened file descriptors
77 "Too many open files" error. We need to close all opened file descriptors
78 once the repo object is destroyed.
78 once the repo object is destroyed.
79
79
80 TODO: mikhail: please check if we need this wrapper after updating dulwich
80 TODO: mikhail: please check if we need this wrapper after updating dulwich
81 to 0.12.0 +
81 to 0.12.0 +
82 """
82 """
83 def __del__(self):
83 def __del__(self):
84 if hasattr(self, 'object_store'):
84 if hasattr(self, 'object_store'):
85 self.close()
85 self.close()
86
86
87
87
88 class GitFactory(RepoFactory):
88 class GitFactory(RepoFactory):
89
89
90 def _create_repo(self, wire, create):
90 def _create_repo(self, wire, create):
91 repo_path = str_to_dulwich(wire['path'])
91 repo_path = str_to_dulwich(wire['path'])
92 return Repo(repo_path)
92 return Repo(repo_path)
93
93
94
94
95 class GitRemote(object):
95 class GitRemote(object):
96
96
97 def __init__(self, factory):
97 def __init__(self, factory):
98 self._factory = factory
98 self._factory = factory
99
99
100 self._bulk_methods = {
100 self._bulk_methods = {
101 "author": self.commit_attribute,
101 "author": self.commit_attribute,
102 "date": self.get_object_attrs,
102 "date": self.get_object_attrs,
103 "message": self.commit_attribute,
103 "message": self.commit_attribute,
104 "parents": self.commit_attribute,
104 "parents": self.commit_attribute,
105 "_commit": self.revision,
105 "_commit": self.revision,
106 }
106 }
107
107
108 def _wire_to_config(self, wire):
108 def _wire_to_config(self, wire):
109 if 'config' in wire:
109 if 'config' in wire:
110 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
110 return dict([(x[0] + '_' + x[1], x[2]) for x in wire['config']])
111 return {}
111 return {}
112
112
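
For readability: _wire_to_config flattens the (section, key, value) triples sent over the wire into a flat "section_key" mapping. A hedged illustration (the store path is a made-up example value, not taken from the source):

# Illustration only: input/output shape of _wire_to_config.
wire = {'config': [('vcs_git_lfs', 'store_location', '/var/lfs-store')]}
# _wire_to_config(wire) -> {'vcs_git_lfs_store_location': '/var/lfs-store'},
# which is the key that conf.get('vcs_git_lfs_store_location') reads further down.
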
113 def _assign_ref(self, wire, ref, commit_id):
113 def _assign_ref(self, wire, ref, commit_id):
114 repo = self._factory.repo(wire)
114 repo = self._factory.repo(wire)
115 repo[ref] = commit_id
115 repo[ref] = commit_id
116
116
117 @reraise_safe_exceptions
117 @reraise_safe_exceptions
118 def add_object(self, wire, content):
118 def add_object(self, wire, content):
119 repo = self._factory.repo(wire)
119 repo = self._factory.repo(wire)
120 blob = objects.Blob()
120 blob = objects.Blob()
121 blob.set_raw_string(content)
121 blob.set_raw_string(content)
122 repo.object_store.add_object(blob)
122 repo.object_store.add_object(blob)
123 return blob.id
123 return blob.id
124
124
125 @reraise_safe_exceptions
125 @reraise_safe_exceptions
126 def assert_correct_path(self, wire):
126 def assert_correct_path(self, wire):
127 try:
127 try:
128 self._factory.repo(wire)
128 self._factory.repo(wire)
129 except NotGitRepository as e:
129 except NotGitRepository as e:
130 # Exception can contain unicode which we convert
130 # Exception can contain unicode which we convert
131 raise exceptions.AbortException(repr(e))
131 raise exceptions.AbortException(repr(e))
132
132
133 @reraise_safe_exceptions
133 @reraise_safe_exceptions
134 def bare(self, wire):
134 def bare(self, wire):
135 repo = self._factory.repo(wire)
135 repo = self._factory.repo(wire)
136 return repo.bare
136 return repo.bare
137
137
138 @reraise_safe_exceptions
138 @reraise_safe_exceptions
139 def blob_as_pretty_string(self, wire, sha):
139 def blob_as_pretty_string(self, wire, sha):
140 repo = self._factory.repo(wire)
140 repo = self._factory.repo(wire)
141 return repo[sha].as_pretty_string()
141 return repo[sha].as_pretty_string()
142
142
143 @reraise_safe_exceptions
143 @reraise_safe_exceptions
144 def blob_raw_length(self, wire, sha):
144 def blob_raw_length(self, wire, sha):
145 repo = self._factory.repo(wire)
145 repo = self._factory.repo(wire)
146 blob = repo[sha]
146 blob = repo[sha]
147 return blob.raw_length()
147 return blob.raw_length()
148
148
149 def _parse_lfs_pointer(self, raw_content):
149 def _parse_lfs_pointer(self, raw_content):
150
150
151 spec_string = 'version https://git-lfs.github.com/spec'
151 spec_string = 'version https://git-lfs.github.com/spec'
152 if raw_content and raw_content.startswith(spec_string):
152 if raw_content and raw_content.startswith(spec_string):
153 pattern = re.compile(r"""
153 pattern = re.compile(r"""
154 (?:\n)?
154 (?:\n)?
155 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
155 ^version[ ]https://git-lfs\.github\.com/spec/(?P<spec_ver>v\d+)\n
156 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
156 ^oid[ ] sha256:(?P<oid_hash>[0-9a-f]{64})\n
157 ^size[ ](?P<oid_size>[0-9]+)\n
157 ^size[ ](?P<oid_size>[0-9]+)\n
158 (?:\n)?
158 (?:\n)?
159 """, re.VERBOSE | re.MULTILINE)
159 """, re.VERBOSE | re.MULTILINE)
160 match = pattern.match(raw_content)
160 match = pattern.match(raw_content)
161 if match:
161 if match:
162 return match.groupdict()
162 return match.groupdict()
163
163
164 return {}
164 return {}
165
165
166 @reraise_safe_exceptions
166 @reraise_safe_exceptions
167 def is_large_file(self, wire, sha):
167 def is_large_file(self, wire, sha):
168 repo = self._factory.repo(wire)
168 repo = self._factory.repo(wire)
169 blob = repo[sha]
169 blob = repo[sha]
170 return self._parse_lfs_pointer(blob.as_raw_string())
170 return self._parse_lfs_pointer(blob.as_raw_string())
171
171
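
For reference, the pointer format matched by the regex in _parse_lfs_pointer looks like the sample below; the oid is an illustrative 64-hex-digit value, not a real object:

# Illustration only: a blob of this shape makes _parse_lfs_pointer return
# {'spec_ver': 'v1', 'oid_hash': '<the 64 hex chars>', 'oid_size': '30'};
# any other content falls through to the empty dict.
pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\n"
    "size 30\n"
)
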
172 @reraise_safe_exceptions
172 @reraise_safe_exceptions
173 def in_largefiles_store(self, wire, oid):
173 def in_largefiles_store(self, wire, oid):
174 repo = self._factory.repo(wire)
174 repo = self._factory.repo(wire)
175 conf = self._wire_to_config(wire)
175 conf = self._wire_to_config(wire)
176
176
177 store_location = conf.get('vcs_git_lfs_store_location')
177 store_location = conf.get('vcs_git_lfs_store_location')
178 if store_location:
178 if store_location:
179 repo_name = repo.path
179 repo_name = repo.path
180 store = LFSOidStore(
180 store = LFSOidStore(
181 oid=oid, repo=repo_name, store_location=store_location)
181 oid=oid, repo=repo_name, store_location=store_location)
182 return store.has_oid()
182 return store.has_oid()
183
183
184 return False
184 return False
185
185
186 @reraise_safe_exceptions
186 @reraise_safe_exceptions
187 def store_path(self, wire, oid):
187 def store_path(self, wire, oid):
188 repo = self._factory.repo(wire)
188 repo = self._factory.repo(wire)
189 conf = self._wire_to_config(wire)
189 conf = self._wire_to_config(wire)
190
190
191 store_location = conf.get('vcs_git_lfs_store_location')
191 store_location = conf.get('vcs_git_lfs_store_location')
192 if store_location:
192 if store_location:
193 repo_name = repo.path
193 repo_name = repo.path
194 store = LFSOidStore(
194 store = LFSOidStore(
195 oid=oid, repo=repo_name, store_location=store_location)
195 oid=oid, repo=repo_name, store_location=store_location)
196 return store.oid_path
196 return store.oid_path
197 raise ValueError('Unable to fetch oid with path {}'.format(oid))
197 raise ValueError('Unable to fetch oid with path {}'.format(oid))
198
198
199 @reraise_safe_exceptions
199 @reraise_safe_exceptions
200 def bulk_request(self, wire, rev, pre_load):
200 def bulk_request(self, wire, rev, pre_load):
201 result = {}
201 result = {}
202 for attr in pre_load:
202 for attr in pre_load:
203 try:
203 try:
204 method = self._bulk_methods[attr]
204 method = self._bulk_methods[attr]
205 args = [wire, rev]
205 args = [wire, rev]
206 if attr == "date":
206 if attr == "date":
207 args.extend(["commit_time", "commit_timezone"])
207 args.extend(["commit_time", "commit_timezone"])
208 elif attr in ["author", "message", "parents"]:
208 elif attr in ["author", "message", "parents"]:
209 args.append(attr)
209 args.append(attr)
210 result[attr] = method(*args)
210 result[attr] = method(*args)
211 except KeyError:
211 except KeyError:
212 raise exceptions.VcsException(
212 raise exceptions.VcsException(
213 "Unknown bulk attribute: %s" % attr)
213 "Unknown bulk attribute: %s" % attr)
214 return result
214 return result
215
215
216 def _build_opener(self, url):
216 def _build_opener(self, url):
217 handlers = []
217 handlers = []
218 url_obj = url_parser(url)
218 url_obj = url_parser(url)
219 _, authinfo = url_obj.authinfo()
219 _, authinfo = url_obj.authinfo()
220
220
221 if authinfo:
221 if authinfo:
222 # create a password manager
222 # create a password manager
223 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
223 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
224 passmgr.add_password(*authinfo)
224 passmgr.add_password(*authinfo)
225
225
226 handlers.extend((httpbasicauthhandler(passmgr),
226 handlers.extend((httpbasicauthhandler(passmgr),
227 httpdigestauthhandler(passmgr)))
227 httpdigestauthhandler(passmgr)))
228
228
229 return urllib2.build_opener(*handlers)
229 return urllib2.build_opener(*handlers)
230
230
231 @reraise_safe_exceptions
231 @reraise_safe_exceptions
232 def check_url(self, url, config):
232 def check_url(self, url, config):
233 url_obj = url_parser(url)
233 url_obj = url_parser(url)
234 test_uri, _ = url_obj.authinfo()
234 test_uri, _ = url_obj.authinfo()
235 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
235 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
236 url_obj.query = obfuscate_qs(url_obj.query)
236 url_obj.query = obfuscate_qs(url_obj.query)
237 cleaned_uri = str(url_obj)
237 cleaned_uri = str(url_obj)
238 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
238 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
239
239
240 if not test_uri.endswith('info/refs'):
240 if not test_uri.endswith('info/refs'):
241 test_uri = test_uri.rstrip('/') + '/info/refs'
241 test_uri = test_uri.rstrip('/') + '/info/refs'
242
242
243 o = self._build_opener(url)
243 o = self._build_opener(url)
244 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
244 o.addheaders = [('User-Agent', 'git/1.7.8.0')] # fake some git
245
245
246 q = {"service": 'git-upload-pack'}
246 q = {"service": 'git-upload-pack'}
247 qs = '?%s' % urllib.urlencode(q)
247 qs = '?%s' % urllib.urlencode(q)
248 cu = "%s%s" % (test_uri, qs)
248 cu = "%s%s" % (test_uri, qs)
249 req = urllib2.Request(cu, None, {})
249 req = urllib2.Request(cu, None, {})
250
250
251 try:
251 try:
252 log.debug("Trying to open URL %s", cleaned_uri)
252 log.debug("Trying to open URL %s", cleaned_uri)
253 resp = o.open(req)
253 resp = o.open(req)
254 if resp.code != 200:
254 if resp.code != 200:
255 raise exceptions.URLError('Return Code is not 200')
255 raise exceptions.URLError('Return Code is not 200')
256 except Exception as e:
256 except Exception as e:
257 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
257 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
258 # means it cannot be cloned
258 # means it cannot be cloned
259 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
259 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
260
260
261 # now detect if it's proper git repo
261 # now detect if it's proper git repo
262 gitdata = resp.read()
262 gitdata = resp.read()
263 if 'service=git-upload-pack' in gitdata:
263 if 'service=git-upload-pack' in gitdata:
264 pass
264 pass
265 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
265 elif re.findall(r'[0-9a-fA-F]{40}\s+refs', gitdata):
266 # old-style git can return some other format!
266 # old-style git can return some other format!
267 pass
267 pass
268 else:
268 else:
269 raise exceptions.URLError(
269 raise exceptions.URLError(
270 "url [%s] does not look like a git repository" % (cleaned_uri,))
270 "url [%s] does not look like a git repository" % (cleaned_uri,))
271
271
272 return True
272 return True
273
273
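
The probe assembled in check_url has a simple shape; a hedged illustration with a placeholder clone URL (example.com is not taken from the source):

import urllib

test_uri = "https://example.com/repo.git"  # placeholder clone URL
probe = test_uri.rstrip('/') + '/info/refs' + '?' + urllib.urlencode({"service": "git-upload-pack"})
# -> https://example.com/repo.git/info/refs?service=git-upload-pack
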
274 @reraise_safe_exceptions
274 @reraise_safe_exceptions
275 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
275 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
276 remote_refs = self.fetch(wire, url, apply_refs=False)
276 remote_refs = self.fetch(wire, url, apply_refs=False)
277 repo = self._factory.repo(wire)
277 repo = self._factory.repo(wire)
278 if isinstance(valid_refs, list):
278 if isinstance(valid_refs, list):
279 valid_refs = tuple(valid_refs)
279 valid_refs = tuple(valid_refs)
280
280
281 for k in remote_refs:
281 for k in remote_refs:
282 # only parse heads/tags and skip so-called deferred tags
282 # only parse heads/tags and skip so-called deferred tags
283 if k.startswith(valid_refs) and not k.endswith(deferred):
283 if k.startswith(valid_refs) and not k.endswith(deferred):
284 repo[k] = remote_refs[k]
284 repo[k] = remote_refs[k]
285
285
286 if update_after_clone:
286 if update_after_clone:
287 # we want to checkout HEAD
287 # we want to checkout HEAD
288 repo["HEAD"] = remote_refs["HEAD"]
288 repo["HEAD"] = remote_refs["HEAD"]
289 index.build_index_from_tree(repo.path, repo.index_path(),
289 index.build_index_from_tree(repo.path, repo.index_path(),
290 repo.object_store, repo["HEAD"].tree)
290 repo.object_store, repo["HEAD"].tree)
291
291
292 # TODO: this is quite complex, check if that can be simplified
292 # TODO: this is quite complex, check if that can be simplified
293 @reraise_safe_exceptions
293 @reraise_safe_exceptions
294 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
294 def commit(self, wire, commit_data, branch, commit_tree, updated, removed):
295 repo = self._factory.repo(wire)
295 repo = self._factory.repo(wire)
296 object_store = repo.object_store
296 object_store = repo.object_store
297
297
298 # Create tree and populates it with blobs
298 # Create tree and populates it with blobs
299 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
299 commit_tree = commit_tree and repo[commit_tree] or objects.Tree()
300
300
301 for node in updated:
301 for node in updated:
302 # Compute subdirs if needed
302 # Compute subdirs if needed
303 dirpath, nodename = vcspath.split(node['path'])
303 dirpath, nodename = vcspath.split(node['path'])
304 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
304 dirnames = map(safe_str, dirpath and dirpath.split('/') or [])
305 parent = commit_tree
305 parent = commit_tree
306 ancestors = [('', parent)]
306 ancestors = [('', parent)]
307
307
308 # Tries to dig for the deepest existing tree
308 # Tries to dig for the deepest existing tree
309 while dirnames:
309 while dirnames:
310 curdir = dirnames.pop(0)
310 curdir = dirnames.pop(0)
311 try:
311 try:
312 dir_id = parent[curdir][1]
312 dir_id = parent[curdir][1]
313 except KeyError:
313 except KeyError:
314 # put curdir back into dirnames and stops
314 # put curdir back into dirnames and stops
315 dirnames.insert(0, curdir)
315 dirnames.insert(0, curdir)
316 break
316 break
317 else:
317 else:
318 # If found, updates parent
318 # If found, updates parent
319 parent = repo[dir_id]
319 parent = repo[dir_id]
320 ancestors.append((curdir, parent))
320 ancestors.append((curdir, parent))
321 # Now parent is deepest existing tree and we need to create
321 # Now parent is deepest existing tree and we need to create
322 # subtrees for dirnames (in reverse order)
322 # subtrees for dirnames (in reverse order)
323 # [this only applies for nodes from added]
323 # [this only applies for nodes from added]
324 new_trees = []
324 new_trees = []
325
325
326 blob = objects.Blob.from_string(node['content'])
326 blob = objects.Blob.from_string(node['content'])
327
327
328 if dirnames:
328 if dirnames:
329 # If there are trees which should be created we need to build
329 # If there are trees which should be created we need to build
330 # them now (in reverse order)
330 # them now (in reverse order)
331 reversed_dirnames = list(reversed(dirnames))
331 reversed_dirnames = list(reversed(dirnames))
332 curtree = objects.Tree()
332 curtree = objects.Tree()
333 curtree[node['node_path']] = node['mode'], blob.id
333 curtree[node['node_path']] = node['mode'], blob.id
334 new_trees.append(curtree)
334 new_trees.append(curtree)
335 for dirname in reversed_dirnames[:-1]:
335 for dirname in reversed_dirnames[:-1]:
336 newtree = objects.Tree()
336 newtree = objects.Tree()
337 newtree[dirname] = (DIR_STAT, curtree.id)
337 newtree[dirname] = (DIR_STAT, curtree.id)
338 new_trees.append(newtree)
338 new_trees.append(newtree)
339 curtree = newtree
339 curtree = newtree
340 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
340 parent[reversed_dirnames[-1]] = (DIR_STAT, curtree.id)
341 else:
341 else:
342 parent.add(
342 parent.add(
343 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
343 name=node['node_path'], mode=node['mode'], hexsha=blob.id)
344
344
345 new_trees.append(parent)
345 new_trees.append(parent)
346 # Update ancestors
346 # Update ancestors
347 reversed_ancestors = reversed(
347 reversed_ancestors = reversed(
348 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
348 [(a[1], b[1], b[0]) for a, b in zip(ancestors, ancestors[1:])])
349 for parent, tree, path in reversed_ancestors:
349 for parent, tree, path in reversed_ancestors:
350 parent[path] = (DIR_STAT, tree.id)
350 parent[path] = (DIR_STAT, tree.id)
351 object_store.add_object(tree)
351 object_store.add_object(tree)
352
352
353 object_store.add_object(blob)
353 object_store.add_object(blob)
354 for tree in new_trees:
354 for tree in new_trees:
355 object_store.add_object(tree)
355 object_store.add_object(tree)
356
356
357 for node_path in removed:
357 for node_path in removed:
358 paths = node_path.split('/')
358 paths = node_path.split('/')
359 tree = commit_tree
359 tree = commit_tree
360 trees = [tree]
360 trees = [tree]
361 # Traverse deep into the forest...
361 # Traverse deep into the forest...
362 for path in paths:
362 for path in paths:
363 try:
363 try:
364 obj = repo[tree[path][1]]
364 obj = repo[tree[path][1]]
365 if isinstance(obj, objects.Tree):
365 if isinstance(obj, objects.Tree):
366 trees.append(obj)
366 trees.append(obj)
367 tree = obj
367 tree = obj
368 except KeyError:
368 except KeyError:
369 break
369 break
370 # Cut down the blob and all rotten trees on the way back...
370 # Cut down the blob and all rotten trees on the way back...
371 for path, tree in reversed(zip(paths, trees)):
371 for path, tree in reversed(zip(paths, trees)):
372 del tree[path]
372 del tree[path]
373 if tree:
373 if tree:
374 # This tree still has elements - don't remove it or any
374 # This tree still has elements - don't remove it or any
375 # of its parents
375 # of its parents
376 break
376 break
377
377
378 object_store.add_object(commit_tree)
378 object_store.add_object(commit_tree)
379
379
380 # Create commit
380 # Create commit
381 commit = objects.Commit()
381 commit = objects.Commit()
382 commit.tree = commit_tree.id
382 commit.tree = commit_tree.id
383 for k, v in commit_data.iteritems():
383 for k, v in commit_data.iteritems():
384 setattr(commit, k, v)
384 setattr(commit, k, v)
385 object_store.add_object(commit)
385 object_store.add_object(commit)
386
386
387 ref = 'refs/heads/%s' % branch
387 ref = 'refs/heads/%s' % branch
388 repo.refs[ref] = commit.id
388 repo.refs[ref] = commit.id
389
389
390 return commit.id
390 return commit.id
391
391
392 @reraise_safe_exceptions
392 @reraise_safe_exceptions
393 def fetch(self, wire, url, apply_refs=True, refs=None):
393 def fetch(self, wire, url, apply_refs=True, refs=None):
394 if url != 'default' and '://' not in url:
394 if url != 'default' and '://' not in url:
395 client = LocalGitClient(url)
395 client = LocalGitClient(url)
396 else:
396 else:
397 url_obj = url_parser(url)
397 url_obj = url_parser(url)
398 o = self._build_opener(url)
398 o = self._build_opener(url)
399 url, _ = url_obj.authinfo()
399 url, _ = url_obj.authinfo()
400 client = HttpGitClient(base_url=url, opener=o)
400 client = HttpGitClient(base_url=url, opener=o)
401 repo = self._factory.repo(wire)
401 repo = self._factory.repo(wire)
402
402
403 determine_wants = repo.object_store.determine_wants_all
403 determine_wants = repo.object_store.determine_wants_all
404 if refs:
404 if refs:
405 def determine_wants_requested(references):
405 def determine_wants_requested(references):
406 return [references[r] for r in references if r in refs]
406 return [references[r] for r in references if r in refs]
407 determine_wants = determine_wants_requested
407 determine_wants = determine_wants_requested
408
408
409 try:
409 try:
410 remote_refs = client.fetch(
410 remote_refs = client.fetch(
411 path=url, target=repo, determine_wants=determine_wants)
411 path=url, target=repo, determine_wants=determine_wants)
412 except NotGitRepository as e:
412 except NotGitRepository as e:
413 log.warning(
413 log.warning(
414 'Trying to fetch from "%s" failed, not a Git repository.', url)
414 'Trying to fetch from "%s" failed, not a Git repository.', url)
415 # Exception can contain unicode which we convert
415 # Exception can contain unicode which we convert
416 raise exceptions.AbortException(repr(e))
416 raise exceptions.AbortException(repr(e))
417
417
418 # mikhail: client.fetch() returns all the remote refs, but fetches only
418 # mikhail: client.fetch() returns all the remote refs, but fetches only
419 # refs filtered by the `determine_wants` function. We need to filter the result
419 # refs filtered by the `determine_wants` function. We need to filter the result
420 # as well
420 # as well
421 if refs:
421 if refs:
422 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
422 remote_refs = {k: remote_refs[k] for k in remote_refs if k in refs}
423
423
424 if apply_refs:
424 if apply_refs:
425 # TODO: johbo: Needs proper test coverage with a git repository
425 # TODO: johbo: Needs proper test coverage with a git repository
426 # that contains a tag object, so that we would end up with
426 # that contains a tag object, so that we would end up with
427 # a peeled ref at this point.
427 # a peeled ref at this point.
428 PEELED_REF_MARKER = '^{}'
428 PEELED_REF_MARKER = '^{}'
429 for k in remote_refs:
429 for k in remote_refs:
430 if k.endswith(PEELED_REF_MARKER):
430 if k.endswith(PEELED_REF_MARKER):
431 log.info("Skipping peeled reference %s", k)
431 log.info("Skipping peeled reference %s", k)
432 continue
432 continue
433 repo[k] = remote_refs[k]
433 repo[k] = remote_refs[k]
434
434
435 if refs:
435 if refs:
436 # mikhail: explicitly set the head to the last ref.
436 # mikhail: explicitly set the head to the last ref.
437 repo['HEAD'] = remote_refs[refs[-1]]
437 repo['HEAD'] = remote_refs[refs[-1]]
438
438
439 # TODO: mikhail: should we return remote_refs here to be
439 # TODO: mikhail: should we return remote_refs here to be
440 # consistent?
440 # consistent?
441 else:
441 else:
442 return remote_refs
442 return remote_refs
443
443
444 @reraise_safe_exceptions
444 @reraise_safe_exceptions
445 def get_remote_refs(self, wire, url):
445 def get_remote_refs(self, wire, url):
446 repo = Repo(url)
446 repo = Repo(url)
447 return repo.get_refs()
447 return repo.get_refs()
448
448
449 @reraise_safe_exceptions
449 @reraise_safe_exceptions
450 def get_description(self, wire):
450 def get_description(self, wire):
451 repo = self._factory.repo(wire)
451 repo = self._factory.repo(wire)
452 return repo.get_description()
452 return repo.get_description()
453
453
454 @reraise_safe_exceptions
454 @reraise_safe_exceptions
455 def get_file_history(self, wire, file_path, commit_id, limit):
455 def get_file_history(self, wire, file_path, commit_id, limit):
456 repo = self._factory.repo(wire)
456 repo = self._factory.repo(wire)
457 include = [commit_id]
457 include = [commit_id]
458 paths = [file_path]
458 paths = [file_path]
459
459
460 walker = repo.get_walker(include, paths=paths, max_entries=limit)
460 walker = repo.get_walker(include, paths=paths, max_entries=limit)
461 return [x.commit.id for x in walker]
461 return [x.commit.id for x in walker]
462
462
463 @reraise_safe_exceptions
463 @reraise_safe_exceptions
464 def get_missing_revs(self, wire, rev1, rev2, path2):
464 def get_missing_revs(self, wire, rev1, rev2, path2):
465 repo = self._factory.repo(wire)
465 repo = self._factory.repo(wire)
466 LocalGitClient(thin_packs=False).fetch(path2, repo)
466 LocalGitClient(thin_packs=False).fetch(path2, repo)
467
467
468 wire_remote = wire.copy()
468 wire_remote = wire.copy()
469 wire_remote['path'] = path2
469 wire_remote['path'] = path2
470 repo_remote = self._factory.repo(wire_remote)
470 repo_remote = self._factory.repo(wire_remote)
471 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
471 LocalGitClient(thin_packs=False).fetch(wire["path"], repo_remote)
472
472
473 revs = [
473 revs = [
474 x.commit.id
474 x.commit.id
475 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
475 for x in repo_remote.get_walker(include=[rev2], exclude=[rev1])]
476 return revs
476 return revs
477
477
478 @reraise_safe_exceptions
478 @reraise_safe_exceptions
479 def get_object(self, wire, sha):
479 def get_object(self, wire, sha):
480 repo = self._factory.repo(wire)
480 repo = self._factory.repo(wire)
481 obj = repo.get_object(sha)
481 obj = repo.get_object(sha)
482 commit_id = obj.id
482 commit_id = obj.id
483
483
484 if isinstance(obj, Tag):
484 if isinstance(obj, Tag):
485 commit_id = obj.object[1]
485 commit_id = obj.object[1]
486
486
487 return {
487 return {
488 'id': obj.id,
488 'id': obj.id,
489 'type': obj.type_name,
489 'type': obj.type_name,
490 'commit_id': commit_id
490 'commit_id': commit_id
491 }
491 }
492
492
493 @reraise_safe_exceptions
493 @reraise_safe_exceptions
494 def get_object_attrs(self, wire, sha, *attrs):
494 def get_object_attrs(self, wire, sha, *attrs):
495 repo = self._factory.repo(wire)
495 repo = self._factory.repo(wire)
496 obj = repo.get_object(sha)
496 obj = repo.get_object(sha)
497 return list(getattr(obj, a) for a in attrs)
497 return list(getattr(obj, a) for a in attrs)
498
498
499 @reraise_safe_exceptions
499 @reraise_safe_exceptions
500 def get_refs(self, wire):
500 def get_refs(self, wire):
501 repo = self._factory.repo(wire)
501 repo = self._factory.repo(wire)
502 result = {}
502 result = {}
503 for ref, sha in repo.refs.as_dict().items():
503 for ref, sha in repo.refs.as_dict().items():
504 peeled_sha = repo.get_peeled(ref)
504 peeled_sha = repo.get_peeled(ref)
505 result[ref] = peeled_sha
505 result[ref] = peeled_sha
506 return result
506 return result
507
507
508 @reraise_safe_exceptions
508 @reraise_safe_exceptions
509 def get_refs_path(self, wire):
509 def get_refs_path(self, wire):
510 repo = self._factory.repo(wire)
510 repo = self._factory.repo(wire)
511 return repo.refs.path
511 return repo.refs.path
512
512
513 @reraise_safe_exceptions
513 @reraise_safe_exceptions
514 def head(self, wire):
514 def head(self, wire):
515 repo = self._factory.repo(wire)
515 repo = self._factory.repo(wire)
516 return repo.head()
516 return repo.head()
517
517
518 @reraise_safe_exceptions
518 @reraise_safe_exceptions
519 def init(self, wire):
519 def init(self, wire):
520 repo_path = str_to_dulwich(wire['path'])
520 repo_path = str_to_dulwich(wire['path'])
521 self.repo = Repo.init(repo_path)
521 self.repo = Repo.init(repo_path)
522
522
523 @reraise_safe_exceptions
523 @reraise_safe_exceptions
524 def init_bare(self, wire):
524 def init_bare(self, wire):
525 repo_path = str_to_dulwich(wire['path'])
525 repo_path = str_to_dulwich(wire['path'])
526 self.repo = Repo.init_bare(repo_path)
526 self.repo = Repo.init_bare(repo_path)
527
527
528 @reraise_safe_exceptions
528 @reraise_safe_exceptions
529 def revision(self, wire, rev):
529 def revision(self, wire, rev):
530 repo = self._factory.repo(wire)
530 repo = self._factory.repo(wire)
531 obj = repo[rev]
531 obj = repo[rev]
532 obj_data = {
532 obj_data = {
533 'id': obj.id,
533 'id': obj.id,
534 }
534 }
535 try:
535 try:
536 obj_data['tree'] = obj.tree
536 obj_data['tree'] = obj.tree
537 except AttributeError:
537 except AttributeError:
538 pass
538 pass
539 return obj_data
539 return obj_data
540
540
541 @reraise_safe_exceptions
541 @reraise_safe_exceptions
542 def commit_attribute(self, wire, rev, attr):
542 def commit_attribute(self, wire, rev, attr):
543 repo = self._factory.repo(wire)
543 repo = self._factory.repo(wire)
544 obj = repo[rev]
544 obj = repo[rev]
545 return getattr(obj, attr)
545 return getattr(obj, attr)
546
546
547 @reraise_safe_exceptions
547 @reraise_safe_exceptions
548 def set_refs(self, wire, key, value):
548 def set_refs(self, wire, key, value):
549 repo = self._factory.repo(wire)
549 repo = self._factory.repo(wire)
550 repo.refs[key] = value
550 repo.refs[key] = value
551
551
552 @reraise_safe_exceptions
552 @reraise_safe_exceptions
553 def remove_ref(self, wire, key):
553 def remove_ref(self, wire, key):
554 repo = self._factory.repo(wire)
554 repo = self._factory.repo(wire)
555 del repo.refs[key]
555 del repo.refs[key]
556
556
557 @reraise_safe_exceptions
557 @reraise_safe_exceptions
558 def tree_changes(self, wire, source_id, target_id):
558 def tree_changes(self, wire, source_id, target_id):
559 repo = self._factory.repo(wire)
559 repo = self._factory.repo(wire)
560 source = repo[source_id].tree if source_id else None
560 source = repo[source_id].tree if source_id else None
561 target = repo[target_id].tree
561 target = repo[target_id].tree
562 result = repo.object_store.tree_changes(source, target)
562 result = repo.object_store.tree_changes(source, target)
563 return list(result)
563 return list(result)
564
564
565 @reraise_safe_exceptions
565 @reraise_safe_exceptions
566 def tree_items(self, wire, tree_id):
566 def tree_items(self, wire, tree_id):
567 repo = self._factory.repo(wire)
567 repo = self._factory.repo(wire)
568 tree = repo[tree_id]
568 tree = repo[tree_id]
569
569
570 result = []
570 result = []
571 for item in tree.iteritems():
571 for item in tree.iteritems():
572 item_sha = item.sha
572 item_sha = item.sha
573 item_mode = item.mode
573 item_mode = item.mode
574
574
575 if FILE_MODE(item_mode) == GIT_LINK:
575 if FILE_MODE(item_mode) == GIT_LINK:
576 item_type = "link"
576 item_type = "link"
577 else:
577 else:
578 item_type = repo[item_sha].type_name
578 item_type = repo[item_sha].type_name
579
579
580 result.append((item.path, item_mode, item_sha, item_type))
580 result.append((item.path, item_mode, item_sha, item_type))
581 return result
581 return result
582
582
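
For illustration only, a sketch of consuming the tuples produced by tree_items() above (the wire dict and tree id are assumptions): each entry is (path, mode, sha, type), with type set to "link" for submodule entries and otherwise the dulwich object type name.

    # assuming `remote` is an instance of the enclosing remote class
    for path, mode, sha, obj_type in remote.tree_items(wire, tree_id='a1b2c3...'):
        print('%06o %s %s\t%s' % (mode, obj_type, sha, path))
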
583 @reraise_safe_exceptions
583 @reraise_safe_exceptions
584 def update_server_info(self, wire):
584 def update_server_info(self, wire):
585 repo = self._factory.repo(wire)
585 repo = self._factory.repo(wire)
586 update_server_info(repo)
586 update_server_info(repo)
587
587
588 @reraise_safe_exceptions
588 @reraise_safe_exceptions
589 def discover_git_version(self):
589 def discover_git_version(self):
590 stdout, _ = self.run_git_command(
590 stdout, _ = self.run_git_command(
591 {}, ['--version'], _bare=True, _safe=True)
591 {}, ['--version'], _bare=True, _safe=True)
592 prefix = 'git version'
592 prefix = 'git version'
593 if stdout.startswith(prefix):
593 if stdout.startswith(prefix):
594 stdout = stdout[len(prefix):]
594 stdout = stdout[len(prefix):]
595 return stdout.strip()
595 return stdout.strip()
596
596
597 @reraise_safe_exceptions
597 @reraise_safe_exceptions
598 def run_git_command(self, wire, cmd, **opts):
598 def run_git_command(self, wire, cmd, **opts):
599 path = wire.get('path', None)
599 path = wire.get('path', None)
600
600
601 if path and os.path.isdir(path):
601 if path and os.path.isdir(path):
602 opts['cwd'] = path
602 opts['cwd'] = path
603
603
604 if '_bare' in opts:
604 if '_bare' in opts:
605 _copts = []
605 _copts = []
606 del opts['_bare']
606 del opts['_bare']
607 else:
607 else:
608 _copts = ['-c', 'core.quotepath=false', ]
608 _copts = ['-c', 'core.quotepath=false', ]
609 safe_call = False
609 safe_call = False
610 if '_safe' in opts:
610 if '_safe' in opts:
611 # no exc on failure
611 # no exc on failure
612 del opts['_safe']
612 del opts['_safe']
613 safe_call = True
613 safe_call = True
614
614
615 gitenv = os.environ.copy()
615 gitenv = os.environ.copy()
616 gitenv.update(opts.pop('extra_env', {}))
616 gitenv.update(opts.pop('extra_env', {}))
617 # drop GIT_DIR from the environment, it would override the target repo
617 # drop GIT_DIR from the environment, it would override the target repo
618 if 'GIT_DIR' in gitenv:
618 if 'GIT_DIR' in gitenv:
619 del gitenv['GIT_DIR']
619 del gitenv['GIT_DIR']
620 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
620 gitenv['GIT_CONFIG_NOGLOBAL'] = '1'
621
621
622 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
622 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
623
623
624 try:
624 try:
625 _opts = {'env': gitenv, 'shell': False}
625 _opts = {'env': gitenv, 'shell': False}
626 _opts.update(opts)
626 _opts.update(opts)
627 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
627 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
628
628
629 return ''.join(p), ''.join(p.error)
629 return ''.join(p), ''.join(p.error)
630 except (EnvironmentError, OSError) as err:
630 except (EnvironmentError, OSError) as err:
631 cmd = ' '.join(cmd) # human friendly CMD
631 tb_err = ("Couldn't run git command (%s).\n"
632 tb_err = ("Couldn't run git command (%s).\n"
632 "Original error was:%s\n" % (cmd, err))
633 "Original error was:%s\n" % (cmd, err))
633 log.exception(tb_err)
634 log.exception(tb_err)
634 if safe_call:
635 if safe_call:
635 return '', err
636 return '', err
636 else:
637 else:
637 raise exceptions.VcsException(tb_err)
638 raise exceptions.VcsException(tb_err)
638
639
639
640
640 def str_to_dulwich(value):
641 def str_to_dulwich(value):
641 """
642 """
642 Dulwich 0.10.1a requires `unicode` objects to be passed in.
643 Dulwich 0.10.1a requires `unicode` objects to be passed in.
643 """
644 """
644 return value.decode(settings.WIRE_ENCODING)
645 return value.decode(settings.WIRE_ENCODING)
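
Not part of this changeset, but a hedged usage sketch of the command wrapper above (the factory, the class name GitRemote, the repository path and the command are illustrative assumptions): `_bare=True` skips the default `-c core.quotepath=false` option and `_safe=True` makes failures return the error instead of raising a VcsException.

    remote = GitRemote(factory)                  # assumed remote class / factory
    wire = {'path': '/srv/repos/example.git'}    # illustrative repo path
    stdout, stderr = remote.run_git_command(
        wire, ['rev-parse', 'HEAD'], _bare=True, _safe=True)
    if stderr:
        log.warning('git call failed: %s', stderr)
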
@@ -1,746 +1,749 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import io
18 import io
19 import logging
19 import logging
20 import stat
20 import stat
21 import urllib
21 import urllib
22 import urllib2
22 import urllib2
23
23
24 from hgext import largefiles, rebase
24 from hgext import largefiles, rebase
25 from hgext.strip import strip as hgext_strip
25 from hgext.strip import strip as hgext_strip
26 from mercurial import commands
26 from mercurial import commands
27 from mercurial import unionrepo
27 from mercurial import unionrepo
28 from mercurial import verify
28 from mercurial import verify
29
29
30 from vcsserver import exceptions
30 from vcsserver import exceptions
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
31 from vcsserver.base import RepoFactory, obfuscate_qs, raise_from_original
32 from vcsserver.hgcompat import (
32 from vcsserver.hgcompat import (
33 archival, bin, clone, config as hgconfig, diffopts, hex,
33 archival, bin, clone, config as hgconfig, diffopts, hex,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
34 hg_url as url_parser, httpbasicauthhandler, httpdigestauthhandler,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
35 httppeer, localrepository, match, memctx, exchange, memfilectx, nullrev,
36 patch, peer, revrange, ui, Abort, LookupError, RepoError, RepoLookupError,
36 patch, peer, revrange, ui, hg_tag, Abort, LookupError, RepoError,
37 InterventionRequired, RequirementError)
37 RepoLookupError, InterventionRequired, RequirementError)
38
38
39 log = logging.getLogger(__name__)
39 log = logging.getLogger(__name__)
40
40
41
41
42 def make_ui_from_config(repo_config):
42 def make_ui_from_config(repo_config):
43 baseui = ui.ui()
43 baseui = ui.ui()
44
44
45 # clean the baseui object
45 # clean the baseui object
46 baseui._ocfg = hgconfig.config()
46 baseui._ocfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
47 baseui._ucfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
48 baseui._tcfg = hgconfig.config()
49
49
50 for section, option, value in repo_config:
50 for section, option, value in repo_config:
51 baseui.setconfig(section, option, value)
51 baseui.setconfig(section, option, value)
52
52
53 # make our hgweb quiet so it doesn't print output
53 # make our hgweb quiet so it doesn't print output
54 baseui.setconfig('ui', 'quiet', 'true')
54 baseui.setconfig('ui', 'quiet', 'true')
55
55
56 # force mercurial to only use 1 thread, otherwise it may try to set a
56 # force mercurial to only use 1 thread, otherwise it may try to set a
57 # signal in a non-main thread, thus generating a ValueError.
57 # signal in a non-main thread, thus generating a ValueError.
58 baseui.setconfig('worker', 'numcpus', 1)
58 baseui.setconfig('worker', 'numcpus', 1)
59
59
60 # If there is no config for the largefiles extension, we explicitly disable
60 # If there is no config for the largefiles extension, we explicitly disable
61 # it here. This overrides settings from the repository's hgrc file. Recent
61 # it here. This overrides settings from the repository's hgrc file. Recent
62 # mercurial versions enable largefiles in hgrc on clone from a largefiles
62 # mercurial versions enable largefiles in hgrc on clone from a largefiles
63 # repo.
63 # repo.
64 if not baseui.hasconfig('extensions', 'largefiles'):
64 if not baseui.hasconfig('extensions', 'largefiles'):
65 log.debug('Explicitly disable largefiles extension for repo.')
65 log.debug('Explicitly disable largefiles extension for repo.')
66 baseui.setconfig('extensions', 'largefiles', '!')
66 baseui.setconfig('extensions', 'largefiles', '!')
67
67
68 return baseui
68 return baseui
69
69
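
A small sketch of driving the helper above (the config tuples are illustrative assumptions, not values from this changeset):

    # repo_config is an iterable of (section, option, value) tuples
    repo_config = [
        ('extensions', 'largefiles', '!'),   # hypothetical values
        ('phases', 'publish', 'false'),
    ]
    baseui = make_ui_from_config(repo_config)
    assert baseui.configbool('ui', 'quiet')  # quiet mode is forced by the helper
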
70
70
71 def reraise_safe_exceptions(func):
71 def reraise_safe_exceptions(func):
72 """Decorator for converting mercurial exceptions to something neutral."""
72 """Decorator for converting mercurial exceptions to something neutral."""
73 def wrapper(*args, **kwargs):
73 def wrapper(*args, **kwargs):
74 try:
74 try:
75 return func(*args, **kwargs)
75 return func(*args, **kwargs)
76 except (Abort, InterventionRequired):
76 except (Abort, InterventionRequired):
77 raise_from_original(exceptions.AbortException)
77 raise_from_original(exceptions.AbortException)
78 except RepoLookupError:
78 except RepoLookupError:
79 raise_from_original(exceptions.LookupException)
79 raise_from_original(exceptions.LookupException)
80 except RequirementError:
80 except RequirementError:
81 raise_from_original(exceptions.RequirementException)
81 raise_from_original(exceptions.RequirementException)
82 except RepoError:
82 except RepoError:
83 raise_from_original(exceptions.VcsException)
83 raise_from_original(exceptions.VcsException)
84 except LookupError:
84 except LookupError:
85 raise_from_original(exceptions.LookupException)
85 raise_from_original(exceptions.LookupException)
86 except Exception as e:
86 except Exception as e:
87 if not hasattr(e, '_vcs_kind'):
87 if not hasattr(e, '_vcs_kind'):
88 log.exception("Unhandled exception in hg remote call")
88 log.exception("Unhandled exception in hg remote call")
89 raise_from_original(exceptions.UnhandledException)
89 raise_from_original(exceptions.UnhandledException)
90 raise
90 raise
91 return wrapper
91 return wrapper
92
92
93
93
94 class MercurialFactory(RepoFactory):
94 class MercurialFactory(RepoFactory):
95
95
96 def _create_config(self, config, hooks=True):
96 def _create_config(self, config, hooks=True):
97 if not hooks:
97 if not hooks:
98 hooks_to_clean = frozenset((
98 hooks_to_clean = frozenset((
99 'changegroup.repo_size', 'preoutgoing.pre_pull',
99 'changegroup.repo_size', 'preoutgoing.pre_pull',
100 'outgoing.pull_logger', 'prechangegroup.pre_push'))
100 'outgoing.pull_logger', 'prechangegroup.pre_push'))
101 new_config = []
101 new_config = []
102 for section, option, value in config:
102 for section, option, value in config:
103 if section == 'hooks' and option in hooks_to_clean:
103 if section == 'hooks' and option in hooks_to_clean:
104 continue
104 continue
105 new_config.append((section, option, value))
105 new_config.append((section, option, value))
106 config = new_config
106 config = new_config
107
107
108 baseui = make_ui_from_config(config)
108 baseui = make_ui_from_config(config)
109 return baseui
109 return baseui
110
110
111 def _create_repo(self, wire, create):
111 def _create_repo(self, wire, create):
112 baseui = self._create_config(wire["config"])
112 baseui = self._create_config(wire["config"])
113 return localrepository(baseui, wire["path"], create)
113 return localrepository(baseui, wire["path"], create)
114
114
115
115
116 class HgRemote(object):
116 class HgRemote(object):
117
117
118 def __init__(self, factory):
118 def __init__(self, factory):
119 self._factory = factory
119 self._factory = factory
120
120
121 self._bulk_methods = {
121 self._bulk_methods = {
122 "affected_files": self.ctx_files,
122 "affected_files": self.ctx_files,
123 "author": self.ctx_user,
123 "author": self.ctx_user,
124 "branch": self.ctx_branch,
124 "branch": self.ctx_branch,
125 "children": self.ctx_children,
125 "children": self.ctx_children,
126 "date": self.ctx_date,
126 "date": self.ctx_date,
127 "message": self.ctx_description,
127 "message": self.ctx_description,
128 "parents": self.ctx_parents,
128 "parents": self.ctx_parents,
129 "status": self.ctx_status,
129 "status": self.ctx_status,
130 "obsolete": self.ctx_obsolete,
131 "phase": self.ctx_phase,
132 "hidden": self.ctx_hidden,
130 "_file_paths": self.ctx_list,
133 "_file_paths": self.ctx_list,
131 }
134 }
132
135
133 @reraise_safe_exceptions
136 @reraise_safe_exceptions
134 def discover_hg_version(self):
137 def discover_hg_version(self):
135 from mercurial import util
138 from mercurial import util
136 return util.version()
139 return util.version()
137
140
138 @reraise_safe_exceptions
141 @reraise_safe_exceptions
139 def archive_repo(self, archive_path, mtime, file_info, kind):
142 def archive_repo(self, archive_path, mtime, file_info, kind):
140 if kind == "tgz":
143 if kind == "tgz":
141 archiver = archival.tarit(archive_path, mtime, "gz")
144 archiver = archival.tarit(archive_path, mtime, "gz")
142 elif kind == "tbz2":
145 elif kind == "tbz2":
143 archiver = archival.tarit(archive_path, mtime, "bz2")
146 archiver = archival.tarit(archive_path, mtime, "bz2")
144 elif kind == 'zip':
147 elif kind == 'zip':
145 archiver = archival.zipit(archive_path, mtime)
148 archiver = archival.zipit(archive_path, mtime)
146 else:
149 else:
147 raise exceptions.ArchiveException(
150 raise exceptions.ArchiveException(
148 'Remote does not support: "%s".' % kind)
151 'Remote does not support: "%s".' % kind)
149
152
150 for f_path, f_mode, f_is_link, f_content in file_info:
153 for f_path, f_mode, f_is_link, f_content in file_info:
151 archiver.addfile(f_path, f_mode, f_is_link, f_content)
154 archiver.addfile(f_path, f_mode, f_is_link, f_content)
152 archiver.done()
155 archiver.done()
153
156
154 @reraise_safe_exceptions
157 @reraise_safe_exceptions
155 def bookmarks(self, wire):
158 def bookmarks(self, wire):
156 repo = self._factory.repo(wire)
159 repo = self._factory.repo(wire)
157 return dict(repo._bookmarks)
160 return dict(repo._bookmarks)
158
161
159 @reraise_safe_exceptions
162 @reraise_safe_exceptions
160 def branches(self, wire, normal, closed):
163 def branches(self, wire, normal, closed):
161 repo = self._factory.repo(wire)
164 repo = self._factory.repo(wire)
162 iter_branches = repo.branchmap().iterbranches()
165 iter_branches = repo.branchmap().iterbranches()
163 bt = {}
166 bt = {}
164 for branch_name, _heads, tip, is_closed in iter_branches:
167 for branch_name, _heads, tip, is_closed in iter_branches:
165 if normal and not is_closed:
168 if normal and not is_closed:
166 bt[branch_name] = tip
169 bt[branch_name] = tip
167 if closed and is_closed:
170 if closed and is_closed:
168 bt[branch_name] = tip
171 bt[branch_name] = tip
169
172
170 return bt
173 return bt
171
174
172 @reraise_safe_exceptions
175 @reraise_safe_exceptions
173 def bulk_request(self, wire, rev, pre_load):
176 def bulk_request(self, wire, rev, pre_load):
174 result = {}
177 result = {}
175 for attr in pre_load:
178 for attr in pre_load:
176 try:
179 try:
177 method = self._bulk_methods[attr]
180 method = self._bulk_methods[attr]
178 result[attr] = method(wire, rev)
181 result[attr] = method(wire, rev)
179 except KeyError:
182 except KeyError:
180 raise exceptions.VcsException(
183 raise exceptions.VcsException(
181 'Unknown bulk attribute: "%s"' % attr)
184 'Unknown bulk attribute: "%s"' % attr)
182 return result
185 return result
183
186
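
A hedged sketch of a bulk call against the mapping above (the wire dict, revision and resulting values are assumptions; the pre_load names must be keys of _bulk_methods):

    # assuming `remote` is an HgRemote instance backed by an existing repository
    wire = {'path': '/srv/repos/example-hg', 'config': []}
    data = remote.bulk_request(wire, rev='tip',
                               pre_load=['author', 'branch', 'phase', 'obsolete'])
    # e.g. {'author': 'dev <dev@example.com>', 'branch': 'default',
    #       'phase': 0, 'obsolete': False}
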
184 @reraise_safe_exceptions
187 @reraise_safe_exceptions
185 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
188 def clone(self, wire, source, dest, update_after_clone=False, hooks=True):
186 baseui = self._factory._create_config(wire["config"], hooks=hooks)
189 baseui = self._factory._create_config(wire["config"], hooks=hooks)
187 clone(baseui, source, dest, noupdate=not update_after_clone)
190 clone(baseui, source, dest, noupdate=not update_after_clone)
188
191
189 @reraise_safe_exceptions
192 @reraise_safe_exceptions
190 def commitctx(
193 def commitctx(
191 self, wire, message, parents, commit_time, commit_timezone,
194 self, wire, message, parents, commit_time, commit_timezone,
192 user, files, extra, removed, updated):
195 user, files, extra, removed, updated):
193
196
194 def _filectxfn(_repo, memctx, path):
197 def _filectxfn(_repo, memctx, path):
195 """
198 """
196 Marks the given path as added/changed/removed in the given _repo. This is
199 Marks the given path as added/changed/removed in the given _repo. This is
197 used by mercurial's internal commit function.
200 used by mercurial's internal commit function.
198 """
201 """
199
202
200 # check if this path is removed
203 # check if this path is removed
201 if path in removed:
204 if path in removed:
202 # returning None is a way to mark node for removal
205 # returning None is a way to mark node for removal
203 return None
206 return None
204
207
205 # check if this path was added or changed
208 # check if this path was added or changed
206 for node in updated:
209 for node in updated:
207 if node['path'] == path:
210 if node['path'] == path:
208 return memfilectx(
211 return memfilectx(
209 _repo,
212 _repo,
210 path=node['path'],
213 path=node['path'],
211 data=node['content'],
214 data=node['content'],
212 islink=False,
215 islink=False,
213 isexec=bool(node['mode'] & stat.S_IXUSR),
216 isexec=bool(node['mode'] & stat.S_IXUSR),
214 copied=False,
217 copied=False,
215 memctx=memctx)
218 memctx=memctx)
216
219
217 raise exceptions.AbortException(
220 raise exceptions.AbortException(
218 "Given path haven't been marked as added, "
221 "Given path haven't been marked as added, "
219 "changed or removed (%s)" % path)
222 "changed or removed (%s)" % path)
220
223
221 repo = self._factory.repo(wire)
224 repo = self._factory.repo(wire)
222
225
223 commit_ctx = memctx(
226 commit_ctx = memctx(
224 repo=repo,
227 repo=repo,
225 parents=parents,
228 parents=parents,
226 text=message,
229 text=message,
227 files=files,
230 files=files,
228 filectxfn=_filectxfn,
231 filectxfn=_filectxfn,
229 user=user,
232 user=user,
230 date=(commit_time, commit_timezone),
233 date=(commit_time, commit_timezone),
231 extra=extra)
234 extra=extra)
232
235
233 n = repo.commitctx(commit_ctx)
236 n = repo.commitctx(commit_ctx)
234 new_id = hex(n)
237 new_id = hex(n)
235
238
236 return new_id
239 return new_id
237
240
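
For illustration, the shape of the node dicts commitctx() consumes (field names follow _filectxfn above; paths, contents, parents and timestamps are assumptions):

    updated = [{'path': 'README.rst', 'content': 'hello\n', 'mode': 0o100644}]
    removed = ['old_file.txt']
    new_commit_id = remote.commitctx(
        wire, message='Add README, drop old file',
        parents=[None, None],               # passed straight through to memctx
        commit_time=0, commit_timezone=0,
        user='dev <dev@example.com>',
        files=['README.rst', 'old_file.txt'],
        extra={}, removed=removed, updated=updated)
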
238 @reraise_safe_exceptions
241 @reraise_safe_exceptions
239 def ctx_branch(self, wire, revision):
242 def ctx_branch(self, wire, revision):
240 repo = self._factory.repo(wire)
243 repo = self._factory.repo(wire)
241 ctx = repo[revision]
244 ctx = repo[revision]
242 return ctx.branch()
245 return ctx.branch()
243
246
244 @reraise_safe_exceptions
247 @reraise_safe_exceptions
245 def ctx_children(self, wire, revision):
248 def ctx_children(self, wire, revision):
246 repo = self._factory.repo(wire)
249 repo = self._factory.repo(wire)
247 ctx = repo[revision]
250 ctx = repo[revision]
248 return [child.rev() for child in ctx.children()]
251 return [child.rev() for child in ctx.children()]
249
252
250 @reraise_safe_exceptions
253 @reraise_safe_exceptions
251 def ctx_date(self, wire, revision):
254 def ctx_date(self, wire, revision):
252 repo = self._factory.repo(wire)
255 repo = self._factory.repo(wire)
253 ctx = repo[revision]
256 ctx = repo[revision]
254 return ctx.date()
257 return ctx.date()
255
258
256 @reraise_safe_exceptions
259 @reraise_safe_exceptions
257 def ctx_description(self, wire, revision):
260 def ctx_description(self, wire, revision):
258 repo = self._factory.repo(wire)
261 repo = self._factory.repo(wire)
259 ctx = repo[revision]
262 ctx = repo[revision]
260 return ctx.description()
263 return ctx.description()
261
264
262 @reraise_safe_exceptions
265 @reraise_safe_exceptions
263 def ctx_diff(
266 def ctx_diff(
264 self, wire, revision, git=True, ignore_whitespace=True, context=3):
267 self, wire, revision, git=True, ignore_whitespace=True, context=3):
265 repo = self._factory.repo(wire)
268 repo = self._factory.repo(wire)
266 ctx = repo[revision]
269 ctx = repo[revision]
267 result = ctx.diff(
270 result = ctx.diff(
268 git=git, ignore_whitespace=ignore_whitespace, context=context)
271 git=git, ignore_whitespace=ignore_whitespace, context=context)
269 return list(result)
272 return list(result)
270
273
271 @reraise_safe_exceptions
274 @reraise_safe_exceptions
272 def ctx_files(self, wire, revision):
275 def ctx_files(self, wire, revision):
273 repo = self._factory.repo(wire)
276 repo = self._factory.repo(wire)
274 ctx = repo[revision]
277 ctx = repo[revision]
275 return ctx.files()
278 return ctx.files()
276
279
277 @reraise_safe_exceptions
280 @reraise_safe_exceptions
278 def ctx_list(self, path, revision):
281 def ctx_list(self, path, revision):
279 repo = self._factory.repo(path)
282 repo = self._factory.repo(path)
280 ctx = repo[revision]
283 ctx = repo[revision]
281 return list(ctx)
284 return list(ctx)
282
285
283 @reraise_safe_exceptions
286 @reraise_safe_exceptions
284 def ctx_parents(self, wire, revision):
287 def ctx_parents(self, wire, revision):
285 repo = self._factory.repo(wire)
288 repo = self._factory.repo(wire)
286 ctx = repo[revision]
289 ctx = repo[revision]
287 return [parent.rev() for parent in ctx.parents()]
290 return [parent.rev() for parent in ctx.parents()]
288
291
289 @reraise_safe_exceptions
292 @reraise_safe_exceptions
290 def ctx_phase(self, wire, revision):
293 def ctx_phase(self, wire, revision):
291 repo = self._factory.repo(wire)
294 repo = self._factory.repo(wire)
292 ctx = repo[revision]
295 ctx = repo[revision]
293 # public=0, draft=1, secret=3
296 # public=0, draft=1, secret=3
294 return ctx.phase()
297 return ctx.phase()
295
298
296 @reraise_safe_exceptions
299 @reraise_safe_exceptions
297 def ctx_obsolete(self, wire, revision):
300 def ctx_obsolete(self, wire, revision):
298 repo = self._factory.repo(wire)
301 repo = self._factory.repo(wire)
299 ctx = repo[revision]
302 ctx = repo[revision]
300 return ctx.obsolete()
303 return ctx.obsolete()
301
304
302 @reraise_safe_exceptions
305 @reraise_safe_exceptions
303 def ctx_hidden(self, wire, revision):
306 def ctx_hidden(self, wire, revision):
304 repo = self._factory.repo(wire)
307 repo = self._factory.repo(wire)
305 ctx = repo[revision]
308 ctx = repo[revision]
306 return ctx.hidden()
309 return ctx.hidden()
307
310
308 @reraise_safe_exceptions
311 @reraise_safe_exceptions
309 def ctx_substate(self, wire, revision):
312 def ctx_substate(self, wire, revision):
310 repo = self._factory.repo(wire)
313 repo = self._factory.repo(wire)
311 ctx = repo[revision]
314 ctx = repo[revision]
312 return ctx.substate
315 return ctx.substate
313
316
314 @reraise_safe_exceptions
317 @reraise_safe_exceptions
315 def ctx_status(self, wire, revision):
318 def ctx_status(self, wire, revision):
316 repo = self._factory.repo(wire)
319 repo = self._factory.repo(wire)
317 ctx = repo[revision]
320 ctx = repo[revision]
318 status = repo[ctx.p1().node()].status(other=ctx.node())
321 status = repo[ctx.p1().node()].status(other=ctx.node())
319 # the status object (an odd, custom named tuple in mercurial) does not
322 # the status object (an odd, custom named tuple in mercurial) does not
320 # serialize correctly; we turn it into a list, as the underlying
323 # serialize correctly; we turn it into a list, as the underlying
321 # API expects a list
324 # API expects a list
322 return list(status)
325 return list(status)
323
326
324 @reraise_safe_exceptions
327 @reraise_safe_exceptions
325 def ctx_user(self, wire, revision):
328 def ctx_user(self, wire, revision):
326 repo = self._factory.repo(wire)
329 repo = self._factory.repo(wire)
327 ctx = repo[revision]
330 ctx = repo[revision]
328 return ctx.user()
331 return ctx.user()
329
332
330 @reraise_safe_exceptions
333 @reraise_safe_exceptions
331 def check_url(self, url, config):
334 def check_url(self, url, config):
332 _proto = None
335 _proto = None
333 if '+' in url[:url.find('://')]:
336 if '+' in url[:url.find('://')]:
334 _proto = url[0:url.find('+')]
337 _proto = url[0:url.find('+')]
335 url = url[url.find('+') + 1:]
338 url = url[url.find('+') + 1:]
336 handlers = []
339 handlers = []
337 url_obj = url_parser(url)
340 url_obj = url_parser(url)
338 test_uri, authinfo = url_obj.authinfo()
341 test_uri, authinfo = url_obj.authinfo()
339 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
342 url_obj.passwd = '*****' if url_obj.passwd else url_obj.passwd
340 url_obj.query = obfuscate_qs(url_obj.query)
343 url_obj.query = obfuscate_qs(url_obj.query)
341
344
342 cleaned_uri = str(url_obj)
345 cleaned_uri = str(url_obj)
343 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
346 log.info("Checking URL for remote cloning/import: %s", cleaned_uri)
344
347
345 if authinfo:
348 if authinfo:
346 # create a password manager
349 # create a password manager
347 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
350 passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
348 passmgr.add_password(*authinfo)
351 passmgr.add_password(*authinfo)
349
352
350 handlers.extend((httpbasicauthhandler(passmgr),
353 handlers.extend((httpbasicauthhandler(passmgr),
351 httpdigestauthhandler(passmgr)))
354 httpdigestauthhandler(passmgr)))
352
355
353 o = urllib2.build_opener(*handlers)
356 o = urllib2.build_opener(*handlers)
354 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
357 o.addheaders = [('Content-Type', 'application/mercurial-0.1'),
355 ('Accept', 'application/mercurial-0.1')]
358 ('Accept', 'application/mercurial-0.1')]
356
359
357 q = {"cmd": 'between'}
360 q = {"cmd": 'between'}
358 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
361 q.update({'pairs': "%s-%s" % ('0' * 40, '0' * 40)})
359 qs = '?%s' % urllib.urlencode(q)
362 qs = '?%s' % urllib.urlencode(q)
360 cu = "%s%s" % (test_uri, qs)
363 cu = "%s%s" % (test_uri, qs)
361 req = urllib2.Request(cu, None, {})
364 req = urllib2.Request(cu, None, {})
362
365
363 try:
366 try:
364 log.debug("Trying to open URL %s", cleaned_uri)
367 log.debug("Trying to open URL %s", cleaned_uri)
365 resp = o.open(req)
368 resp = o.open(req)
366 if resp.code != 200:
369 if resp.code != 200:
367 raise exceptions.URLError('Return Code is not 200')
370 raise exceptions.URLError('Return Code is not 200')
368 except Exception as e:
371 except Exception as e:
369 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
372 log.warning("URL cannot be opened: %s", cleaned_uri, exc_info=True)
370 # means it cannot be cloned
373 # means it cannot be cloned
371 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
374 raise exceptions.URLError("[%s] org_exc: %s" % (cleaned_uri, e))
372
375
373 # now check if it's a proper hg repo, but don't do it for svn
376 # now check if it's a proper hg repo, but don't do it for svn
374 try:
377 try:
375 if _proto == 'svn':
378 if _proto == 'svn':
376 pass
379 pass
377 else:
380 else:
378 # check for pure hg repos
381 # check for pure hg repos
379 log.debug(
382 log.debug(
380 "Verifying if URL is a Mercurial repository: %s",
383 "Verifying if URL is a Mercurial repository: %s",
381 cleaned_uri)
384 cleaned_uri)
382 httppeer(make_ui_from_config(config), url).lookup('tip')
385 httppeer(make_ui_from_config(config), url).lookup('tip')
383 except Exception as e:
386 except Exception as e:
384 log.warning("URL is not a valid Mercurial repository: %s",
387 log.warning("URL is not a valid Mercurial repository: %s",
385 cleaned_uri)
388 cleaned_uri)
386 raise exceptions.URLError(
389 raise exceptions.URLError(
387 "url [%s] does not look like an hg repo org_exc: %s"
390 "url [%s] does not look like an hg repo org_exc: %s"
388 % (cleaned_uri, e))
391 % (cleaned_uri, e))
389
392
390 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
393 log.info("URL is a valid Mercurial repository: %s", cleaned_uri)
391 return True
394 return True
392
395
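
A hedged example of probing a remote with the method above before cloning (the URL and empty config list are illustrative; `config` takes the same (section, option, value) tuples as make_ui_from_config):

    try:
        remote.check_url('https://www.mercurial-scm.org/repo/hg', config=[])
    except exceptions.URLError as e:
        log.warning('remote is unreachable or not a Mercurial repository: %s', e)
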
393 @reraise_safe_exceptions
396 @reraise_safe_exceptions
394 def diff(
397 def diff(
395 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
398 self, wire, rev1, rev2, file_filter, opt_git, opt_ignorews,
396 context):
399 context):
397 repo = self._factory.repo(wire)
400 repo = self._factory.repo(wire)
398
401
399 if file_filter:
402 if file_filter:
400 match_filter = match(file_filter[0], '', [file_filter[1]])
403 match_filter = match(file_filter[0], '', [file_filter[1]])
401 else:
404 else:
402 match_filter = file_filter
405 match_filter = file_filter
403 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
406 opts = diffopts(git=opt_git, ignorews=opt_ignorews, context=context)
404
407
405 try:
408 try:
406 return "".join(patch.diff(
409 return "".join(patch.diff(
407 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
410 repo, node1=rev1, node2=rev2, match=match_filter, opts=opts))
408 except RepoLookupError:
411 except RepoLookupError:
409 raise exceptions.LookupException()
412 raise exceptions.LookupException()
410
413
411 @reraise_safe_exceptions
414 @reraise_safe_exceptions
412 def file_history(self, wire, revision, path, limit):
415 def file_history(self, wire, revision, path, limit):
413 repo = self._factory.repo(wire)
416 repo = self._factory.repo(wire)
414
417
415 ctx = repo[revision]
418 ctx = repo[revision]
416 fctx = ctx.filectx(path)
419 fctx = ctx.filectx(path)
417
420
418 def history_iter():
421 def history_iter():
419 limit_rev = fctx.rev()
422 limit_rev = fctx.rev()
420 for obj in reversed(list(fctx.filelog())):
423 for obj in reversed(list(fctx.filelog())):
421 obj = fctx.filectx(obj)
424 obj = fctx.filectx(obj)
422 if limit_rev >= obj.rev():
425 if limit_rev >= obj.rev():
423 yield obj
426 yield obj
424
427
425 history = []
428 history = []
426 for cnt, obj in enumerate(history_iter()):
429 for cnt, obj in enumerate(history_iter()):
427 if limit and cnt >= limit:
430 if limit and cnt >= limit:
428 break
431 break
429 history.append(hex(obj.node()))
432 history.append(hex(obj.node()))
430
433
431 return [x for x in history]
434 return [x for x in history]
432
435
433 @reraise_safe_exceptions
436 @reraise_safe_exceptions
434 def file_history_untill(self, wire, revision, path, limit):
437 def file_history_untill(self, wire, revision, path, limit):
435 repo = self._factory.repo(wire)
438 repo = self._factory.repo(wire)
436 ctx = repo[revision]
439 ctx = repo[revision]
437 fctx = ctx.filectx(path)
440 fctx = ctx.filectx(path)
438
441
439 file_log = list(fctx.filelog())
442 file_log = list(fctx.filelog())
440 if limit:
443 if limit:
441 # Limit to the last n items
444 # Limit to the last n items
442 file_log = file_log[-limit:]
445 file_log = file_log[-limit:]
443
446
444 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
447 return [hex(fctx.filectx(cs).node()) for cs in reversed(file_log)]
445
448
446 @reraise_safe_exceptions
449 @reraise_safe_exceptions
447 def fctx_annotate(self, wire, revision, path):
450 def fctx_annotate(self, wire, revision, path):
448 repo = self._factory.repo(wire)
451 repo = self._factory.repo(wire)
449 ctx = repo[revision]
452 ctx = repo[revision]
450 fctx = ctx.filectx(path)
453 fctx = ctx.filectx(path)
451
454
452 result = []
455 result = []
453 for i, annotate_data in enumerate(fctx.annotate()):
456 for i, annotate_data in enumerate(fctx.annotate()):
454 ln_no = i + 1
457 ln_no = i + 1
455 node_info, content = annotate_data
458 node_info, content = annotate_data
456 sha = hex(node_info[0].node())
459 sha = hex(node_info[0].node())
457 result.append((ln_no, sha, content))
460 result.append((ln_no, sha, content))
458 return result
461 return result
459
462
460 @reraise_safe_exceptions
463 @reraise_safe_exceptions
461 def fctx_data(self, wire, revision, path):
464 def fctx_data(self, wire, revision, path):
462 repo = self._factory.repo(wire)
465 repo = self._factory.repo(wire)
463 ctx = repo[revision]
466 ctx = repo[revision]
464 fctx = ctx.filectx(path)
467 fctx = ctx.filectx(path)
465 return fctx.data()
468 return fctx.data()
466
469
467 @reraise_safe_exceptions
470 @reraise_safe_exceptions
468 def fctx_flags(self, wire, revision, path):
471 def fctx_flags(self, wire, revision, path):
469 repo = self._factory.repo(wire)
472 repo = self._factory.repo(wire)
470 ctx = repo[revision]
473 ctx = repo[revision]
471 fctx = ctx.filectx(path)
474 fctx = ctx.filectx(path)
472 return fctx.flags()
475 return fctx.flags()
473
476
474 @reraise_safe_exceptions
477 @reraise_safe_exceptions
475 def fctx_size(self, wire, revision, path):
478 def fctx_size(self, wire, revision, path):
476 repo = self._factory.repo(wire)
479 repo = self._factory.repo(wire)
477 ctx = repo[revision]
480 ctx = repo[revision]
478 fctx = ctx.filectx(path)
481 fctx = ctx.filectx(path)
479 return fctx.size()
482 return fctx.size()
480
483
481 @reraise_safe_exceptions
484 @reraise_safe_exceptions
482 def get_all_commit_ids(self, wire, name):
485 def get_all_commit_ids(self, wire, name):
483 repo = self._factory.repo(wire)
486 repo = self._factory.repo(wire)
484 revs = repo.filtered(name).changelog.index
487 revs = repo.filtered(name).changelog.index
485 return map(lambda x: hex(x[7]), revs)[:-1]
488 return map(lambda x: hex(x[7]), revs)[:-1]
486
489
487 @reraise_safe_exceptions
490 @reraise_safe_exceptions
488 def get_config_value(self, wire, section, name, untrusted=False):
491 def get_config_value(self, wire, section, name, untrusted=False):
489 repo = self._factory.repo(wire)
492 repo = self._factory.repo(wire)
490 return repo.ui.config(section, name, untrusted=untrusted)
493 return repo.ui.config(section, name, untrusted=untrusted)
491
494
492 @reraise_safe_exceptions
495 @reraise_safe_exceptions
493 def get_config_bool(self, wire, section, name, untrusted=False):
496 def get_config_bool(self, wire, section, name, untrusted=False):
494 repo = self._factory.repo(wire)
497 repo = self._factory.repo(wire)
495 return repo.ui.configbool(section, name, untrusted=untrusted)
498 return repo.ui.configbool(section, name, untrusted=untrusted)
496
499
497 @reraise_safe_exceptions
500 @reraise_safe_exceptions
498 def get_config_list(self, wire, section, name, untrusted=False):
501 def get_config_list(self, wire, section, name, untrusted=False):
499 repo = self._factory.repo(wire)
502 repo = self._factory.repo(wire)
500 return repo.ui.configlist(section, name, untrusted=untrusted)
503 return repo.ui.configlist(section, name, untrusted=untrusted)
501
504
502 @reraise_safe_exceptions
505 @reraise_safe_exceptions
503 def is_large_file(self, wire, path):
506 def is_large_file(self, wire, path):
504 return largefiles.lfutil.isstandin(path)
507 return largefiles.lfutil.isstandin(path)
505
508
506 @reraise_safe_exceptions
509 @reraise_safe_exceptions
507 def in_largefiles_store(self, wire, sha):
510 def in_largefiles_store(self, wire, sha):
508 repo = self._factory.repo(wire)
511 repo = self._factory.repo(wire)
509 return largefiles.lfutil.instore(repo, sha)
512 return largefiles.lfutil.instore(repo, sha)
510
513
511 @reraise_safe_exceptions
514 @reraise_safe_exceptions
512 def in_user_cache(self, wire, sha):
515 def in_user_cache(self, wire, sha):
513 repo = self._factory.repo(wire)
516 repo = self._factory.repo(wire)
514 return largefiles.lfutil.inusercache(repo.ui, sha)
517 return largefiles.lfutil.inusercache(repo.ui, sha)
515
518
516 @reraise_safe_exceptions
519 @reraise_safe_exceptions
517 def store_path(self, wire, sha):
520 def store_path(self, wire, sha):
518 repo = self._factory.repo(wire)
521 repo = self._factory.repo(wire)
519 return largefiles.lfutil.storepath(repo, sha)
522 return largefiles.lfutil.storepath(repo, sha)
520
523
521 @reraise_safe_exceptions
524 @reraise_safe_exceptions
522 def link(self, wire, sha, path):
525 def link(self, wire, sha, path):
523 repo = self._factory.repo(wire)
526 repo = self._factory.repo(wire)
524 largefiles.lfutil.link(
527 largefiles.lfutil.link(
525 largefiles.lfutil.usercachepath(repo.ui, sha), path)
528 largefiles.lfutil.usercachepath(repo.ui, sha), path)
526
529
527 @reraise_safe_exceptions
530 @reraise_safe_exceptions
528 def localrepository(self, wire, create=False):
531 def localrepository(self, wire, create=False):
529 self._factory.repo(wire, create=create)
532 self._factory.repo(wire, create=create)
530
533
531 @reraise_safe_exceptions
534 @reraise_safe_exceptions
532 def lookup(self, wire, revision, both):
535 def lookup(self, wire, revision, both):
533 # TODO Paris: Ugly hack to "deserialize" long for msgpack
536 # TODO Paris: Ugly hack to "deserialize" long for msgpack
534 if isinstance(revision, float):
537 if isinstance(revision, float):
535 revision = long(revision)
538 revision = long(revision)
536 repo = self._factory.repo(wire)
539 repo = self._factory.repo(wire)
537 try:
540 try:
538 ctx = repo[revision]
541 ctx = repo[revision]
539 except RepoLookupError:
542 except RepoLookupError:
540 raise exceptions.LookupException(revision)
543 raise exceptions.LookupException(revision)
541 except LookupError as e:
544 except LookupError as e:
542 raise exceptions.LookupException(e.name)
545 raise exceptions.LookupException(e.name)
543
546
544 if not both:
547 if not both:
545 return ctx.hex()
548 return ctx.hex()
546
549
547 ctx = repo[ctx.hex()]
550 ctx = repo[ctx.hex()]
548 return ctx.hex(), ctx.rev()
551 return ctx.hex(), ctx.rev()
549
552
550 @reraise_safe_exceptions
553 @reraise_safe_exceptions
551 def pull(self, wire, url, commit_ids=None):
554 def pull(self, wire, url, commit_ids=None):
552 repo = self._factory.repo(wire)
555 repo = self._factory.repo(wire)
553 remote = peer(repo, {}, url)
556 remote = peer(repo, {}, url)
554 if commit_ids:
557 if commit_ids:
555 commit_ids = [bin(commit_id) for commit_id in commit_ids]
558 commit_ids = [bin(commit_id) for commit_id in commit_ids]
556
559
557 return exchange.pull(
560 return exchange.pull(
558 repo, remote, heads=commit_ids, force=None).cgresult
561 repo, remote, heads=commit_ids, force=None).cgresult
559
562
560 @reraise_safe_exceptions
563 @reraise_safe_exceptions
561 def revision(self, wire, rev):
564 def revision(self, wire, rev):
562 repo = self._factory.repo(wire)
565 repo = self._factory.repo(wire)
563 ctx = repo[rev]
566 ctx = repo[rev]
564 return ctx.rev()
567 return ctx.rev()
565
568
566 @reraise_safe_exceptions
569 @reraise_safe_exceptions
567 def rev_range(self, wire, filter):
570 def rev_range(self, wire, filter):
568 repo = self._factory.repo(wire)
571 repo = self._factory.repo(wire)
569 revisions = [rev for rev in revrange(repo, filter)]
572 revisions = [rev for rev in revrange(repo, filter)]
570 return revisions
573 return revisions
571
574
572 @reraise_safe_exceptions
575 @reraise_safe_exceptions
573 def rev_range_hash(self, wire, node):
576 def rev_range_hash(self, wire, node):
574 repo = self._factory.repo(wire)
577 repo = self._factory.repo(wire)
575
578
576 def get_revs(repo, rev_opt):
579 def get_revs(repo, rev_opt):
577 if rev_opt:
580 if rev_opt:
578 revs = revrange(repo, rev_opt)
581 revs = revrange(repo, rev_opt)
579 if len(revs) == 0:
582 if len(revs) == 0:
580 return (nullrev, nullrev)
583 return (nullrev, nullrev)
581 return max(revs), min(revs)
584 return max(revs), min(revs)
582 else:
585 else:
583 return len(repo) - 1, 0
586 return len(repo) - 1, 0
584
587
585 stop, start = get_revs(repo, [node + ':'])
588 stop, start = get_revs(repo, [node + ':'])
586 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
589 revs = [hex(repo[r].node()) for r in xrange(start, stop + 1)]
587 return revs
590 return revs
588
591
589 @reraise_safe_exceptions
592 @reraise_safe_exceptions
590 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
593 def revs_from_revspec(self, wire, rev_spec, *args, **kwargs):
591 other_path = kwargs.pop('other_path', None)
594 other_path = kwargs.pop('other_path', None)
592
595
593 # case when we want to compare two independent repositories
596 # case when we want to compare two independent repositories
594 if other_path and other_path != wire["path"]:
597 if other_path and other_path != wire["path"]:
595 baseui = self._factory._create_config(wire["config"])
598 baseui = self._factory._create_config(wire["config"])
596 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
599 repo = unionrepo.unionrepository(baseui, other_path, wire["path"])
597 else:
600 else:
598 repo = self._factory.repo(wire)
601 repo = self._factory.repo(wire)
599 return list(repo.revs(rev_spec, *args))
602 return list(repo.revs(rev_spec, *args))
600
603
601 @reraise_safe_exceptions
604 @reraise_safe_exceptions
602 def strip(self, wire, revision, update, backup):
605 def strip(self, wire, revision, update, backup):
603 repo = self._factory.repo(wire)
606 repo = self._factory.repo(wire)
604 ctx = repo[revision]
607 ctx = repo[revision]
605 hgext_strip(
608 hgext_strip(
606 repo.baseui, repo, ctx.node(), update=update, backup=backup)
609 repo.baseui, repo, ctx.node(), update=update, backup=backup)
607
610
608 @reraise_safe_exceptions
611 @reraise_safe_exceptions
609 def verify(self, wire,):
612 def verify(self, wire,):
610 repo = self._factory.repo(wire)
613 repo = self._factory.repo(wire)
611 baseui = self._factory._create_config(wire['config'])
614 baseui = self._factory._create_config(wire['config'])
612 baseui.setconfig('ui', 'quiet', 'false')
615 baseui.setconfig('ui', 'quiet', 'false')
613 output = io.BytesIO()
616 output = io.BytesIO()
614
617
615 def write(data, **unused_kwargs):
618 def write(data, **unused_kwargs):
616 output.write(data)
619 output.write(data)
617 baseui.write = write
620 baseui.write = write
618
621
619 repo.ui = baseui
622 repo.ui = baseui
620 verify.verify(repo)
623 verify.verify(repo)
621 return output.getvalue()
624 return output.getvalue()
622
625
623 @reraise_safe_exceptions
626 @reraise_safe_exceptions
624 def tag(self, wire, name, revision, message, local, user,
627 def tag(self, wire, name, revision, message, local, user,
625 tag_time, tag_timezone):
628 tag_time, tag_timezone):
626 repo = self._factory.repo(wire)
629 repo = self._factory.repo(wire)
627 ctx = repo[revision]
630 ctx = repo[revision]
628 node = ctx.node()
631 node = ctx.node()
629
632
630 date = (tag_time, tag_timezone)
633 date = (tag_time, tag_timezone)
631 try:
634 try:
632 repo.tag(name, node, message, local, user, date)
635 hg_tag.tag(repo, name, node, message, local, user, date)
633 except Abort as e:
636 except Abort as e:
634 log.exception("Tag operation aborted")
637 log.exception("Tag operation aborted")
635 # Exception can contain unicode which we convert
638 # Exception can contain unicode which we convert
636 raise exceptions.AbortException(repr(e))
639 raise exceptions.AbortException(repr(e))
637
640
638 @reraise_safe_exceptions
641 @reraise_safe_exceptions
639 def tags(self, wire):
642 def tags(self, wire):
640 repo = self._factory.repo(wire)
643 repo = self._factory.repo(wire)
641 return repo.tags()
644 return repo.tags()
642
645
643 @reraise_safe_exceptions
646 @reraise_safe_exceptions
644 def update(self, wire, node=None, clean=False):
647 def update(self, wire, node=None, clean=False):
645 repo = self._factory.repo(wire)
648 repo = self._factory.repo(wire)
646 baseui = self._factory._create_config(wire['config'])
649 baseui = self._factory._create_config(wire['config'])
647 commands.update(baseui, repo, node=node, clean=clean)
650 commands.update(baseui, repo, node=node, clean=clean)
648
651
649 @reraise_safe_exceptions
652 @reraise_safe_exceptions
650 def identify(self, wire):
653 def identify(self, wire):
651 repo = self._factory.repo(wire)
654 repo = self._factory.repo(wire)
652 baseui = self._factory._create_config(wire['config'])
655 baseui = self._factory._create_config(wire['config'])
653 output = io.BytesIO()
656 output = io.BytesIO()
654 baseui.write = output.write
657 baseui.write = output.write
655 # This is required to get a full node id
658 # This is required to get a full node id
656 baseui.debugflag = True
659 baseui.debugflag = True
657 commands.identify(baseui, repo, id=True)
660 commands.identify(baseui, repo, id=True)
658
661
659 return output.getvalue()
662 return output.getvalue()
660
663
661 @reraise_safe_exceptions
664 @reraise_safe_exceptions
662 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
665 def pull_cmd(self, wire, source, bookmark=None, branch=None, revision=None,
663 hooks=True):
666 hooks=True):
664 repo = self._factory.repo(wire)
667 repo = self._factory.repo(wire)
665 baseui = self._factory._create_config(wire['config'], hooks=hooks)
668 baseui = self._factory._create_config(wire['config'], hooks=hooks)
666
669
667 # Mercurial internally has a lot of logic that checks ONLY whether an
670 # Mercurial internally has a lot of logic that checks ONLY whether an
668 # option is defined, so we only pass options that are actually set
671 # option is defined, so we only pass options that are actually set
669 opts = {}
672 opts = {}
670 if bookmark:
673 if bookmark:
671 opts['bookmark'] = bookmark
674 opts['bookmark'] = bookmark
672 if branch:
675 if branch:
673 opts['branch'] = branch
676 opts['branch'] = branch
674 if revision:
677 if revision:
675 opts['rev'] = revision
678 opts['rev'] = revision
676
679
677 commands.pull(baseui, repo, source, **opts)
680 commands.pull(baseui, repo, source, **opts)
678
681
679 @reraise_safe_exceptions
682 @reraise_safe_exceptions
680 def heads(self, wire, branch=None):
683 def heads(self, wire, branch=None):
681 repo = self._factory.repo(wire)
684 repo = self._factory.repo(wire)
682 baseui = self._factory._create_config(wire['config'])
685 baseui = self._factory._create_config(wire['config'])
683 output = io.BytesIO()
686 output = io.BytesIO()
684
687
685 def write(data, **unused_kwargs):
688 def write(data, **unused_kwargs):
686 output.write(data)
689 output.write(data)
687
690
688 baseui.write = write
691 baseui.write = write
689 if branch:
692 if branch:
690 args = [branch]
693 args = [branch]
691 else:
694 else:
692 args = []
695 args = []
693 commands.heads(baseui, repo, template='{node} ', *args)
696 commands.heads(baseui, repo, template='{node} ', *args)
694
697
695 return output.getvalue()
698 return output.getvalue()
696
699
697 @reraise_safe_exceptions
700 @reraise_safe_exceptions
698 def ancestor(self, wire, revision1, revision2):
701 def ancestor(self, wire, revision1, revision2):
699 repo = self._factory.repo(wire)
702 repo = self._factory.repo(wire)
700 changelog = repo.changelog
703 changelog = repo.changelog
701 lookup = repo.lookup
704 lookup = repo.lookup
702 a = changelog.ancestor(lookup(revision1), lookup(revision2))
705 a = changelog.ancestor(lookup(revision1), lookup(revision2))
703 return hex(a)
706 return hex(a)
704
707
705 @reraise_safe_exceptions
708 @reraise_safe_exceptions
706 def push(self, wire, revisions, dest_path, hooks=True,
709 def push(self, wire, revisions, dest_path, hooks=True,
707 push_branches=False):
710 push_branches=False):
708 repo = self._factory.repo(wire)
711 repo = self._factory.repo(wire)
709 baseui = self._factory._create_config(wire['config'], hooks=hooks)
712 baseui = self._factory._create_config(wire['config'], hooks=hooks)
710 commands.push(baseui, repo, dest=dest_path, rev=revisions,
713 commands.push(baseui, repo, dest=dest_path, rev=revisions,
711 new_branch=push_branches)
714 new_branch=push_branches)
712
715
713 @reraise_safe_exceptions
716 @reraise_safe_exceptions
714 def merge(self, wire, revision):
717 def merge(self, wire, revision):
715 repo = self._factory.repo(wire)
718 repo = self._factory.repo(wire)
716 baseui = self._factory._create_config(wire['config'])
719 baseui = self._factory._create_config(wire['config'])
717 repo.ui.setconfig('ui', 'merge', 'internal:dump')
720 repo.ui.setconfig('ui', 'merge', 'internal:dump')
718
721
719 # When subrepositories are used, mercurial prompts the user in case of
722 # When subrepositories are used, mercurial prompts the user in case of
720 # merge conflicts or differing subrepository sources. By setting the
723 # merge conflicts or differing subrepository sources. By setting the
721 # interactive flag to `False` mercurial doesn't prompt the user but
724 # interactive flag to `False` mercurial doesn't prompt the user but
722 # instead uses a default value.
725 # instead uses a default value.
723 repo.ui.setconfig('ui', 'interactive', False)
726 repo.ui.setconfig('ui', 'interactive', False)
724
727
725 commands.merge(baseui, repo, rev=revision)
728 commands.merge(baseui, repo, rev=revision)
726
729
727 @reraise_safe_exceptions
730 @reraise_safe_exceptions
728 def commit(self, wire, message, username):
731 def commit(self, wire, message, username, close_branch=False):
729 repo = self._factory.repo(wire)
732 repo = self._factory.repo(wire)
730 baseui = self._factory._create_config(wire['config'])
733 baseui = self._factory._create_config(wire['config'])
731 repo.ui.setconfig('ui', 'username', username)
734 repo.ui.setconfig('ui', 'username', username)
732 commands.commit(baseui, repo, message=message)
735 commands.commit(baseui, repo, message=message, close_branch=close_branch)
733
736
734 @reraise_safe_exceptions
737 @reraise_safe_exceptions
735 def rebase(self, wire, source=None, dest=None, abort=False):
738 def rebase(self, wire, source=None, dest=None, abort=False):
736 repo = self._factory.repo(wire)
739 repo = self._factory.repo(wire)
737 baseui = self._factory._create_config(wire['config'])
740 baseui = self._factory._create_config(wire['config'])
738 repo.ui.setconfig('ui', 'merge', 'internal:dump')
741 repo.ui.setconfig('ui', 'merge', 'internal:dump')
739 rebase.rebase(
742 rebase.rebase(
740 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
743 baseui, repo, base=source, dest=dest, abort=abort, keep=not abort)
741
744
742 @reraise_safe_exceptions
745 @reraise_safe_exceptions
743 def bookmark(self, wire, bookmark, revision=None):
746 def bookmark(self, wire, bookmark, revision=None):
744 repo = self._factory.repo(wire)
747 repo = self._factory.repo(wire)
745 baseui = self._factory._create_config(wire['config'])
748 baseui = self._factory._create_config(wire['config'])
746 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
749 commands.bookmark(baseui, repo, bookmark, rev=revision, force=True)
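
One behavioural change in this file is the new close_branch flag on commit(); a minimal sketch of using it (the repository wire, message and username are illustrative assumptions):

    remote.commit(wire, message='Close the feature branch',
                  username='dev <dev@example.com>', close_branch=True)
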
@@ -1,62 +1,63 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 """
18 """
19 Mercurial libs compatibility
19 Mercurial libs compatibility
20 """
20 """
21
21
22 import mercurial
22 import mercurial
23 import mercurial.demandimport
23 import mercurial.demandimport
24 # patch demandimport, due to a bug in mercurial that always triggers
24 # patch demandimport, due to a bug in mercurial that always triggers
25 # demandimport.enable()
25 # demandimport.enable()
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
26 mercurial.demandimport.enable = lambda *args, **kwargs: 1
27
27
28 from mercurial import ui
28 from mercurial import ui
29 from mercurial import patch
29 from mercurial import patch
30 from mercurial import config
30 from mercurial import config
31 from mercurial import extensions
31 from mercurial import extensions
32 from mercurial import scmutil
32 from mercurial import scmutil
33 from mercurial import archival
33 from mercurial import archival
34 from mercurial import discovery
34 from mercurial import discovery
35 from mercurial import unionrepo
35 from mercurial import unionrepo
36 from mercurial import localrepo
36 from mercurial import localrepo
37 from mercurial import merge as hg_merge
37 from mercurial import merge as hg_merge
38 from mercurial import subrepo
38 from mercurial import subrepo
39 from mercurial import tags as hg_tag
39
40
40 from mercurial.commands import clone, nullid, pull
41 from mercurial.commands import clone, nullid, pull
41 from mercurial.context import memctx, memfilectx
42 from mercurial.context import memctx, memfilectx
42 from mercurial.error import (
43 from mercurial.error import (
43 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
44 LookupError, RepoError, RepoLookupError, Abort, InterventionRequired,
44 RequirementError)
45 RequirementError)
45 from mercurial.hgweb import hgweb_mod
46 from mercurial.hgweb import hgweb_mod
46 from mercurial.localrepo import localrepository
47 from mercurial.localrepo import localrepository
47 from mercurial.match import match
48 from mercurial.match import match
48 from mercurial.mdiff import diffopts
49 from mercurial.mdiff import diffopts
49 from mercurial.node import bin, hex
50 from mercurial.node import bin, hex
50 from mercurial.encoding import tolocal
51 from mercurial.encoding import tolocal
51 from mercurial.discovery import findcommonoutgoing
52 from mercurial.discovery import findcommonoutgoing
52 from mercurial.hg import peer
53 from mercurial.hg import peer
53 from mercurial.httppeer import httppeer
54 from mercurial.httppeer import httppeer
54 from mercurial.util import url as hg_url
55 from mercurial.util import url as hg_url
55 from mercurial.scmutil import revrange
56 from mercurial.scmutil import revrange
56 from mercurial.node import nullrev
57 from mercurial.node import nullrev
57 from mercurial import exchange
58 from mercurial import exchange
58 from hgext import largefiles
59 from hgext import largefiles
59
60
60 # those authnadlers are patched for python 2.6.5 bug an
61 # those authnadlers are patched for python 2.6.5 bug an
61 # infinit looping when given invalid resources
62 # infinit looping when given invalid resources
62 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
63 from mercurial.url import httpbasicauthhandler, httpdigestauthhandler
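The compat module above neutralizes Mercurial's demandimport before eagerly importing the submodules it re-exports, so that missing modules fail at import time rather than on first attribute access. Below is a minimal, self-contained sketch of that monkey-patching pattern only; a stand-in module object is used instead of the real mercurial.demandimport, and nothing here is part of the module's actual API.

    import types

    # Stand-in for mercurial.demandimport; only the patching pattern is shown.
    demandimport = types.ModuleType('demandimport')
    demandimport.enable = lambda: None          # original enable() (stand-in)

    # The compat module replaces enable() with a harmless callable that accepts
    # any arguments and returns a truthy value, so later demandimport.enable()
    # calls inside Mercurial become no-ops and all imports stay eager.
    demandimport.enable = lambda *args, **kwargs: 1

    assert demandimport.enable('ignored', flag=True) == 1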
@@ -1,426 +1,475 b''
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # RhodeCode VCSServer provides access to different vcs backends via network.
3 # RhodeCode VCSServer provides access to different vcs backends via network.
4 # Copyright (C) 2014-2017 RodeCode GmbH
4 # Copyright (C) 2014-2017 RodeCode GmbH
5 #
5 #
6 # This program is free software; you can redistribute it and/or modify
6 # This program is free software; you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation; either version 3 of the License, or
8 # the Free Software Foundation; either version 3 of the License, or
9 # (at your option) any later version.
9 # (at your option) any later version.
10 #
10 #
11 # This program is distributed in the hope that it will be useful,
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
14 # GNU General Public License for more details.
15 #
15 #
16 # You should have received a copy of the GNU General Public License
16 # You should have received a copy of the GNU General Public License
17 # along with this program; if not, write to the Free Software Foundation,
17 # along with this program; if not, write to the Free Software Foundation,
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
19
19
20 import io
20 import io
21 import os
21 import sys
22 import sys
22 import json
23 import json
23 import logging
24 import logging
24 import collections
25 import collections
25 import importlib
26 import importlib
26 import subprocess
27 import subprocess
27
28
28 from httplib import HTTPConnection
29 from httplib import HTTPConnection
29
30
30
31
31 import mercurial.scmutil
32 import mercurial.scmutil
32 import mercurial.node
33 import mercurial.node
33 import simplejson as json
34 import simplejson as json
34
35
35 from vcsserver import exceptions
36 from vcsserver import exceptions
36
37
37 log = logging.getLogger(__name__)
38 log = logging.getLogger(__name__)
38
39
39
40
40 class HooksHttpClient(object):
41 class HooksHttpClient(object):
41 connection = None
42 connection = None
42
43
43 def __init__(self, hooks_uri):
44 def __init__(self, hooks_uri):
44 self.hooks_uri = hooks_uri
45 self.hooks_uri = hooks_uri
45
46
46 def __call__(self, method, extras):
47 def __call__(self, method, extras):
47 connection = HTTPConnection(self.hooks_uri)
48 connection = HTTPConnection(self.hooks_uri)
48 body = self._serialize(method, extras)
49 body = self._serialize(method, extras)
49 connection.request('POST', '/', body)
50 connection.request('POST', '/', body)
50 response = connection.getresponse()
51 response = connection.getresponse()
51 return json.loads(response.read())
52 return json.loads(response.read())
52
53
53 def _serialize(self, hook_name, extras):
54 def _serialize(self, hook_name, extras):
54 data = {
55 data = {
55 'method': hook_name,
56 'method': hook_name,
56 'extras': extras
57 'extras': extras
57 }
58 }
58 return json.dumps(data)
59 return json.dumps(data)
59
60
60
61
61 class HooksDummyClient(object):
62 class HooksDummyClient(object):
62 def __init__(self, hooks_module):
63 def __init__(self, hooks_module):
63 self._hooks_module = importlib.import_module(hooks_module)
64 self._hooks_module = importlib.import_module(hooks_module)
64
65
65 def __call__(self, hook_name, extras):
66 def __call__(self, hook_name, extras):
66 with self._hooks_module.Hooks() as hooks:
67 with self._hooks_module.Hooks() as hooks:
67 return getattr(hooks, hook_name)(extras)
68 return getattr(hooks, hook_name)(extras)
68
69
69
70
70 class RemoteMessageWriter(object):
71 class RemoteMessageWriter(object):
71 """Writer base class."""
72 """Writer base class."""
72 def write(self, message):
73 def write(self, message):
73 raise NotImplementedError()
74 raise NotImplementedError()
74
75
75
76
76 class HgMessageWriter(RemoteMessageWriter):
77 class HgMessageWriter(RemoteMessageWriter):
77 """Writer that knows how to send messages to mercurial clients."""
78 """Writer that knows how to send messages to mercurial clients."""
78
79
79 def __init__(self, ui):
80 def __init__(self, ui):
80 self.ui = ui
81 self.ui = ui
81
82
82 def write(self, message):
83 def write(self, message):
83 # TODO: Check why the quiet flag is set by default.
84 # TODO: Check why the quiet flag is set by default.
84 old = self.ui.quiet
85 old = self.ui.quiet
85 self.ui.quiet = False
86 self.ui.quiet = False
86 self.ui.status(message.encode('utf-8'))
87 self.ui.status(message.encode('utf-8'))
87 self.ui.quiet = old
88 self.ui.quiet = old
88
89
89
90
90 class GitMessageWriter(RemoteMessageWriter):
91 class GitMessageWriter(RemoteMessageWriter):
91 """Writer that knows how to send messages to git clients."""
92 """Writer that knows how to send messages to git clients."""
92
93
93 def __init__(self, stdout=None):
94 def __init__(self, stdout=None):
94 self.stdout = stdout or sys.stdout
95 self.stdout = stdout or sys.stdout
95
96
96 def write(self, message):
97 def write(self, message):
97 self.stdout.write(message.encode('utf-8'))
98 self.stdout.write(message.encode('utf-8'))
98
99
99
100
100 def _handle_exception(result):
101 def _handle_exception(result):
101 exception_class = result.get('exception')
102 exception_class = result.get('exception')
102 exception_traceback = result.get('exception_traceback')
103 exception_traceback = result.get('exception_traceback')
103
104
104 if exception_traceback:
105 if exception_traceback:
105 log.error('Got traceback from remote call:%s', exception_traceback)
106 log.error('Got traceback from remote call:%s', exception_traceback)
106
107
107 if exception_class == 'HTTPLockedRC':
108 if exception_class == 'HTTPLockedRC':
108 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 raise exceptions.RepositoryLockedException(*result['exception_args'])
109 elif exception_class == 'RepositoryError':
110 elif exception_class == 'RepositoryError':
110 raise exceptions.VcsException(*result['exception_args'])
111 raise exceptions.VcsException(*result['exception_args'])
111 elif exception_class:
112 elif exception_class:
112 raise Exception('Got remote exception "%s" with args "%s"' %
113 raise Exception('Got remote exception "%s" with args "%s"' %
113 (exception_class, result['exception_args']))
114 (exception_class, result['exception_args']))
114
115
115
116
116 def _get_hooks_client(extras):
117 def _get_hooks_client(extras):
117 if 'hooks_uri' in extras:
118 if 'hooks_uri' in extras:
118 protocol = extras.get('hooks_protocol')
119 protocol = extras.get('hooks_protocol')
119 return HooksHttpClient(extras['hooks_uri'])
120 return HooksHttpClient(extras['hooks_uri'])
120 else:
121 else:
121 return HooksDummyClient(extras['hooks_module'])
122 return HooksDummyClient(extras['hooks_module'])
122
123
123
124
124 def _call_hook(hook_name, extras, writer):
125 def _call_hook(hook_name, extras, writer):
125 hooks = _get_hooks_client(extras)
126 hooks = _get_hooks_client(extras)
126 result = hooks(hook_name, extras)
127 result = hooks(hook_name, extras)
128 log.debug('Hooks got result: %s', result)
127 writer.write(result['output'])
129 writer.write(result['output'])
128 _handle_exception(result)
130 _handle_exception(result)
129
131
130 return result['status']
132 return result['status']
131
133
132
134
133 def _extras_from_ui(ui):
135 def _extras_from_ui(ui):
134 extras = json.loads(ui.config('rhodecode', 'RC_SCM_DATA'))
136 hook_data = ui.config('rhodecode', 'RC_SCM_DATA')
137 if not hook_data:
138 # maybe it's inside environ ?
139 hook_data = os.environ.get('RC_SCM_DATA')
140 extras = json.loads(hook_data)
135 return extras
141 return extras
136
142
137
143
138 def repo_size(ui, repo, **kwargs):
139 return _call_hook('repo_size', _extras_from_ui(ui), HgMessageWriter(ui))
140
141
142 def pre_pull(ui, repo, **kwargs):
143 return _call_hook('pre_pull', _extras_from_ui(ui), HgMessageWriter(ui))
144
145
146 def post_pull(ui, repo, **kwargs):
147 return _call_hook('post_pull', _extras_from_ui(ui), HgMessageWriter(ui))
148
149
150 def _rev_range_hash(repo, node):
144 def _rev_range_hash(repo, node):
151
145
152 commits = []
146 commits = []
153 for rev in xrange(repo[node], len(repo)):
147 for rev in xrange(repo[node], len(repo)):
154 ctx = repo[rev]
148 ctx = repo[rev]
155 commit_id = mercurial.node.hex(ctx.node())
149 commit_id = mercurial.node.hex(ctx.node())
156 branch = ctx.branch()
150 branch = ctx.branch()
157 commits.append((commit_id, branch))
151 commits.append((commit_id, branch))
158
152
159 return commits
153 return commits
160
154
161
155
156 def repo_size(ui, repo, **kwargs):
157 extras = _extras_from_ui(ui)
158 return _call_hook('repo_size', extras, HgMessageWriter(ui))
159
160
161 def pre_pull(ui, repo, **kwargs):
162 extras = _extras_from_ui(ui)
163 return _call_hook('pre_pull', extras, HgMessageWriter(ui))
164
165
166 def pre_pull_ssh(ui, repo, **kwargs):
167 if _extras_from_ui(ui).get('SSH'):
168 return pre_pull(ui, repo, **kwargs)
169 return 0
170
171
172 def post_pull(ui, repo, **kwargs):
173 extras = _extras_from_ui(ui)
174 return _call_hook('post_pull', extras, HgMessageWriter(ui))
175
176
177 def post_pull_ssh(ui, repo, **kwargs):
178 if _extras_from_ui(ui).get('SSH'):
179 return post_pull(ui, repo, **kwargs)
180 return 0
181
182
162 def pre_push(ui, repo, node=None, **kwargs):
183 def pre_push(ui, repo, node=None, **kwargs):
163 extras = _extras_from_ui(ui)
184 extras = _extras_from_ui(ui)
164
185
165 rev_data = []
186 rev_data = []
166 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
187 if node and kwargs.get('hooktype') == 'pretxnchangegroup':
167 branches = collections.defaultdict(list)
188 branches = collections.defaultdict(list)
168 for commit_id, branch in _rev_range_hash(repo, node):
189 for commit_id, branch in _rev_range_hash(repo, node):
169 branches[branch].append(commit_id)
190 branches[branch].append(commit_id)
170
191
171 for branch, commits in branches.iteritems():
192 for branch, commits in branches.iteritems():
172 old_rev = kwargs.get('node_last') or commits[0]
193 old_rev = kwargs.get('node_last') or commits[0]
173 rev_data.append({
194 rev_data.append({
174 'old_rev': old_rev,
195 'old_rev': old_rev,
175 'new_rev': commits[-1],
196 'new_rev': commits[-1],
176 'ref': '',
197 'ref': '',
177 'type': 'branch',
198 'type': 'branch',
178 'name': branch,
199 'name': branch,
179 })
200 })
180
201
181 extras['commit_ids'] = rev_data
202 extras['commit_ids'] = rev_data
182 return _call_hook('pre_push', extras, HgMessageWriter(ui))
203 return _call_hook('pre_push', extras, HgMessageWriter(ui))
183
204
184
205
206 def pre_push_ssh(ui, repo, node=None, **kwargs):
207 if _extras_from_ui(ui).get('SSH'):
208 return pre_push(ui, repo, node, **kwargs)
209
210 return 0
211
212
213 def pre_push_ssh_auth(ui, repo, node=None, **kwargs):
214 extras = _extras_from_ui(ui)
215 if extras.get('SSH'):
216 permission = extras['SSH_PERMISSIONS']
217
218 if 'repository.write' == permission or 'repository.admin' == permission:
219 return 0
220
221 # non-zero ret code
222 return 1
223
224 return 0
225
226
185 def post_push(ui, repo, node, **kwargs):
227 def post_push(ui, repo, node, **kwargs):
186 extras = _extras_from_ui(ui)
228 extras = _extras_from_ui(ui)
187
229
188 commit_ids = []
230 commit_ids = []
189 branches = []
231 branches = []
190 bookmarks = []
232 bookmarks = []
191 tags = []
233 tags = []
192
234
193 for commit_id, branch in _rev_range_hash(repo, node):
235 for commit_id, branch in _rev_range_hash(repo, node):
194 commit_ids.append(commit_id)
236 commit_ids.append(commit_id)
195 if branch not in branches:
237 if branch not in branches:
196 branches.append(branch)
238 branches.append(branch)
197
239
198 if hasattr(ui, '_rc_pushkey_branches'):
240 if hasattr(ui, '_rc_pushkey_branches'):
199 bookmarks = ui._rc_pushkey_branches
241 bookmarks = ui._rc_pushkey_branches
200
242
201 extras['commit_ids'] = commit_ids
243 extras['commit_ids'] = commit_ids
202 extras['new_refs'] = {
244 extras['new_refs'] = {
203 'branches': branches,
245 'branches': branches,
204 'bookmarks': bookmarks,
246 'bookmarks': bookmarks,
205 'tags': tags
247 'tags': tags
206 }
248 }
207
249
208 return _call_hook('post_push', extras, HgMessageWriter(ui))
250 return _call_hook('post_push', extras, HgMessageWriter(ui))
209
251
210
252
253 def post_push_ssh(ui, repo, node, **kwargs):
254 if _extras_from_ui(ui).get('SSH'):
255 return post_push(ui, repo, node, **kwargs)
256 return 0
257
258
211 def key_push(ui, repo, **kwargs):
259 def key_push(ui, repo, **kwargs):
212 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
260 if kwargs['new'] != '0' and kwargs['namespace'] == 'bookmarks':
213 # store new bookmarks in our UI object propagated later to post_push
261 # store new bookmarks in our UI object propagated later to post_push
214 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
262 ui._rc_pushkey_branches = repo[kwargs['key']].bookmarks()
215 return
263 return
216
264
265
217 # backward compat
266 # backward compat
218 log_pull_action = post_pull
267 log_pull_action = post_pull
219
268
220 # backward compat
269 # backward compat
221 log_push_action = post_push
270 log_push_action = post_push
222
271
223
272
224 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
273 def handle_git_pre_receive(unused_repo_path, unused_revs, unused_env):
225 """
274 """
226 Old hook name: keep here for backward compatibility.
275 Old hook name: keep here for backward compatibility.
227
276
228 This is only required when the installed git hooks are not upgraded.
277 This is only required when the installed git hooks are not upgraded.
229 """
278 """
230 pass
279 pass
231
280
232
281
233 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
282 def handle_git_post_receive(unused_repo_path, unused_revs, unused_env):
234 """
283 """
235 Old hook name: keep here for backward compatibility.
284 Old hook name: keep here for backward compatibility.
236
285
237 This is only required when the installed git hooks are not upgraded.
286 This is only required when the installed git hooks are not upgraded.
238 """
287 """
239 pass
288 pass
240
289
241
290
242 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
291 HookResponse = collections.namedtuple('HookResponse', ('status', 'output'))
243
292
244
293
245 def git_pre_pull(extras):
294 def git_pre_pull(extras):
246 """
295 """
247 Pre pull hook.
296 Pre pull hook.
248
297
249 :param extras: dictionary containing the keys defined in simplevcs
298 :param extras: dictionary containing the keys defined in simplevcs
250 :type extras: dict
299 :type extras: dict
251
300
252 :return: status code of the hook. 0 for success.
301 :return: status code of the hook. 0 for success.
253 :rtype: int
302 :rtype: int
254 """
303 """
255 if 'pull' not in extras['hooks']:
304 if 'pull' not in extras['hooks']:
256 return HookResponse(0, '')
305 return HookResponse(0, '')
257
306
258 stdout = io.BytesIO()
307 stdout = io.BytesIO()
259 try:
308 try:
260 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
309 status = _call_hook('pre_pull', extras, GitMessageWriter(stdout))
261 except Exception as error:
310 except Exception as error:
262 status = 128
311 status = 128
263 stdout.write('ERROR: %s\n' % str(error))
312 stdout.write('ERROR: %s\n' % str(error))
264
313
265 return HookResponse(status, stdout.getvalue())
314 return HookResponse(status, stdout.getvalue())
266
315
267
316
268 def git_post_pull(extras):
317 def git_post_pull(extras):
269 """
318 """
270 Post pull hook.
319 Post pull hook.
271
320
272 :param extras: dictionary containing the keys defined in simplevcs
321 :param extras: dictionary containing the keys defined in simplevcs
273 :type extras: dict
322 :type extras: dict
274
323
275 :return: status code of the hook. 0 for success.
324 :return: status code of the hook. 0 for success.
276 :rtype: int
325 :rtype: int
277 """
326 """
278 if 'pull' not in extras['hooks']:
327 if 'pull' not in extras['hooks']:
279 return HookResponse(0, '')
328 return HookResponse(0, '')
280
329
281 stdout = io.BytesIO()
330 stdout = io.BytesIO()
282 try:
331 try:
283 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
332 status = _call_hook('post_pull', extras, GitMessageWriter(stdout))
284 except Exception as error:
333 except Exception as error:
285 status = 128
334 status = 128
286 stdout.write('ERROR: %s\n' % error)
335 stdout.write('ERROR: %s\n' % error)
287
336
288 return HookResponse(status, stdout.getvalue())
337 return HookResponse(status, stdout.getvalue())
289
338
290
339
291 def _parse_git_ref_lines(revision_lines):
340 def _parse_git_ref_lines(revision_lines):
292 rev_data = []
341 rev_data = []
293 for revision_line in revision_lines or []:
342 for revision_line in revision_lines or []:
294 old_rev, new_rev, ref = revision_line.strip().split(' ')
343 old_rev, new_rev, ref = revision_line.strip().split(' ')
295 ref_data = ref.split('/', 2)
344 ref_data = ref.split('/', 2)
296 if ref_data[1] in ('tags', 'heads'):
345 if ref_data[1] in ('tags', 'heads'):
297 rev_data.append({
346 rev_data.append({
298 'old_rev': old_rev,
347 'old_rev': old_rev,
299 'new_rev': new_rev,
348 'new_rev': new_rev,
300 'ref': ref,
349 'ref': ref,
301 'type': ref_data[1],
350 'type': ref_data[1],
302 'name': ref_data[2],
351 'name': ref_data[2],
303 })
352 })
304 return rev_data
353 return rev_data
305
354
306
355
307 def git_pre_receive(unused_repo_path, revision_lines, env):
356 def git_pre_receive(unused_repo_path, revision_lines, env):
308 """
357 """
309 Pre push hook.
358 Pre push hook.
310
359
311 :param extras: dictionary containing the keys defined in simplevcs
360 :param extras: dictionary containing the keys defined in simplevcs
312 :type extras: dict
361 :type extras: dict
313
362
314 :return: status code of the hook. 0 for success.
363 :return: status code of the hook. 0 for success.
315 :rtype: int
364 :rtype: int
316 """
365 """
317 extras = json.loads(env['RC_SCM_DATA'])
366 extras = json.loads(env['RC_SCM_DATA'])
318 rev_data = _parse_git_ref_lines(revision_lines)
367 rev_data = _parse_git_ref_lines(revision_lines)
319 if 'push' not in extras['hooks']:
368 if 'push' not in extras['hooks']:
320 return 0
369 return 0
321 extras['commit_ids'] = rev_data
370 extras['commit_ids'] = rev_data
322 return _call_hook('pre_push', extras, GitMessageWriter())
371 return _call_hook('pre_push', extras, GitMessageWriter())
323
372
324
373
325 def _run_command(arguments):
374 def _run_command(arguments):
326 """
375 """
327 Run the specified command and return the stdout.
376 Run the specified command and return the stdout.
328
377
329 :param arguments: sequence of program arguments (including the program name)
378 :param arguments: sequence of program arguments (including the program name)
330 :type arguments: list[str]
379 :type arguments: list[str]
331 """
380 """
332 # TODO(skreft): refactor this method and all the other similar ones.
381 # TODO(skreft): refactor this method and all the other similar ones.
333 # Probably this should be using subprocessio.
382 # Probably this should be using subprocessio.
334 process = subprocess.Popen(
383 process = subprocess.Popen(
335 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
384 arguments, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
336 stdout, stderr = process.communicate()
385 stdout, stderr = process.communicate()
337
386
338 if process.returncode != 0:
387 if process.returncode != 0:
339 raise Exception(
388 raise Exception(
340 'Command %s exited with exit code %s: stderr:%s' % (
389 'Command %s exited with exit code %s: stderr:%s' % (
341 arguments, process.returncode, stderr))
390 arguments, process.returncode, stderr))
342
391
343 return stdout
392 return stdout
344
393
345
394
346 def git_post_receive(unused_repo_path, revision_lines, env):
395 def git_post_receive(unused_repo_path, revision_lines, env):
347 """
396 """
348 Post push hook.
397 Post push hook.
349
398
350 :param extras: dictionary containing the keys defined in simplevcs
399 :param extras: dictionary containing the keys defined in simplevcs
351 :type extras: dict
400 :type extras: dict
352
401
353 :return: status code of the hook. 0 for success.
402 :return: status code of the hook. 0 for success.
354 :rtype: int
403 :rtype: int
355 """
404 """
356 extras = json.loads(env['RC_SCM_DATA'])
405 extras = json.loads(env['RC_SCM_DATA'])
357 if 'push' not in extras['hooks']:
406 if 'push' not in extras['hooks']:
358 return 0
407 return 0
359
408
360 rev_data = _parse_git_ref_lines(revision_lines)
409 rev_data = _parse_git_ref_lines(revision_lines)
361
410
362 git_revs = []
411 git_revs = []
363
412
364 # N.B.(skreft): it is ok to just call git, as git before calling a
413 # N.B.(skreft): it is ok to just call git, as git before calling a
365 # subcommand sets the PATH environment variable so that it point to the
414 # subcommand sets the PATH environment variable so that it point to the
366 # correct version of the git executable.
415 # correct version of the git executable.
367 empty_commit_id = '0' * 40
416 empty_commit_id = '0' * 40
368 branches = []
417 branches = []
369 tags = []
418 tags = []
370 for push_ref in rev_data:
419 for push_ref in rev_data:
371 type_ = push_ref['type']
420 type_ = push_ref['type']
372
421
373 if type_ == 'heads':
422 if type_ == 'heads':
374 if push_ref['old_rev'] == empty_commit_id:
423 if push_ref['old_rev'] == empty_commit_id:
375 # starting new branch case
424 # starting new branch case
376 if push_ref['name'] not in branches:
425 if push_ref['name'] not in branches:
377 branches.append(push_ref['name'])
426 branches.append(push_ref['name'])
378
427
379 # Fix up head revision if needed
428 # Fix up head revision if needed
380 cmd = ['git', 'show', 'HEAD']
429 cmd = ['git', 'show', 'HEAD']
381 try:
430 try:
382 _run_command(cmd)
431 _run_command(cmd)
383 except Exception:
432 except Exception:
384 cmd = ['git', 'symbolic-ref', 'HEAD',
433 cmd = ['git', 'symbolic-ref', 'HEAD',
385 'refs/heads/%s' % push_ref['name']]
434 'refs/heads/%s' % push_ref['name']]
386 print("Setting default branch to %s" % push_ref['name'])
435 print("Setting default branch to %s" % push_ref['name'])
387 _run_command(cmd)
436 _run_command(cmd)
388
437
389 cmd = ['git', 'for-each-ref', '--format=%(refname)',
438 cmd = ['git', 'for-each-ref', '--format=%(refname)',
390 'refs/heads/*']
439 'refs/heads/*']
391 heads = _run_command(cmd)
440 heads = _run_command(cmd)
392 heads = heads.replace(push_ref['ref'], '')
441 heads = heads.replace(push_ref['ref'], '')
393 heads = ' '.join(head for head in heads.splitlines() if head)
442 heads = ' '.join(head for head in heads.splitlines() if head)
394 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
443 cmd = ['git', 'log', '--reverse', '--pretty=format:%H',
395 '--', push_ref['new_rev'], '--not', heads]
444 '--', push_ref['new_rev'], '--not', heads]
396 git_revs.extend(_run_command(cmd).splitlines())
445 git_revs.extend(_run_command(cmd).splitlines())
397 elif push_ref['new_rev'] == empty_commit_id:
446 elif push_ref['new_rev'] == empty_commit_id:
398 # delete branch case
447 # delete branch case
399 git_revs.append('delete_branch=>%s' % push_ref['name'])
448 git_revs.append('delete_branch=>%s' % push_ref['name'])
400 else:
449 else:
401 if push_ref['name'] not in branches:
450 if push_ref['name'] not in branches:
402 branches.append(push_ref['name'])
451 branches.append(push_ref['name'])
403
452
404 cmd = ['git', 'log',
453 cmd = ['git', 'log',
405 '{old_rev}..{new_rev}'.format(**push_ref),
454 '{old_rev}..{new_rev}'.format(**push_ref),
406 '--reverse', '--pretty=format:%H']
455 '--reverse', '--pretty=format:%H']
407 git_revs.extend(_run_command(cmd).splitlines())
456 git_revs.extend(_run_command(cmd).splitlines())
408 elif type_ == 'tags':
457 elif type_ == 'tags':
409 if push_ref['name'] not in tags:
458 if push_ref['name'] not in tags:
410 tags.append(push_ref['name'])
459 tags.append(push_ref['name'])
411 git_revs.append('tag=>%s' % push_ref['name'])
460 git_revs.append('tag=>%s' % push_ref['name'])
412
461
413 extras['commit_ids'] = git_revs
462 extras['commit_ids'] = git_revs
414 extras['new_refs'] = {
463 extras['new_refs'] = {
415 'branches': branches,
464 'branches': branches,
416 'bookmarks': [],
465 'bookmarks': [],
417 'tags': tags,
466 'tags': tags,
418 }
467 }
419
468
420 if 'repo_size' in extras['hooks']:
469 if 'repo_size' in extras['hooks']:
421 try:
470 try:
422 _call_hook('repo_size', extras, GitMessageWriter())
471 _call_hook('repo_size', extras, GitMessageWriter())
423 except:
472 except:
424 pass
473 pass
425
474
426 return _call_hook('post_push', extras, GitMessageWriter())
475 return _call_hook('post_push', extras, GitMessageWriter())
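The hook changes above add SSH-aware wrappers that only act when the RC_SCM_DATA extras carry an SSH flag, with pre_push_ssh_auth additionally requiring write or admin permission, and _extras_from_ui now falls back to the RC_SCM_DATA environment variable when the ui config is empty. A small, self-contained sketch of that gating logic follows; plain dicts stand in for the Mercurial ui object and the function names here are illustrative, not part of the module.

    import json
    import os

    def extras_from_config_sketch(scm_data, environ=os.environ):
        # Mirrors the fallback added in the diff: prefer the configured
        # RC_SCM_DATA blob, otherwise read it from the environment.
        hook_data = scm_data or environ.get('RC_SCM_DATA')
        return json.loads(hook_data) if hook_data else {}

    def pre_push_ssh_auth_sketch(extras):
        # Mirrors pre_push_ssh_auth: over SSH only repository.write or
        # repository.admin may push; anything else returns a non-zero status.
        if extras.get('SSH'):
            permission = extras.get('SSH_PERMISSIONS')
            if permission in ('repository.write', 'repository.admin'):
                return 0
            return 1
        return 0

    # Example: a read-only SSH key is rejected, a write key is allowed.
    assert pre_push_ssh_auth_sketch({'SSH': True, 'SSH_PERMISSIONS': 'repository.read'}) == 1
    assert pre_push_ssh_auth_sketch({'SSH': True, 'SSH_PERMISSIONS': 'repository.write'}) == 0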
@@ -1,434 +1,466 b''
1 # RhodeCode VCSServer provides access to different vcs backends via network.
1 # RhodeCode VCSServer provides access to different vcs backends via network.
2 # Copyright (C) 2014-2017 RodeCode GmbH
2 # Copyright (C) 2014-2017 RodeCode GmbH
3 #
3 #
4 # This program is free software; you can redistribute it and/or modify
4 # This program is free software; you can redistribute it and/or modify
5 # it under the terms of the GNU General Public License as published by
5 # it under the terms of the GNU General Public License as published by
6 # the Free Software Foundation; either version 3 of the License, or
6 # the Free Software Foundation; either version 3 of the License, or
7 # (at your option) any later version.
7 # (at your option) any later version.
8 #
8 #
9 # This program is distributed in the hope that it will be useful,
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
12 # GNU General Public License for more details.
13 #
13 #
14 # You should have received a copy of the GNU General Public License
14 # You should have received a copy of the GNU General Public License
15 # along with this program; if not, write to the Free Software Foundation,
15 # along with this program; if not, write to the Free Software Foundation,
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
16 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17
17
18 import base64
18 import base64
19 import locale
19 import locale
20 import logging
20 import logging
21 import uuid
21 import uuid
22 import wsgiref.util
22 import wsgiref.util
23 import traceback
23 import traceback
24 from itertools import chain
24 from itertools import chain
25
25
26 import simplejson as json
26 import msgpack
27 import msgpack
27 from beaker.cache import CacheManager
28 from beaker.cache import CacheManager
28 from beaker.util import parse_cache_config_options
29 from beaker.util import parse_cache_config_options
29 from pyramid.config import Configurator
30 from pyramid.config import Configurator
30 from pyramid.wsgi import wsgiapp
31 from pyramid.wsgi import wsgiapp
31
32
32 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
33 from vcsserver import remote_wsgi, scm_app, settings, hgpatches
33 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
34 from vcsserver.git_lfs.app import GIT_LFS_CONTENT_TYPE, GIT_LFS_PROTO_PAT
34 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
35 from vcsserver.echo_stub import remote_wsgi as remote_wsgi_stub
35 from vcsserver.echo_stub.echo_app import EchoApp
36 from vcsserver.echo_stub.echo_app import EchoApp
36 from vcsserver.exceptions import HTTPRepoLocked
37 from vcsserver.exceptions import HTTPRepoLocked
37 from vcsserver.server import VcsServer
38 from vcsserver.server import VcsServer
38
39
39 try:
40 try:
40 from vcsserver.git import GitFactory, GitRemote
41 from vcsserver.git import GitFactory, GitRemote
41 except ImportError:
42 except ImportError:
42 GitFactory = None
43 GitFactory = None
43 GitRemote = None
44 GitRemote = None
44
45
45 try:
46 try:
46 from vcsserver.hg import MercurialFactory, HgRemote
47 from vcsserver.hg import MercurialFactory, HgRemote
47 except ImportError:
48 except ImportError:
48 MercurialFactory = None
49 MercurialFactory = None
49 HgRemote = None
50 HgRemote = None
50
51
51 try:
52 try:
52 from vcsserver.svn import SubversionFactory, SvnRemote
53 from vcsserver.svn import SubversionFactory, SvnRemote
53 except ImportError:
54 except ImportError:
54 SubversionFactory = None
55 SubversionFactory = None
55 SvnRemote = None
56 SvnRemote = None
56
57
57 log = logging.getLogger(__name__)
58 log = logging.getLogger(__name__)
58
59
59
60
60 class VCS(object):
61 class VCS(object):
61 def __init__(self, locale=None, cache_config=None):
62 def __init__(self, locale=None, cache_config=None):
62 self.locale = locale
63 self.locale = locale
63 self.cache_config = cache_config
64 self.cache_config = cache_config
64 self._configure_locale()
65 self._configure_locale()
65 self._initialize_cache()
66 self._initialize_cache()
66
67
67 if GitFactory and GitRemote:
68 if GitFactory and GitRemote:
68 git_repo_cache = self.cache.get_cache_region(
69 git_repo_cache = self.cache.get_cache_region(
69 'git', region='repo_object')
70 'git', region='repo_object')
70 git_factory = GitFactory(git_repo_cache)
71 git_factory = GitFactory(git_repo_cache)
71 self._git_remote = GitRemote(git_factory)
72 self._git_remote = GitRemote(git_factory)
72 else:
73 else:
73 log.info("Git client import failed")
74 log.info("Git client import failed")
74
75
75 if MercurialFactory and HgRemote:
76 if MercurialFactory and HgRemote:
76 hg_repo_cache = self.cache.get_cache_region(
77 hg_repo_cache = self.cache.get_cache_region(
77 'hg', region='repo_object')
78 'hg', region='repo_object')
78 hg_factory = MercurialFactory(hg_repo_cache)
79 hg_factory = MercurialFactory(hg_repo_cache)
79 self._hg_remote = HgRemote(hg_factory)
80 self._hg_remote = HgRemote(hg_factory)
80 else:
81 else:
81 log.info("Mercurial client import failed")
82 log.info("Mercurial client import failed")
82
83
83 if SubversionFactory and SvnRemote:
84 if SubversionFactory and SvnRemote:
84 svn_repo_cache = self.cache.get_cache_region(
85 svn_repo_cache = self.cache.get_cache_region(
85 'svn', region='repo_object')
86 'svn', region='repo_object')
86 svn_factory = SubversionFactory(svn_repo_cache)
87 svn_factory = SubversionFactory(svn_repo_cache)
87 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
88 self._svn_remote = SvnRemote(svn_factory, hg_factory=hg_factory)
88 else:
89 else:
89 log.info("Subversion client import failed")
90 log.info("Subversion client import failed")
90
91
91 self._vcsserver = VcsServer()
92 self._vcsserver = VcsServer()
92
93
93 def _initialize_cache(self):
94 def _initialize_cache(self):
94 cache_config = parse_cache_config_options(self.cache_config)
95 cache_config = parse_cache_config_options(self.cache_config)
95 log.info('Initializing beaker cache: %s' % cache_config)
96 log.info('Initializing beaker cache: %s' % cache_config)
96 self.cache = CacheManager(**cache_config)
97 self.cache = CacheManager(**cache_config)
97
98
98 def _configure_locale(self):
99 def _configure_locale(self):
99 if self.locale:
100 if self.locale:
100 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
101 log.info('Settings locale: `LC_ALL` to %s' % self.locale)
101 else:
102 else:
102 log.info(
103 log.info(
103 'Configuring locale subsystem based on environment variables')
104 'Configuring locale subsystem based on environment variables')
104 try:
105 try:
105 # If self.locale is the empty string, then the locale
106 # If self.locale is the empty string, then the locale
106 # module will use the environment variables. See the
107 # module will use the environment variables. See the
107 # documentation of the package `locale`.
108 # documentation of the package `locale`.
108 locale.setlocale(locale.LC_ALL, self.locale)
109 locale.setlocale(locale.LC_ALL, self.locale)
109
110
110 language_code, encoding = locale.getlocale()
111 language_code, encoding = locale.getlocale()
111 log.info(
112 log.info(
112 'Locale set to language code "%s" with encoding "%s".',
113 'Locale set to language code "%s" with encoding "%s".',
113 language_code, encoding)
114 language_code, encoding)
114 except locale.Error:
115 except locale.Error:
115 log.exception(
116 log.exception(
116 'Cannot set locale, not configuring the locale system')
117 'Cannot set locale, not configuring the locale system')
117
118
118
119
119 class WsgiProxy(object):
120 class WsgiProxy(object):
120 def __init__(self, wsgi):
121 def __init__(self, wsgi):
121 self.wsgi = wsgi
122 self.wsgi = wsgi
122
123
123 def __call__(self, environ, start_response):
124 def __call__(self, environ, start_response):
124 input_data = environ['wsgi.input'].read()
125 input_data = environ['wsgi.input'].read()
125 input_data = msgpack.unpackb(input_data)
126 input_data = msgpack.unpackb(input_data)
126
127
127 error = None
128 error = None
128 try:
129 try:
129 data, status, headers = self.wsgi.handle(
130 data, status, headers = self.wsgi.handle(
130 input_data['environment'], input_data['input_data'],
131 input_data['environment'], input_data['input_data'],
131 *input_data['args'], **input_data['kwargs'])
132 *input_data['args'], **input_data['kwargs'])
132 except Exception as e:
133 except Exception as e:
133 data, status, headers = [], None, None
134 data, status, headers = [], None, None
134 error = {
135 error = {
135 'message': str(e),
136 'message': str(e),
136 '_vcs_kind': getattr(e, '_vcs_kind', None)
137 '_vcs_kind': getattr(e, '_vcs_kind', None)
137 }
138 }
138
139
139 start_response(200, {})
140 start_response(200, {})
140 return self._iterator(error, status, headers, data)
141 return self._iterator(error, status, headers, data)
141
142
142 def _iterator(self, error, status, headers, data):
143 def _iterator(self, error, status, headers, data):
143 initial_data = [
144 initial_data = [
144 error,
145 error,
145 status,
146 status,
146 headers,
147 headers,
147 ]
148 ]
148
149
149 for d in chain(initial_data, data):
150 for d in chain(initial_data, data):
150 yield msgpack.packb(d)
151 yield msgpack.packb(d)
151
152
152
153
153 class HTTPApplication(object):
154 class HTTPApplication(object):
154 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
155 ALLOWED_EXCEPTIONS = ('KeyError', 'URLError')
155
156
156 remote_wsgi = remote_wsgi
157 remote_wsgi = remote_wsgi
157 _use_echo_app = False
158 _use_echo_app = False
158
159
159 def __init__(self, settings=None, global_config=None):
160 def __init__(self, settings=None, global_config=None):
160 self.config = Configurator(settings=settings)
161 self.config = Configurator(settings=settings)
161 self.global_config = global_config
162 self.global_config = global_config
162
163
163 locale = settings.get('locale', '') or 'en_US.UTF-8'
164 locale = settings.get('locale', '') or 'en_US.UTF-8'
164 vcs = VCS(locale=locale, cache_config=settings)
165 vcs = VCS(locale=locale, cache_config=settings)
165 self._remotes = {
166 self._remotes = {
166 'hg': vcs._hg_remote,
167 'hg': vcs._hg_remote,
167 'git': vcs._git_remote,
168 'git': vcs._git_remote,
168 'svn': vcs._svn_remote,
169 'svn': vcs._svn_remote,
169 'server': vcs._vcsserver,
170 'server': vcs._vcsserver,
170 }
171 }
171 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
172 if settings.get('dev.use_echo_app', 'false').lower() == 'true':
172 self._use_echo_app = True
173 self._use_echo_app = True
173 log.warning("Using EchoApp for VCS operations.")
174 log.warning("Using EchoApp for VCS operations.")
174 self.remote_wsgi = remote_wsgi_stub
175 self.remote_wsgi = remote_wsgi_stub
175 self._configure_settings(settings)
176 self._configure_settings(settings)
176 self._configure()
177 self._configure()
177
178
178 def _configure_settings(self, app_settings):
179 def _configure_settings(self, app_settings):
179 """
180 """
180 Configure the settings module.
181 Configure the settings module.
181 """
182 """
182 git_path = app_settings.get('git_path', None)
183 git_path = app_settings.get('git_path', None)
183 if git_path:
184 if git_path:
184 settings.GIT_EXECUTABLE = git_path
185 settings.GIT_EXECUTABLE = git_path
185
186
186 def _configure(self):
187 def _configure(self):
187 self.config.add_renderer(
188 self.config.add_renderer(
188 name='msgpack',
189 name='msgpack',
189 factory=self._msgpack_renderer_factory)
190 factory=self._msgpack_renderer_factory)
190
191
191 self.config.add_route('service', '/_service')
192 self.config.add_route('service', '/_service')
192 self.config.add_route('status', '/status')
193 self.config.add_route('status', '/status')
193 self.config.add_route('hg_proxy', '/proxy/hg')
194 self.config.add_route('hg_proxy', '/proxy/hg')
194 self.config.add_route('git_proxy', '/proxy/git')
195 self.config.add_route('git_proxy', '/proxy/git')
195 self.config.add_route('vcs', '/{backend}')
196 self.config.add_route('vcs', '/{backend}')
196 self.config.add_route('stream_git', '/stream/git/*repo_name')
197 self.config.add_route('stream_git', '/stream/git/*repo_name')
197 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
198 self.config.add_route('stream_hg', '/stream/hg/*repo_name')
198
199
199 self.config.add_view(
200 self.config.add_view(
200 self.status_view, route_name='status', renderer='json')
201 self.status_view, route_name='status', renderer='json')
201 self.config.add_view(
202 self.config.add_view(
202 self.service_view, route_name='service', renderer='msgpack')
203 self.service_view, route_name='service', renderer='msgpack')
203
204
204 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
205 self.config.add_view(self.hg_proxy(), route_name='hg_proxy')
205 self.config.add_view(self.git_proxy(), route_name='git_proxy')
206 self.config.add_view(self.git_proxy(), route_name='git_proxy')
206 self.config.add_view(
207 self.config.add_view(
207 self.vcs_view, route_name='vcs', renderer='msgpack',
208 self.vcs_view, route_name='vcs', renderer='msgpack',
208 custom_predicates=[self.is_vcs_view])
209 custom_predicates=[self.is_vcs_view])
209
210
210 self.config.add_view(self.hg_stream(), route_name='stream_hg')
211 self.config.add_view(self.hg_stream(), route_name='stream_hg')
211 self.config.add_view(self.git_stream(), route_name='stream_git')
212 self.config.add_view(self.git_stream(), route_name='stream_git')
212
213
213 def notfound(request):
214 def notfound(request):
214 return {'status': '404 NOT FOUND'}
215 return {'status': '404 NOT FOUND'}
215 self.config.add_notfound_view(notfound, renderer='json')
216 self.config.add_notfound_view(notfound, renderer='json')
216
217
217 self.config.add_view(self.handle_vcs_exception, context=Exception)
218 self.config.add_view(self.handle_vcs_exception, context=Exception)
218
219
219 self.config.add_tween(
220 self.config.add_tween(
220 'vcsserver.tweens.RequestWrapperTween',
221 'vcsserver.tweens.RequestWrapperTween',
221 )
222 )
222
223
223 def wsgi_app(self):
224 def wsgi_app(self):
224 return self.config.make_wsgi_app()
225 return self.config.make_wsgi_app()
225
226
226 def vcs_view(self, request):
227 def vcs_view(self, request):
227 remote = self._remotes[request.matchdict['backend']]
228 remote = self._remotes[request.matchdict['backend']]
228 payload = msgpack.unpackb(request.body, use_list=True)
229 payload = msgpack.unpackb(request.body, use_list=True)
229 method = payload.get('method')
230 method = payload.get('method')
230 params = payload.get('params')
231 params = payload.get('params')
231 wire = params.get('wire')
232 wire = params.get('wire')
232 args = params.get('args')
233 args = params.get('args')
233 kwargs = params.get('kwargs')
234 kwargs = params.get('kwargs')
234 if wire:
235 if wire:
235 try:
236 try:
236 wire['context'] = uuid.UUID(wire['context'])
237 wire['context'] = uuid.UUID(wire['context'])
237 except KeyError:
238 except KeyError:
238 pass
239 pass
239 args.insert(0, wire)
240 args.insert(0, wire)
240
241
241 log.debug('method called:%s with kwargs:%s', method, kwargs)
242 log.debug('method called:%s with kwargs:%s', method, kwargs)
242 try:
243 try:
243 resp = getattr(remote, method)(*args, **kwargs)
244 resp = getattr(remote, method)(*args, **kwargs)
244 except Exception as e:
245 except Exception as e:
245 tb_info = traceback.format_exc()
246 tb_info = traceback.format_exc()
246
247
247 type_ = e.__class__.__name__
248 type_ = e.__class__.__name__
248 if type_ not in self.ALLOWED_EXCEPTIONS:
249 if type_ not in self.ALLOWED_EXCEPTIONS:
249 type_ = None
250 type_ = None
250
251
251 resp = {
252 resp = {
252 'id': payload.get('id'),
253 'id': payload.get('id'),
253 'error': {
254 'error': {
254 'message': e.message,
255 'message': e.message,
255 'traceback': tb_info,
256 'traceback': tb_info,
256 'type': type_
257 'type': type_
257 }
258 }
258 }
259 }
259 try:
260 try:
260 resp['error']['_vcs_kind'] = e._vcs_kind
261 resp['error']['_vcs_kind'] = e._vcs_kind
261 except AttributeError:
262 except AttributeError:
262 pass
263 pass
263 else:
264 else:
264 resp = {
265 resp = {
265 'id': payload.get('id'),
266 'id': payload.get('id'),
266 'result': resp
267 'result': resp
267 }
268 }
268
269
269 return resp
270 return resp
270
271
271 def status_view(self, request):
272 def status_view(self, request):
272 return {'status': 'OK'}
273 import vcsserver
274 return {'status': 'OK', 'vcsserver_version': vcsserver.__version__}
273
275
274 def service_view(self, request):
276 def service_view(self, request):
275 import vcsserver
277 import vcsserver
276 import ConfigParser as configparser
278 import ConfigParser as configparser
277
279
278 payload = msgpack.unpackb(request.body, use_list=True)
280 payload = msgpack.unpackb(request.body, use_list=True)
279
281
280 try:
282 try:
281 path = self.global_config['__file__']
283 path = self.global_config['__file__']
282 config = configparser.ConfigParser()
284 config = configparser.ConfigParser()
283 config.read(path)
285 config.read(path)
284 parsed_ini = config
286 parsed_ini = config
285 if parsed_ini.has_section('server:main'):
287 if parsed_ini.has_section('server:main'):
286 parsed_ini = dict(parsed_ini.items('server:main'))
288 parsed_ini = dict(parsed_ini.items('server:main'))
287 except Exception:
289 except Exception:
288 log.exception('Failed to read .ini file for display')
290 log.exception('Failed to read .ini file for display')
289 parsed_ini = {}
291 parsed_ini = {}
290
292
291 resp = {
293 resp = {
292 'id': payload.get('id'),
294 'id': payload.get('id'),
293 'result': dict(
295 'result': dict(
294 version=vcsserver.__version__,
296 version=vcsserver.__version__,
295 config=parsed_ini,
297 config=parsed_ini,
296 payload=payload,
298 payload=payload,
297 )
299 )
298 }
300 }
299 return resp
301 return resp
300
302
301 def _msgpack_renderer_factory(self, info):
303 def _msgpack_renderer_factory(self, info):
302 def _render(value, system):
304 def _render(value, system):
303 value = msgpack.packb(value)
305 value = msgpack.packb(value)
304 request = system.get('request')
306 request = system.get('request')
305 if request is not None:
307 if request is not None:
306 response = request.response
308 response = request.response
307 ct = response.content_type
309 ct = response.content_type
308 if ct == response.default_content_type:
310 if ct == response.default_content_type:
309 response.content_type = 'application/x-msgpack'
311 response.content_type = 'application/x-msgpack'
310 return value
312 return value
311 return _render
313 return _render
312
314
315 def set_env_from_config(self, environ, config):
316 dict_conf = {}
317 try:
318 for elem in config:
319 if elem[0] == 'rhodecode':
320 dict_conf = json.loads(elem[2])
321 break
322 except Exception:
323 log.exception('Failed to fetch SCM CONFIG')
324 return
325
326 username = dict_conf.get('username')
327 if username:
328 environ['REMOTE_USER'] = username
329
330 ip = dict_conf.get('ip')
331 if ip:
332 environ['REMOTE_HOST'] = ip
333
313 def hg_proxy(self):
334 def hg_proxy(self):
314 @wsgiapp
335 @wsgiapp
315 def _hg_proxy(environ, start_response):
336 def _hg_proxy(environ, start_response):
316 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
337 app = WsgiProxy(self.remote_wsgi.HgRemoteWsgi())
317 return app(environ, start_response)
338 return app(environ, start_response)
318 return _hg_proxy
339 return _hg_proxy
319
340
320 def git_proxy(self):
341 def git_proxy(self):
321 @wsgiapp
342 @wsgiapp
322 def _git_proxy(environ, start_response):
343 def _git_proxy(environ, start_response):
323 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
344 app = WsgiProxy(self.remote_wsgi.GitRemoteWsgi())
324 return app(environ, start_response)
345 return app(environ, start_response)
325 return _git_proxy
346 return _git_proxy
326
347
327 def hg_stream(self):
348 def hg_stream(self):
328 if self._use_echo_app:
349 if self._use_echo_app:
329 @wsgiapp
350 @wsgiapp
330 def _hg_stream(environ, start_response):
351 def _hg_stream(environ, start_response):
331 app = EchoApp('fake_path', 'fake_name', None)
352 app = EchoApp('fake_path', 'fake_name', None)
332 return app(environ, start_response)
353 return app(environ, start_response)
333 return _hg_stream
354 return _hg_stream
334 else:
355 else:
335 @wsgiapp
356 @wsgiapp
336 def _hg_stream(environ, start_response):
357 def _hg_stream(environ, start_response):
358 log.debug('http-app: handling hg stream')
337 repo_path = environ['HTTP_X_RC_REPO_PATH']
359 repo_path = environ['HTTP_X_RC_REPO_PATH']
338 repo_name = environ['HTTP_X_RC_REPO_NAME']
360 repo_name = environ['HTTP_X_RC_REPO_NAME']
339 packed_config = base64.b64decode(
361 packed_config = base64.b64decode(
340 environ['HTTP_X_RC_REPO_CONFIG'])
362 environ['HTTP_X_RC_REPO_CONFIG'])
341 config = msgpack.unpackb(packed_config)
363 config = msgpack.unpackb(packed_config)
342 app = scm_app.create_hg_wsgi_app(
364 app = scm_app.create_hg_wsgi_app(
343 repo_path, repo_name, config)
365 repo_path, repo_name, config)
344
366
345 # Consitent path information for hgweb
367 # Consistent path information for hgweb
346 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
368 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
347 environ['REPO_NAME'] = repo_name
369 environ['REPO_NAME'] = repo_name
370 self.set_env_from_config(environ, config)
371
372 log.debug('http-app: starting app handler '
373 'with %s and process request', app)
348 return app(environ, ResponseFilter(start_response))
374 return app(environ, ResponseFilter(start_response))
349 return _hg_stream
375 return _hg_stream
350
376
351 def git_stream(self):
377 def git_stream(self):
352 if self._use_echo_app:
378 if self._use_echo_app:
353 @wsgiapp
379 @wsgiapp
354 def _git_stream(environ, start_response):
380 def _git_stream(environ, start_response):
355 app = EchoApp('fake_path', 'fake_name', None)
381 app = EchoApp('fake_path', 'fake_name', None)
356 return app(environ, start_response)
382 return app(environ, start_response)
357 return _git_stream
383 return _git_stream
358 else:
384 else:
359 @wsgiapp
385 @wsgiapp
360 def _git_stream(environ, start_response):
386 def _git_stream(environ, start_response):
387 log.debug('http-app: handling git stream')
361 repo_path = environ['HTTP_X_RC_REPO_PATH']
388 repo_path = environ['HTTP_X_RC_REPO_PATH']
362 repo_name = environ['HTTP_X_RC_REPO_NAME']
389 repo_name = environ['HTTP_X_RC_REPO_NAME']
363 packed_config = base64.b64decode(
390 packed_config = base64.b64decode(
364 environ['HTTP_X_RC_REPO_CONFIG'])
391 environ['HTTP_X_RC_REPO_CONFIG'])
365 config = msgpack.unpackb(packed_config)
392 config = msgpack.unpackb(packed_config)
366
393
367 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
394 environ['PATH_INFO'] = environ['HTTP_X_RC_PATH_INFO']
395 self.set_env_from_config(environ, config)
396
368 content_type = environ.get('CONTENT_TYPE', '')
397 content_type = environ.get('CONTENT_TYPE', '')
369
398
370 path = environ['PATH_INFO']
399 path = environ['PATH_INFO']
371 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
400 is_lfs_request = GIT_LFS_CONTENT_TYPE in content_type
372 log.debug(
401 log.debug(
373 'LFS: Detecting if request `%s` is LFS server path based '
402 'LFS: Detecting if request `%s` is LFS server path based '
374 'on content type:`%s`, is_lfs:%s',
403 'on content type:`%s`, is_lfs:%s',
375 path, content_type, is_lfs_request)
404 path, content_type, is_lfs_request)
376
405
377 if not is_lfs_request:
406 if not is_lfs_request:
378 # fallback detection by path
407 # fallback detection by path
379 if GIT_LFS_PROTO_PAT.match(path):
408 if GIT_LFS_PROTO_PAT.match(path):
380 is_lfs_request = True
409 is_lfs_request = True
381 log.debug(
410 log.debug(
382 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
411 'LFS: fallback detection by path of: `%s`, is_lfs:%s',
383 path, is_lfs_request)
412 path, is_lfs_request)
384
413
385 if is_lfs_request:
414 if is_lfs_request:
386 app = scm_app.create_git_lfs_wsgi_app(
415 app = scm_app.create_git_lfs_wsgi_app(
387 repo_path, repo_name, config)
416 repo_path, repo_name, config)
388 else:
417 else:
389 app = scm_app.create_git_wsgi_app(
418 app = scm_app.create_git_wsgi_app(
390 repo_path, repo_name, config)
419 repo_path, repo_name, config)
420
421 log.debug('http-app: starting app handler '
422 'with %s and process request', app)
391 return app(environ, start_response)
423 return app(environ, start_response)
392
424
393 return _git_stream
425 return _git_stream
394
426
395 def is_vcs_view(self, context, request):
427 def is_vcs_view(self, context, request):
396 """
428 """
397 View predicate that returns true if given backend is supported by
429 View predicate that returns true if given backend is supported by
398 defined remotes.
430 defined remotes.
399 """
431 """
400 backend = request.matchdict.get('backend')
432 backend = request.matchdict.get('backend')
401 return backend in self._remotes
433 return backend in self._remotes
402
434
403 def handle_vcs_exception(self, exception, request):
435 def handle_vcs_exception(self, exception, request):
404 _vcs_kind = getattr(exception, '_vcs_kind', '')
436 _vcs_kind = getattr(exception, '_vcs_kind', '')
405 if _vcs_kind == 'repo_locked':
437 if _vcs_kind == 'repo_locked':
406 # Get custom repo-locked status code if present.
438 # Get custom repo-locked status code if present.
407 status_code = request.headers.get('X-RC-Locked-Status-Code')
439 status_code = request.headers.get('X-RC-Locked-Status-Code')
408 return HTTPRepoLocked(
440 return HTTPRepoLocked(
409 title=exception.message, status_code=status_code)
441 title=exception.message, status_code=status_code)
410
442
411 # Re-raise exception if we can not handle it.
443 # Re-raise exception if we can not handle it.
412 log.exception(
444 log.exception(
413 'error occurred handling this request for path: %s', request.path)
445 'error occurred handling this request for path: %s', request.path)
414 raise exception
446 raise exception
415
447
416
448
417 class ResponseFilter(object):
449 class ResponseFilter(object):
418
450
419 def __init__(self, start_response):
451 def __init__(self, start_response):
420 self._start_response = start_response
452 self._start_response = start_response
421
453
422 def __call__(self, status, response_headers, exc_info=None):
454 def __call__(self, status, response_headers, exc_info=None):
423 headers = tuple(
455 headers = tuple(
424 (h, v) for h, v in response_headers
456 (h, v) for h, v in response_headers
425 if not wsgiref.util.is_hop_by_hop(h))
457 if not wsgiref.util.is_hop_by_hop(h))
426 return self._start_response(status, headers, exc_info)
458 return self._start_response(status, headers, exc_info)
427
459
428
460
429 def main(global_config, **settings):
461 def main(global_config, **settings):
430 if MercurialFactory:
462 if MercurialFactory:
431 hgpatches.patch_largefiles_capabilities()
463 hgpatches.patch_largefiles_capabilities()
432 hgpatches.patch_subrepo_type_mapping()
464 hgpatches.patch_subrepo_type_mapping()
433 app = HTTPApplication(settings=settings, global_config=global_config)
465 app = HTTPApplication(settings=settings, global_config=global_config)
434 return app.wsgi_app()
466 return app.wsgi_app()
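The new set_env_from_config helper above copies the authenticated username and client IP from the repo config blob into the WSGI environ before the hg/git stream apps handle the request. The sketch below assumes the unpacked config is a sequence of (section, key, value) triples, as the loop in the diff suggests; the function name is illustrative only.

    import json

    def set_env_from_config_sketch(environ, config):
        # Find the 'rhodecode' entry, decode its JSON payload and expose
        # username/ip to the downstream WSGI app via the environ.
        dict_conf = {}
        for section, key, value in config:
            if section == 'rhodecode':
                dict_conf = json.loads(value)
                break

        if dict_conf.get('username'):
            environ['REMOTE_USER'] = dict_conf['username']
        if dict_conf.get('ip'):
            environ['REMOTE_HOST'] = dict_conf['ip']
        return environ

    # Example with a config shaped like [(section, key, json_payload), ...]:
    environ = {}
    config = [('rhodecode', 'RC_SCM_DATA', '{"username": "alice", "ip": "10.0.0.5"}')]
    set_env_from_config_sketch(environ, config)
    assert environ == {'REMOTE_USER': 'alice', 'REMOTE_HOST': '10.0.0.5'}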