release: Merge default into stable for release preparation
marcink - r572:fd48aa4e merge stable
@@ -1,5 +1,5 @@
 [bumpversion]
-current_version = 4.13.3
+current_version = 4.14.0
 message = release: Bump version {current_version} to {new_version}
 
 [bumpversion:file:vcsserver/VERSION]
@@ -5,12 +5,10 @@ done = false
 done = true
 
 [task:fixes_on_stable]
-done = true
 
 [task:pip2nix_generated]
-done = true
 
 [release]
-state = prepared
-version = 4.13.3
+state = in_progress
+version = 4.14.0
 
@@ -1,6 +1,5 @@
 ################################################################################
 # RhodeCode VCSServer with HTTP Backend - configuration #
-# #
 ################################################################################
 
 
@@ -26,12 +25,13 @@ locale = en_US.UTF-8
 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
 core.binary_dir = ""
 
-## custom exception store path, defaults to TMPDIR
-exception_tracker.store_path =
+## Custom exception store path, defaults to TMPDIR
+## This is used to store exception from RhodeCode in shared directory
+#exception_tracker.store_path =
 
 ## Default cache dir for caches. Putting this into a ramdisk
 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
-## large ammount of space
+## large amount of space
 cache_dir = %(here)s/rcdev/data
 
 ## cache region for storing repo_objects cache
@@ -1,6 +1,5 @@
 ################################################################################
 # RhodeCode VCSServer with HTTP Backend - configuration #
-# #
 ################################################################################
 
 
@@ -47,12 +46,13 @@ locale = en_US.UTF-8
 ## at installation time, e.g /home/user/vcsserver-1/profile/bin
 core.binary_dir = ""
 
-## custom exception store path, defaults to TMPDIR
-exception_tracker.store_path =
+## Custom exception store path, defaults to TMPDIR
+## This is used to store exception from RhodeCode in shared directory
+#exception_tracker.store_path =
 
 ## Default cache dir for caches. Putting this into a ramdisk
 ## can boost performance, eg. /tmpfs/data_ramdisk, however this directory might require
-## large ammount of space
+## large amount of space
 cache_dir = %(here)s/rcdev/data
 
 ## cache region for storing repo_objects cache
@@ -11,17 +11,17 @@ args@
 , ...
 }:
 
-let pkgs_ = (import <nixpkgs> {}); in
+let
+pkgs_ = (import <nixpkgs> {});
+in
 
 let
-
-# TODO: Currently we ignore the passed in pkgs, instead we should use it
-# somehow as a base and apply overlays to it.
-pkgs = import <nixpkgs> {
+pkgs = import <nixpkgs> {
 overlays = [
 (import ./pkgs/overlays.nix)
 ];
-inherit (pkgs_)
+inherit
+(pkgs_)
 system;
 };
 
@@ -40,7 +40,7 @@ let
 in
 !builtins.elem (basename path) [
 ".git" ".hg" "__pycache__" ".eggs" ".idea" ".dev"
-"bower_components" "node_modules"
+"node_modules" "node_binaries"
 "build" "data" "result" "tmp"] &&
 !builtins.elem ext ["egg-info" "pyc"] &&
 # TODO: johbo: This check is wrong, since "path" contains an absolute path,
@@ -49,7 +49,11 @@ let
 
 sources =
 let
-inherit (pkgs.lib) all isString attrValues;
+inherit
+(pkgs.lib)
+all
+isString
+attrValues;
 sourcesConfig = pkgs.config.rc.sources or {};
 in
 # Ensure that sources are configured as strings. Using a path
@@ -121,27 +125,34 @@ let
 
 # python based programs need to be wrapped
 mkdir -p $out/bin
-ln -s ${self.python}/bin/python $out/bin
-ln -s ${self.pyramid}/bin/* $out/bin/
-ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
+ln -s ${self.python}/bin/python $out/bin/
+ln -s ${self.gunicorn}/bin/gunicorn $out/bin/
+ln -s ${self.pyramid}/bin/prequest $out/bin/
+ln -s ${self.pyramid}/bin/pserve $out/bin/
 
 # Symlink version control utilities
 # We ensure that always the correct version is available as a symlink.
 # So that users calling them via the profile path will always use the
-# correct version.
+# correct version. Wrapping is required so those can "import"
+# vcsserver python hooks.
 
 ln -s ${pkgs.git}/bin/git $out/bin
 ln -s ${self.mercurial}/bin/hg $out/bin
 ln -s ${pkgs.subversion}/bin/svn* $out/bin
+
 echo "DONE: created symlinks into $out/bin"
+DEPS="$out/bin/*"
 
-for file in $out/bin/*;
+# wrap only dependency scripts, they require to have full PYTHONPATH set
+# to be able to import all packages
+for file in $DEPS;
 do
 wrapProgram $file \
 --prefix PATH : $PATH \
 --prefix PYTHONPATH : $PYTHONPATH \
 --set PYTHONHASHSEED random
 done
+
 echo "DONE: vcsserver binary wrapping"
 
 '';
@@ -156,12 +167,19 @@ let
 getAttr pythonPackages pkgs;
 
 pythonGeneratedPackages = import ./pkgs/python-packages.nix {
-inherit pkgs;
-inherit (pkgs) fetchurl fetchgit fetchhg;
+inherit
+pkgs;
+inherit
+(pkgs)
+fetchurl
+fetchgit
+fetchhg;
 };
 
 pythonVCSServerOverrides = import ./pkgs/python-packages-overrides.nix {
-inherit pkgs basePythonPackages;
+inherit
+pkgs
+basePythonPackages;
 };
 
 
@@ -8,10 +8,10 @@ self: super: {
 };
 
 patches = [
-./git_patches/docbook2texi.patch
-./git_patches/symlinks-in-bin.patch
-./git_patches/git-sh-i18n.patch
-./git_patches/ssh-path.patch
+./patches/git/docbook2texi.patch
+./patches/git/symlinks-in-bin.patch
+./patches/git/git-sh-i18n.patch
+./patches/git/ssh-path.patch
 ];
 
 });
NO CONTENT: file renamed from pkgs/git_patches/docbook2texi.patch to pkgs/patches/git/docbook2texi.patch
NO CONTENT: file renamed from pkgs/git_patches/git-sh-i18n.patch to pkgs/patches/git/git-sh-i18n.patch
NO CONTENT: file renamed from pkgs/git_patches/ssh-path.patch to pkgs/patches/git/ssh-path.patch
@@ -5,22 +5,22 @@
 
 self: super: {
 "atomicwrites" = super.buildPythonPackage {
-name = "atomicwrites-1.1.5";
+name = "atomicwrites-1.2.1";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/a1/e1/2d9bc76838e6e6667fde5814aa25d7feb93d6fa471bf6816daac2596e8b2/atomicwrites-1.1.5.tar.gz";
-sha256 = "11bm90fwm2avvf4f3ib8g925w7jr4m11vcsinn1bi6ns4bm32214";
+url = "https://files.pythonhosted.org/packages/ac/ed/a311712ef6b4355035489f665e63e1a73f9eb371929e3c98e5efd451069e/atomicwrites-1.2.1.tar.gz";
+sha256 = "1vmkbw9j0qammwxbxycrs39gvdg4lc2d4lk98kwf8ag2manyi6pc";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 "attrs" = super.buildPythonPackage {
-name = "attrs-18.1.0";
+name = "attrs-18.2.0";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/e4/ac/a04671e118b57bee87dabca1e0f2d3bda816b7a551036012d0ca24190e71/attrs-18.1.0.tar.gz";
-sha256 = "0yzqz8wv3w1srav5683a55v49i0szkm47dyrnkd56fqs8j8ypl70";
+url = "https://files.pythonhosted.org/packages/0f/9e/26b1d194aab960063b266170e53c39f73ea0d0d3f5ce23313e0ec8ee9bdf/attrs-18.2.0.tar.gz";
+sha256 = "0s9ydh058wmmf5v391pym877x4ahxg45dw6a0w4c7s5wgpigdjqh";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -77,14 +77,14 @@ self: super: {
 };
 };
 "coverage" = super.buildPythonPackage {
-name = "coverage-3.7.1";
+name = "coverage-4.5.1";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/09/4f/89b06c7fdc09687bca507dc411c342556ef9c5a3b26756137a4878ff19bf/coverage-3.7.1.tar.gz";
-sha256 = "0knlbq79g2ww6xzsyknj9rirrgrgc983dpa2d9nkdf31mb2a3bni";
+url = "https://files.pythonhosted.org/packages/35/fe/e7df7289d717426093c68d156e0fd9117c8f4872b6588e8a8928a0f68424/coverage-4.5.1.tar.gz";
+sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
 };
 meta = {
-license = [ pkgs.lib.licenses.bsdOriginal ];
+license = [ pkgs.lib.licenses.asl20 ];
 };
 };
 "decorator" = super.buildPythonPackage {
@@ -99,11 +99,11 @@ self: super: {
 };
 };
 "dogpile.cache" = super.buildPythonPackage {
-name = "dogpile.cache-0.6.6";
+name = "dogpile.cache-0.6.7";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/48/ca/604154d835c3668efb8a31bd979b0ea4bf39c2934a40ffecc0662296cb51/dogpile.cache-0.6.6.tar.gz";
-sha256 = "1h8n1lxd4l2qvahfkiinljkqz7pww7w3sgag0j8j9ixbl2h4wk84";
+url = "https://files.pythonhosted.org/packages/ee/bd/440da735a11c6087eed7cc8747fc4b995cbac2464168682f8ee1c8e43844/dogpile.cache-0.6.7.tar.gz";
+sha256 = "1aw8rx8vhb75y7zc6gi67g21sw057jdx7i8m3jq7kf3nqavxx9zw";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal ];
@@ -154,14 +154,14 @@ self: super: {
 };
 };
 "gevent" = super.buildPythonPackage {
-name = "gevent-1.3.5";
+name = "gevent-1.3.6";
 doCheck = false;
 propagatedBuildInputs = [
 self."greenlet"
 ];
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/e6/0a/fc345c6e6161f84484870dbcaa58e427c10bd9bdcd08a69bed3d6b398bf1/gevent-1.3.5.tar.gz";
-sha256 = "1w3gydxirgd2f60c5yv579w4903ds9s4g3587ik4jby97hgqc5bz";
+url = "https://files.pythonhosted.org/packages/49/13/aa4bb3640b5167fe58875d3d7e65390cdb14f9682a41a741a566bb560842/gevent-1.3.6.tar.gz";
+sha256 = "1ih4k73dqz2zb561hda99vbanja3m6cdch3mgxxn1mla3qwkqhbv";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -179,11 +179,11 @@ self: super: {
 };
 };
 "greenlet" = super.buildPythonPackage {
-name = "greenlet-0.4.13";
+name = "greenlet-0.4.15";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/13/de/ba92335e9e76040ca7274224942282a80d54f85e342a5e33c5277c7f87eb/greenlet-0.4.13.tar.gz";
-sha256 = "1r412gfx25jrdiv444prmz5a8igrfabwnwqyr6b52ypq7ga87vqg";
+url = "https://files.pythonhosted.org/packages/f8/e8/b30ae23b45f69aa3f024b46064c0ac8e5fcb4f22ace0dca8d6f9c8bbe5e7/greenlet-0.4.15.tar.gz";
+sha256 = "1g4g1wwc472ds89zmqlpyan3fbnzpa8qm48z3z1y6mlk44z485ll";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -227,11 +227,11 @@ self: super: {
 };
 };
 "hupper" = super.buildPythonPackage {
-name = "hupper-1.3";
+name = "hupper-1.3.1";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/51/0c/96335b1f2f32245fb871eea5bb9773196505ddb71fad15190056a282df9e/hupper-1.3.tar.gz";
-sha256 = "1pkyrm9c2crc32ps00k1ahnc5clj3pjwiarc7j0x8aykwih7ff10";
+url = "https://files.pythonhosted.org/packages/cf/4b/467b826a84c8594b81f414b5ab6794e981951dac90ca40abaf9ea1cb36b0/hupper-1.3.1.tar.gz";
+sha256 = "03mf13n6i4dd60wlb9m99ddl4m3lmly70cjp7f82vdkibfl1v6l9";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -371,15 +371,15 @@ self: super: {
 };
 };
 "pathlib2" = super.buildPythonPackage {
-name = "pathlib2-2.3.0";
+name = "pathlib2-2.3.2";
 doCheck = false;
 propagatedBuildInputs = [
 self."six"
 self."scandir"
 ];
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/a1/14/df0deb867c2733f7d857523c10942b3d6612a1b222502fdffa9439943dfb/pathlib2-2.3.0.tar.gz";
-sha256 = "1cx5gs2v9j2vnzmcrbq5l8fq2mwrr1h6pyf1sjdji2w1bavm09fk";
+url = "https://files.pythonhosted.org/packages/db/a8/7d6439c1aec525ed70810abee5b7d7f3aa35347f59bc28343e8f62019aa2/pathlib2-2.3.2.tar.gz";
+sha256 = "10yb0iv5x2hs631rcppkhbddx799d3h8pcwmkbh2a66ns3w71ccf";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -400,14 +400,14 @@ self: super: {
 };
 };
 "pickleshare" = super.buildPythonPackage {
-name = "pickleshare-0.7.4";
+name = "pickleshare-0.7.5";
 doCheck = false;
 propagatedBuildInputs = [
 self."pathlib2"
 ];
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/69/fe/dd137d84daa0fd13a709e448138e310d9ea93070620c9db5454e234af525/pickleshare-0.7.4.tar.gz";
-sha256 = "0yvk14dzxk7g6qpr7iw23vzqbsr0dh4ij4xynkhnzpfz4xr2bac4";
+url = "https://files.pythonhosted.org/packages/d8/b6/df3c1c9b616e9c0edbc4fbab6ddd09df9535849c64ba51fcb6531c32d4d8/pickleshare-0.7.5.tar.gz";
+sha256 = "1jmghg3c53yp1i8cm6pcrm280ayi8621rwyav9fac7awjr3kss47";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -443,11 +443,11 @@ self: super: {
 };
 };
 "pluggy" = super.buildPythonPackage {
-name = "pluggy-0.6.0";
+name = "pluggy-0.8.0";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/11/bf/cbeb8cdfaffa9f2ea154a30ae31a9d04a1209312e2919138b4171a1f8199/pluggy-0.6.0.tar.gz";
-sha256 = "1zqckndfn85l1cd8pndw212zg1bq9fkg1nnj32kp2mppppsyg2kz";
+url = "https://files.pythonhosted.org/packages/65/25/81d0de17cd00f8ca994a4e74e3c4baf7cd25072c0b831dad5c7d9d6138f8/pluggy-0.8.0.tar.gz";
+sha256 = "1580p47l2zqzsza8jcnw1h2wh3vvmygk6ly8bvi4w0g8j14sjys4";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -469,11 +469,11 @@ self: super: {
 };
 };
 "psutil" = super.buildPythonPackage {
-name = "psutil-5.4.6";
+name = "psutil-5.4.7";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/51/9e/0f8f5423ce28c9109807024f7bdde776ed0b1161de20b408875de7e030c3/psutil-5.4.6.tar.gz";
-sha256 = "1xmw4qi6hnrhw81xqzkvmsm9im7j2vkk4v26ycjwq2jczqsmlvk8";
+url = "https://files.pythonhosted.org/packages/7d/9a/1e93d41708f8ed2b564395edfa3389f0fd6d567597401c2e5e2775118d8b/psutil-5.4.7.tar.gz";
+sha256 = "0fsgmvzwbdbszkwfnqhib8jcxm4w6zyhvlxlcda0rfm5cyqj4qsv";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal ];
@@ -491,11 +491,11 @@ self: super: {
 };
 };
 "py" = super.buildPythonPackage {
-name = "py-1.5.3";
+name = "py-1.6.0";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/f7/84/b4c6e84672c4ceb94f727f3da8344037b62cee960d80e999b1cd9b832d83/py-1.5.3.tar.gz";
-sha256 = "10gq2lckvgwlk9w6yzijhzkarx44hsaknd0ypa08wlnpjnsgmj99";
+url = "https://files.pythonhosted.org/packages/4f/38/5f427d1eedae73063ce4da680d2bae72014995f9fdeaa57809df61c968cd/py-1.6.0.tar.gz";
+sha256 = "1wcs3zv9wl5m5x7p16avqj2gsrviyb23yvc3pr330isqs0sh98q6";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -552,7 +552,7 @@ self: super: {
 };
 };
 "pytest" = super.buildPythonPackage {
-name = "pytest-3.6.0";
+name = "pytest-3.8.2";
 doCheck = false;
 propagatedBuildInputs = [
 self."py"
@@ -563,25 +563,26 @@ self: super: {
 self."atomicwrites"
 self."pluggy"
 self."funcsigs"
+self."pathlib2"
 ];
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/67/6a/5bcdc22f8dbada1d2910d6e1a3a03f6b14306c78f81122890735b28be4bf/pytest-3.6.0.tar.gz";
-sha256 = "0bdfazvjjbxssqzyvkb3m2x2in7xv56ipr899l00s87k7815sm9r";
+url = "https://files.pythonhosted.org/packages/5f/d2/7f77f406ac505abda02ab4afb50d06ebf304f6ea42fca34f8f37529106b2/pytest-3.8.2.tar.gz";
+sha256 = "18nrwzn61kph2y6gxwfz9ms68rfvr9d4vcffsxng9p7jk9z18clk";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
 };
 };
 "pytest-cov" = super.buildPythonPackage {
-name = "pytest-cov-2.5.1";
+name = "pytest-cov-2.6.0";
 doCheck = false;
 propagatedBuildInputs = [
 self."pytest"
 self."coverage"
 ];
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/24/b4/7290d65b2f3633db51393bdf8ae66309b37620bc3ec116c5e357e3e37238/pytest-cov-2.5.1.tar.gz";
-sha256 = "0bbfpwdh9k3636bxc88vz9fa7vf4akchgn513ql1vd0xy4n7bah3";
+url = "https://files.pythonhosted.org/packages/d9/e2/58f90a316fbd94dd50bf5c826a23f3f5d079fb3cc448c1e9f0e3c33a3d2a/pytest-cov-2.6.0.tar.gz";
+sha256 = "0qnpp9y3ygx4jk4pf5ad71fh2skbvnr6gl54m7rg5qysnx4g0q73";
 };
 meta = {
 license = [ pkgs.lib.licenses.bsdOriginal pkgs.lib.licenses.mit ];
@@ -630,14 +631,14 @@ self: super: {
 };
 };
 "pytest-timeout" = super.buildPythonPackage {
-name = "pytest-timeout-1.2.1";
+name = "pytest-timeout-1.3.2";
 doCheck = false;
 propagatedBuildInputs = [
 self."pytest"
 ];
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/be/e9/a9106b8bc87521c6813060f50f7d1fdc15665bc1bbbe71c0ffc1c571aaa2/pytest-timeout-1.2.1.tar.gz";
-sha256 = "1kdp6qbh5v1168l99rba5yfzvy05gmzkmkhldgp36p9xcdjd5dv8";
+url = "https://files.pythonhosted.org/packages/8c/3e/1b6a319d12ae7baa3acb7c18ff2c8630a09471a0319d43535c683b4d03eb/pytest-timeout-1.3.2.tar.gz";
+sha256 = "09wnmzvnls2mnsdz7x3c3sk2zdp6jl4dryvyj5i8hqz16q2zq5qi";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit { fullName = "DFSG approved"; } ];
@@ -655,7 +656,7 @@ self: super: {
 };
 };
 "rhodecode-vcsserver" = super.buildPythonPackage {
-name = "rhodecode-vcsserver-4.13.3";
+name = "rhodecode-vcsserver-4.14.0";
 buildInputs = [
 self."pytest"
 self."py"
@@ -663,8 +664,8 @@ self: super: {
 self."pytest-sugar"
 self."pytest-runner"
 self."pytest-profiling"
+self."pytest-timeout"
 self."gprof2dot"
-self."pytest-timeout"
 self."mock"
 self."webtest"
 self."cov-core"
@@ -695,7 +696,6 @@ self: super: {
 self."repoze.lru"
 self."simplejson"
 self."subprocess32"
-self."setproctitle"
 self."subvertpy"
 self."six"
 self."translationstring"
@@ -706,6 +706,7 @@ self: super: {
 self."greenlet"
 self."gunicorn"
 self."waitress"
+self."setproctitle"
 self."ipdb"
 self."ipython"
 self."pytest"
@@ -714,8 +715,8 @@ self: super: {
 self."pytest-sugar"
 self."pytest-runner"
 self."pytest-profiling"
+self."pytest-timeout"
 self."gprof2dot"
-self."pytest-timeout"
 self."mock"
 self."webtest"
 self."cov-core"
@@ -749,11 +750,11 @@ self: super: {
 };
 };
 "setuptools" = super.buildPythonPackage {
-name = "setuptools-40.1.0";
+name = "setuptools-40.4.3";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/5a/df/b2e3d9693bb0dcbeac516a73dd7a9eb82b126ae52e4a74605a9b01beddd5/setuptools-40.1.0.zip";
-sha256 = "0w1blx5ajga5y15dci0mddk49cf2xpq0mp7rp7jrqr2diqk00ib6";
+url = "https://files.pythonhosted.org/packages/6e/9c/6a003320b00ef237f94aa74e4ad66c57a7618f6c79d67527136e2544b728/setuptools-40.4.3.zip";
+sha256 = "058v6zns4634n4al2nmmvp15j8nrgwn8wjrbdks47wk3vm05gg5c";
 };
 meta = {
 license = [ pkgs.lib.licenses.mit ];
@@ -793,11 +794,11 @@ self: super: {
 };
 };
 "subprocess32" = super.buildPythonPackage {
-name = "subprocess32-3.5.1";
+name = "subprocess32-3.5.2";
 doCheck = false;
 src = fetchurl {
-url = "https://files.pythonhosted.org/packages/de/fb/fd3e91507021e2aecdb081d1b920082628d6b8869ead845e3e87b3d2e2ca/subprocess32-3.5.1.tar.gz";
-sha256 = "0wgi3bfnssid1g6h0v803z3k1wjal6il16nr3r9c587cfzwfkv0q";
+url = "https://files.pythonhosted.org/packages/c3/5f/7117737fc7114061837a4f51670d863dd7f7f9c762a6546fa8a0dcfe61c8/subprocess32-3.5.2.tar.gz";
+sha256 = "11v62shwmdys48g7ncs3a8jwwnkcl8d4zcwy6dk73z1zy2f9hazb";
 };
 meta = {
 license = [ pkgs.lib.licenses.psfl ];
@@ -1,14 +1,12 @@
 # This file defines how to "build" for packaging.
 
-{ pkgs ? import <nixpkgs> {}
-, doCheck ? true
+{ doCheck ? true
 }:
 
 let
 vcsserver = import ./default.nix {
 inherit
-doCheck
-pkgs;
+doCheck;
 };
 
 in {
@@ -2,9 +2,9 @@
 
 # our custom configobj
 https://code.rhodecode.com/upstream/configobj/archive/a11ff0a0bd4fbda9e3a91267e720f88329efb4a6.tar.gz?md5=9916c524ea11a6c418217af6b28d4b3c#egg=configobj==5.0.6
-atomicwrites==1.1.5
-attrs==18.1.0
-dogpile.cache==0.6.6
+atomicwrites==1.2.1
+attrs==18.2.0
+dogpile.cache==0.6.7
 dogpile.core==0.4.1
 decorator==4.1.2
 dulwich==0.13.0
@@ -16,16 +16,15 @@ mercurial==4.6.2
 msgpack-python==0.5.6
 
 pastedeploy==1.5.2
-psutil==5.4.6
+psutil==5.4.7
 pyramid==1.9.2
 pyramid-mako==1.0.2
 
 pygments==2.2.0
-pathlib2==2.3.0
+pathlib2==2.3.2
 repoze.lru==0.7
 simplejson==3.11.1
-subprocess32==3.5.1
-setproctitle==1.1.10
+subprocess32==3.5.2
 subvertpy==0.10.1
 
 six==1.11.0
@@ -35,10 +34,11 @@ zope.deprecation==4.3.0
 zope.interface==4.5.0
 
 ## http servers
-gevent==1.3.5
-greenlet==0.4.13
+gevent==1.3.6
+greenlet==0.4.15
 gunicorn==19.9.0
 waitress==1.1.0
+setproctitle==1.1.10
 
 ## debug
 ipdb==0.11.0
@@ -1,14 +1,14 @@
 # test related requirements
-pytest==3.6.0
-py==1.5.3
-pytest-cov==2.5.1
+pytest==3.8.2
+py==1.6.0
+pytest-cov==2.6.0
 pytest-sugar==0.9.1
 pytest-runner==4.2.0
 pytest-profiling==1.3.0
+pytest-timeout==1.3.2
 gprof2dot==2017.9.19
-pytest-timeout==1.2.1
 
 mock==1.0.1
 webtest==2.0.29
 cov-core==1.15.0
-coverage==3.7.1
+coverage==4.5.1
@@ -131,9 +131,6 @@ setup(
 'Programming Language :: Python :: 2.7',
 ],
 entry_points={
-'console_scripts': [
-'vcsserver=vcsserver.main:main',
-],
 'paste.app_factory': ['main=vcsserver.http_main:main']
 },
 )
@@ -10,7 +10,6 @@ let
 
 vcsserver = import ./default.nix {
 inherit
-pkgs
 doCheck;
 };
 
@@ -1,1 +1,1 @@
-4.13.3
\ No newline at end of file
+4.14.0
\ No newline at end of file
@@ -14,7 +14,7 @@
 # You should have received a copy of the GNU General Public License
 # along with this program; if not, write to the Free Software Foundation,
 # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-
+import collections
 import logging
 import os
 import posixpath as vcspath
@@ -56,9 +56,11 @@ def reraise_safe_exceptions(func):
 return func(*args, **kwargs)
 except (ChecksumMismatch, WrongObjectException, MissingCommitError,
 ObjectMissing) as e:
-raise exceptions.LookupException(e)(e.message)
+exc = exceptions.LookupException(e)
+raise exc(e)
 except (HangupException, UnexpectedCommandError) as e:
-raise exceptions.VcsException(e)(e.message)
+exc = exceptions.VcsException(e)
+raise exc(e)
 except Exception as e:
 # NOTE(marcink): becuase of how dulwich handles some exceptions
 # (KeyError on empty repos), we cannot track this and catch all
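Note (illustration, not part of the commit): the hunk above replaces raise exceptions.LookupException(e)(e.message) with a two-step form that first obtains the wrapper exception from the factory and then raises it with the original error object, avoiding the deprecated e.message attribute. A minimal, self-contained Python sketch of that translate-and-reraise pattern; lookup_exception, reraise_as_lookup and find_ref are hypothetical stand-ins, not the vcsserver implementation:

    import functools


    class LookupException(Exception):
        """Stand-in for the translated exception type (illustration only)."""


    def lookup_exception(org_exc):
        # Hypothetical factory: returns an exception *class* that remembers the
        # original error type, mirroring the exc = ...; raise exc(e) steps above.
        class _LookupException(LookupException):
            org_exc_type = type(org_exc).__name__
        return _LookupException


    def reraise_as_lookup(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except KeyError as e:
                exc = lookup_exception(e)  # build the wrapper class first
                raise exc(e)               # then raise it with the original error
        return wrapper


    @reraise_as_lookup
    def find_ref(refs, name):
        return refs[name]

Calling find_ref({}, 'refs/heads/master') then surfaces the translated exception instead of the bare KeyError, which is the behaviour the decorator in this file provides for dulwich errors.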
@@ -98,7 +100,7 @@ class GitRemote(object):
 
 def __init__(self, factory):
 self._factory = factory
-
+self.peeled_ref_marker = '^{}'
 self._bulk_methods = {
 "author": self.commit_attribute,
 "date": self.get_object_attrs,
@@ -279,7 +281,8 @@ class GitRemote(object):
 
 @reraise_safe_exceptions
 def clone(self, wire, url, deferred, valid_refs, update_after_clone):
-remote_refs = self.fetch(wire, url, apply_refs=False)
+# TODO(marcink): deprecate this method. Last i checked we don't use it anymore
+remote_refs = self.pull(wire, url, apply_refs=False)
 repo = self._factory.repo(wire)
 if isinstance(valid_refs, list):
 valid_refs = tuple(valid_refs)
@@ -396,7 +399,7 @@ class GitRemote(object):
 return commit.id
 
 @reraise_safe_exceptions
-def fetch(self, wire, url, apply_refs=True, refs=None):
+def pull(self, wire, url, apply_refs=True, refs=None, update_after=False):
 if url != 'default' and '://' not in url:
 client = LocalGitClient(url)
 else:
@@ -431,25 +434,73 @@ class GitRemote(object):
 # TODO: johbo: Needs proper test coverage with a git repository
 # that contains a tag object, so that we would end up with
 # a peeled ref at this point.
-PEELED_REF_MARKER = '^{}'
 for k in remote_refs:
-if k.endswith(PEELED_REF_MARKER):
-log.info("Skipping peeled reference %s", k)
+if k.endswith(self.peeled_ref_marker):
+log.debug("Skipping peeled reference %s", k)
 continue
 repo[k] = remote_refs[k]
 
-if refs:
+if refs and not update_after:
 # mikhail: explicitly set the head to the last ref.
 repo['HEAD'] = remote_refs[refs[-1]]
 
-# TODO: mikhail: should we return remote_refs here to be
-# consistent?
-else:
+if update_after:
+# we want to checkout HEAD
+repo["HEAD"] = remote_refs["HEAD"]
+index.build_index_from_tree(repo.path, repo.index_path(),
+repo.object_store, repo["HEAD"].tree)
+return remote_refs
+
+@reraise_safe_exceptions
+def sync_fetch(self, wire, url, refs=None):
+repo = self._factory.repo(wire)
+if refs and not isinstance(refs, (list, tuple)):
+refs = [refs]
+
+# get all remote refs we'll use to fetch later
+output, __ = self.run_git_command(
+wire, ['ls-remote', url], fail_on_stderr=False,
+_copts=['-c', 'core.askpass=""'],
+extra_env={'GIT_TERMINAL_PROMPT': '0'})
+
+remote_refs = collections.OrderedDict()
+fetch_refs = []
+
+for ref_line in output.splitlines():
+sha, ref = ref_line.split('\t')
+sha = sha.strip()
+if ref in remote_refs:
+# duplicate, skip
+continue
+if ref.endswith(self.peeled_ref_marker):
+log.debug("Skipping peeled reference %s", ref)
+continue
+# don't sync HEAD
+if ref in ['HEAD']:
+continue
+
+remote_refs[ref] = sha
+
+if refs and sha in refs:
+# we filter fetch using our specified refs
+fetch_refs.append('{}:{}'.format(ref, ref))
+elif not refs:
+fetch_refs.append('{}:{}'.format(ref, ref))
+
+if fetch_refs:
+_out, _err = self.run_git_command(
+wire, ['fetch', url, '--force', '--prune', '--'] + fetch_refs,
+fail_on_stderr=False,
+_copts=['-c', 'core.askpass=""'],
+extra_env={'GIT_TERMINAL_PROMPT': '0'})
+
 return remote_refs
 
 @reraise_safe_exceptions
 def sync_push(self, wire, url, refs=None):
-if self.check_url(url, wire):
+if not self.check_url(url, wire):
+return
+
 repo = self._factory.repo(wire)
 self.run_git_command(
 wire, ['push', url, '--mirror'], fail_on_stderr=False,
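Note (illustration, not part of the commit): the new sync_fetch above lists remote refs with git ls-remote, skips peeled "^{}" entries and HEAD, and only then fetches the selected refs. A rough, hedged sketch of the same ls-remote parsing idea in plain Python; it shells out to git directly instead of going through run_git_command, and the function name and flow are assumptions for illustration only:

    import collections
    import os
    import subprocess


    def ls_remote_refs(url):
        # Disable interactive credential prompts, as the hunk above does.
        env = dict(os.environ, GIT_TERMINAL_PROMPT='0')
        output = subprocess.check_output(['git', 'ls-remote', url], env=env)
        refs = collections.OrderedDict()
        for line in output.decode('utf-8').splitlines():
            # each ls-remote line is "<sha>\t<ref>"
            sha, ref = line.split('\t')
            if ref.endswith('^{}') or ref == 'HEAD':
                # skip peeled tag refs and HEAD, mirroring sync_fetch
                continue
            refs.setdefault(ref, sha.strip())
        return refs

The tab split mirrors the ref_line.split('\t') in the hunk; the resulting mapping of ref name to sha is what sync_fetch uses to build its "ref:ref" fetch refspecs.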
@@ -644,9 +695,9 @@ class GitRemote(object):
 gitenv['GIT_DISCOVERY_ACROSS_FILESYSTEM'] = '1'
 
 cmd = [settings.GIT_EXECUTABLE] + _copts + cmd
+_opts = {'env': gitenv, 'shell': False}
 
 try:
-_opts = {'env': gitenv, 'shell': False}
 _opts.update(opts)
 p = subprocessio.SubprocessIOChunker(cmd, **_opts)
 
@@ -654,7 +705,9 @@ class GitRemote(object):
 except (EnvironmentError, OSError) as err:
 cmd = ' '.join(cmd) # human friendly CMD
 tb_err = ("Couldn't run git command (%s).\n"
-"Original error was:%s\n" % (cmd, err))
+"Original error was:%s\n"
+"Call options:%s\n"
+% (cmd, err, _opts))
 log.exception(tb_err)
 if safe_call:
 return '', err
@@ -585,7 +585,9 @@ class HgRemote(object):
 
 @reraise_safe_exceptions
 def sync_push(self, wire, url):
-if self.check_url(url, wire['config']):
+if not self.check_url(url, wire['config']):
+return
+
 repo = self._factory.repo(wire)
 
 # Disable any prompts for this repo
@@ -214,6 +214,30 @@ def _check_heads(repo, start, end, commi
 return []
 
 
+def _get_git_env():
+env = {}
+for k, v in os.environ.items():
+if k.startswith('GIT'):
+env[k] = v
+
+# serialized version
+return [(k, v) for k, v in env.items()]
+
+
+def _get_hg_env(old_rev, new_rev, txnid, repo_path):
+env = {}
+for k, v in os.environ.items():
+if k.startswith('HG'):
+env[k] = v
+
+env['HG_NODE'] = old_rev
+env['HG_NODE_LAST'] = new_rev
+env['HG_TXNID'] = txnid
+env['HG_PENDING'] = repo_path
+
+return [(k, v) for k, v in env.items()]
+
+
 def repo_size(ui, repo, **kwargs):
 extras = _extras_from_ui(ui)
 return _call_hook('repo_size', extras, HgMessageWriter(ui))
@@ -260,6 +284,7 @@ def pre_push(ui, repo, node=None, **kwar
 for branch, commits in branches.items():
 old_rev = kwargs.get('node_last') or commits[0]
 rev_data.append({
+'total_commits': len(commits),
 'old_rev': old_rev,
 'new_rev': commits[-1],
 'ref': '',
@@ -270,7 +295,16 @@ def pre_push(ui, repo, node=None, **kwar
 for push_ref in rev_data:
 push_ref['multiple_heads'] = _heads
 
+repo_path = os.path.join(
+extras.get('repo_store', ''), extras.get('repository', ''))
+push_ref['hg_env'] = _get_hg_env(
+old_rev=push_ref['old_rev'],
+new_rev=push_ref['new_rev'], txnid=kwargs.get('txnid'),
+repo_path=repo_path)
+
+extras['hook_type'] = kwargs.get('hooktype', 'pre_push')
 extras['commit_ids'] = rev_data
+
 return _call_hook('pre_push', extras, HgMessageWriter(ui))
 
 
@@ -319,6 +353,7 @@ def post_push(ui, repo, node, **kwargs):
 if hasattr(ui, '_rc_pushkey_branches'):
 bookmarks = ui._rc_pushkey_branches
 
+extras['hook_type'] = kwargs.get('hooktype', 'post_push')
 extras['commit_ids'] = commit_ids
 extras['new_refs'] = {
 'branches': branches,
@@ -426,6 +461,10 @@ def _parse_git_ref_lines(revision_lines)
 ref_data = ref.split('/', 2)
 if ref_data[1] in ('tags', 'heads'):
 rev_data.append({
+# NOTE(marcink):
+# we're unable to tell total_commits for git at this point
+# but we set the variable for consistency with GIT
+'total_commits': -1,
 'old_rev': old_rev,
 'new_rev': new_rev,
 'ref': ref,
@@ -455,8 +494,7 @@ def git_pre_receive(unused_repo_path, re
 
 for push_ref in rev_data:
 # store our git-env which holds the temp store
-push_ref['git_env'] = [
-(k, v) for k, v in os.environ.items() if k.startswith('GIT')]
+push_ref['git_env'] = _get_git_env()
 push_ref['pruned_sha'] = ''
 if not detect_force_push:
 # don't check for forced-push when we don't need to
@@ -476,6 +514,7 @@ def git_pre_receive(unused_repo_path, re
 if stdout:
 push_ref['pruned_sha'] = stdout.splitlines()
 
+extras['hook_type'] = 'pre_receive'
 extras['commit_ids'] = rev_data
 return _call_hook('pre_push', extras, GitMessageWriter())
 
@@ -555,6 +594,7 @@ def git_post_receive(unused_repo_path, r
 tags.append(push_ref['name'])
 git_revs.append('tag=>%s' % push_ref['name'])
 
+extras['hook_type'] = 'post_receive'
 extras['commit_ids'] = git_revs
 extras['new_refs'] = {
 'branches': branches,
@@ -586,6 +626,21 @@ def _get_extras_from_txn_id(path, txn_id
 return extras
 
 
+def _get_extras_from_commit_id(commit_id, path):
+extras = {}
+try:
+cmd = ['svnlook', 'pget',
+'-r', commit_id,
+'--revprop', path, 'rc-scm-extras']
+stdout, stderr = subprocessio.run_command(
+cmd, env=os.environ.copy())
+extras = json.loads(base64.urlsafe_b64decode(stdout))
+except Exception:
+log.exception('Failed to extract extras info from commit_id')
+
+return extras
+
+
 def svn_pre_commit(repo_path, commit_data, env):
 path, txn_id = commit_data
 branches = []
@@ -602,6 +657,7 @@ def svn_pre_commit(repo_path, commit_dat
 extras['commit_ids'] = []
 extras['txn_id'] = txn_id
 extras['new_refs'] = {
+'total_commits': 1,
 'branches': branches,
 'bookmarks': [],
 'tags': tags,
@@ -610,21 +666,6 @@ def svn_pre_commit(repo_path, commit_dat
 return _call_hook('pre_push', extras, SvnMessageWriter())
 
 
-def _get_extras_from_commit_id(commit_id, path):
-extras = {}
-try:
-cmd = ['svnlook', 'pget',
-'-r', commit_id,
-'--revprop', path, 'rc-scm-extras']
-stdout, stderr = subprocessio.run_command(
-cmd, env=os.environ.copy())
-extras = json.loads(base64.urlsafe_b64decode(stdout))
-except Exception:
-log.exception('Failed to extract extras info from commit_id')
-
-return extras
-
-
 def svn_post_commit(repo_path, commit_data, env):
 """
 commit_data is path, rev, txn_id
@@ -647,6 +688,7 @@ def svn_post_commit(repo_path, commit_da
 'branches': branches,
 'bookmarks': [],
 'tags': tags,
+'total_commits': 1,
 }
 
 if 'repo_size' in extras['hooks']:
@@ -144,7 +144,7 @@ class VCS(object):
 
 def _configure_locale(self):
 if self.locale:
-log.info('Settings locale: `LC_ALL` to %s' % self.locale)
+log.info('Settings locale: `LC_ALL` to %s', self.locale)
 else:
 log.info(
 'Configuring locale subsystem based on environment variables')
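Note (illustration, not part of the commit): this hunk and the logging changes in the following files switch from eager "%" interpolation to passing the arguments to the logger, so the message is only formatted if the record is actually emitted and the arguments stay available to logging handlers. A small generic sketch of the difference, not vcsserver code:

    import logging

    log = logging.getLogger(__name__)


    def configure_locale(locale):
        # Eager: the string is built even when INFO is disabled.
        # log.info('Settings locale: `LC_ALL` to %s' % locale)

        # Lazy: interpolation happens only when the record is emitted.
        log.info('Settings locale: `LC_ALL` to %s', locale)


    if __name__ == '__main__':
        logging.basicConfig(level=logging.INFO)
        configure_locale('en_US.UTF-8')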
@@ -58,7 +58,7 @@ class LRUDictDebug(LRUDict):
 fmt = '\n'
 for cnt, elem in enumerate(self.keys()):
 fmt += '%s - %s\n' % (cnt+1, safe_str(elem))
-log.debug('current LRU keys (%s):%s' % (elems_cnt, fmt))
+log.debug('current LRU keys (%s):%s', elems_cnt, fmt)
 
 def __getitem__(self, key):
 self._report_keys()
@@ -77,7 +77,7 @@ def no_newline_id_generator(test_name):
 nicer output of progress of test
 """
 org_name = test_name
-test_name = test_name\
+test_name = str(test_name)\
 .replace('\n', '_N') \
 .replace('\r', '_N') \
 .replace('\t', '_T') \
@@ -61,7 +61,7 @@ class TestGitFetch(object):
 
 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
 mock_fetch.side_effect = side_effect
-self.remote_git.fetch(wire=None, url='/tmp/', apply_refs=False)
+self.remote_git.pull(wire=None, url='/tmp/', apply_refs=False)
 determine_wants = self.mock_repo.object_store.determine_wants_all
 determine_wants.assert_called_once_with(SAMPLE_REFS)
 
@@ -78,7 +78,7 @@ class TestGitFetch(object):
 
 with patch('dulwich.client.LocalGitClient.fetch') as mock_fetch:
 mock_fetch.side_effect = side_effect
-self.remote_git.fetch(
+self.remote_git.pull(
 wire=None, url='/tmp/', apply_refs=False,
 refs=selected_refs.keys())
 determine_wants = self.mock_repo.object_store.determine_wants_all
@@ -70,7 +70,7 @@ def test_git_post_receive_calls_repo_siz
 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
 hooks.git_post_receive(
 None, '', {'RC_SCM_DATA': json.dumps(extras)})
-extras.update({'commit_ids': [],
+extras.update({'commit_ids': [], 'hook_type': 'post_receive',
 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
 expected_calls = [
 mock.call('repo_size', extras, mock.ANY),
@@ -84,7 +84,7 @@ def test_git_post_receive_does_not_call_
 with mock.patch.object(hooks, '_call_hook') as call_hook_mock:
 hooks.git_post_receive(
 None, '', {'RC_SCM_DATA': json.dumps(extras)})
-extras.update({'commit_ids': [],
+extras.update({'commit_ids': [], 'hook_type': 'post_receive',
 'new_refs': {'bookmarks': [], 'branches': [], 'tags': []}})
 expected_calls = [
 mock.call('post_push', extras, mock.ANY)
@@ -46,10 +46,8 @@ class RequestWrapperTween(object):
 finally:
 end = time.time()
 
-log.info('IP: %s Request to path: `%s` time: %.3fs' % (
-'127.0.0.1',
-safe_str(get_access_path(request)), end - start)
-)
+log.info('IP: %s Request to path: `%s` time: %.3fs',
+'127.0.0.1', safe_str(get_access_path(request)), end - start)
 
 return response
 