@@ -0,0 +1,41 @@
+|RCE| 4.16.2 |RNS|
+------------------
+
+Release Date
+^^^^^^^^^^^^
+
+- 2019-04-02
+
+
+New Features
+^^^^^^^^^^^^
+
+
+
+General
+^^^^^^^
+
+
+
+Security
+^^^^^^^^
+
+
+
+Performance
+^^^^^^^^^^^
+
+
+
+Fixes
+^^^^^
+
+- Integrations: fixed missing template variable for fork reference checks.
+- Permissions: fixed server error when showing permissions for user groups.
+- Pull requests: fixed a bug in removal of multiple reviewers at once.
+
+
+Upgrade notes
+^^^^^^^^^^^^^
+
+- Scheduled release addressing problems in 4.16.X releases.

@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+
+import logging
+
+from alembic.migration import MigrationContext
+from alembic.operations import Operations
+from sqlalchemy import String, Column
+from sqlalchemy.sql import text
+
+from rhodecode.lib.dbmigrate.versions import _reset_base
+from rhodecode.model import meta, init_model_encryption
+from rhodecode.model.db import RepoGroup
+
+
+log = logging.getLogger(__name__)
+
+
+def upgrade(migrate_engine):
+    """
+    Upgrade operations go here.
+    Don't create your own engine; bind migrate_engine to your metadata
+    """
+    _reset_base(migrate_engine)
+    from rhodecode.lib.dbmigrate.schema import db_4_16_0_2
+
+    init_model_encryption(db_4_16_0_2)
+
+    context = MigrationContext.configure(migrate_engine.connect())
+    op = Operations(context)
+
+    repo_group = db_4_16_0_2.RepoGroup.__table__
+
+    with op.batch_alter_table(repo_group.name) as batch_op:
+        batch_op.add_column(
+            Column("repo_group_name_hash", String(1024), nullable=True, unique=False))
+
+    _generate_repo_group_name_hashes(db_4_16_0_2, op, meta.Session)
+
+
+def downgrade(migrate_engine):
+    pass
+
+
+def _generate_repo_group_name_hashes(models, op, session):
+    repo_groups = models.RepoGroup.get_all()
+    for repo_group in repo_groups:
+        print(repo_group.group_name)
+        hash_ = RepoGroup.hash_repo_group_name(repo_group.group_name)
+        params = {'hash': hash_, 'id': repo_group.group_id}
+        query = text(
+            'UPDATE groups SET repo_group_name_hash = :hash'
+            ' WHERE group_id = :id').bindparams(**params)
+        op.execute(query)
+    session().commit()

@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+
+import logging
+
+from alembic.migration import MigrationContext
+from alembic.operations import Operations
+
+from rhodecode.lib.dbmigrate.versions import _reset_base
+from rhodecode.model import init_model_encryption
+
+
+log = logging.getLogger(__name__)
+
+
+def upgrade(migrate_engine):
+    """
+    Upgrade operations go here.
+    Don't create your own engine; bind migrate_engine to your metadata
+    """
+    _reset_base(migrate_engine)
+    from rhodecode.lib.dbmigrate.schema import db_4_16_0_2
+
+    init_model_encryption(db_4_16_0_2)
+
+    context = MigrationContext.configure(migrate_engine.connect())
+    op = Operations(context)
+
+    repo_group = db_4_16_0_2.RepoGroup.__table__
+
+    with op.batch_alter_table(repo_group.name) as batch_op:
+        batch_op.alter_column("repo_group_name_hash", nullable=False)
+
+
+def downgrade(migrate_engine):
+    pass
+
+
+def _generate_repo_group_name_hashes(models, op, session):
+    pass

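The two migration modules above follow the usual two-phase pattern: the first adds `repo_group_name_hash` as a nullable column and backfills it row by row, the second tightens the column to NOT NULL once every group has a value. A quick way to confirm the backfill finished before the NOT NULL step is to count the rows that still lack a hash; the snippet below is a hypothetical verification helper (the database URL is a placeholder), not part of the change.

```python
# Hypothetical sanity check between the two migration steps: the NOT NULL
# migration is only safe once this count reaches zero.
from sqlalchemy import create_engine
from sqlalchemy.sql import text

engine = create_engine('postgresql://user:pass@localhost/rhodecode')  # placeholder URL

with engine.connect() as conn:
    missing = conn.execute(text(
        'SELECT COUNT(*) FROM groups '
        'WHERE repo_group_name_hash IS NULL')).scalar()

print('repo groups without a name hash:', missing)
```
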
@@ -51,3 +51,4 @@ 14502561d22e6b70613674cd675ae9a604b7989f
 4aaa40b605b01af78a9f6882eca561c54b525ef0 v4.15.2
 797744642eca86640ed20bef2cd77445780abaec v4.16.0
 6c3452c7c25ed35ff269690929e11960ed6ad7d3 v4.16.1
+5d8057df561c4b6b81b6401aed7d2f911e6e77f7 v4.16.2

@@ -9,6 +9,7 @@ Release Notes
 .. toctree::
    :maxdepth: 1

+   release-notes-4.16.2.rst
    release-notes-4.16.1.rst
    release-notes-4.16.0.rst
    release-notes-4.15.2.rst

@@ -5,7 +5,7 @@

 self: super: {
   "alembic" = super.buildPythonPackage {
-    name = "alembic-1.0.
+    name = "alembic-1.0.9";
     doCheck = false;
     propagatedBuildInputs = [
       self."sqlalchemy"

@@ -14,8 +14,8 @@ self: super: {
       self."python-dateutil"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/d
-      sha256 = "1s34i1j0dsxbflxligwhnkf37a5hvcshsv8ibkcfdjf03ph42pah";
+      url = "https://files.pythonhosted.org/packages/fc/42/8729e2491fa9b8eae160d1cbb429f61712bfc2d779816488c25cfdabf7b8/alembic-1.0.9.tar.gz";
+      sha256 = "0a88rwp7fp0y8ykczj82ivr4ww1kiflcvb882lgfl9azm8csdfa0";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];

@@ -310,11 +310,11 @@ self: super: {
     };
   };
   "coverage" = super.buildPythonPackage {
-    name = "coverage-4.5.
+    name = "coverage-4.5.3";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/35
-      sha256 = "1wbrzpxka3xd4nmmkc6q0ir343d91kymwsm8pbmwa0d2a7q4ir2n";
+      url = "https://files.pythonhosted.org/packages/82/70/2280b5b29a0352519bb95ab0ef1ea942d40466ca71c53a2085bdeff7b0eb/coverage-4.5.3.tar.gz";
+      sha256 = "02f6m073qdispn96rc616hg0rnmw1pgqzw3bgxwiwza4zf9hirlx";
     };
     meta = {
       license = [ pkgs.lib.licenses.asl20 ];

@@ -1118,14 +1118,14 @@ self: super: {
     };
   };
   "pexpect" = super.buildPythonPackage {
-    name = "pexpect-4.
+    name = "pexpect-4.7.0";
     doCheck = false;
     propagatedBuildInputs = [
       self."ptyprocess"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/
-      sha256 = "1fla85g47iaxxpjhp9vkxdnv4pgc7rplfy6ja491smrrk0jqi3ia";
+      url = "https://files.pythonhosted.org/packages/1c/b1/362a0d4235496cb42c33d1d8732b5e2c607b0129ad5fdd76f5a583b9fcb3/pexpect-4.7.0.tar.gz";
+      sha256 = "1sv2rri15zwhds85a4kamwh9pj49qcxv7m4miyr4jfpfwv81yb4y";
     };
     meta = {
       license = [ pkgs.lib.licenses.isc { fullName = "ISC License (ISCL)"; } ];

@@ -1160,15 +1160,15 @@ self: super: {
     };
   };
   "plaster-pastedeploy" = super.buildPythonPackage {
-    name = "plaster-pastedeploy-0.
+    name = "plaster-pastedeploy-0.7";
     doCheck = false;
     propagatedBuildInputs = [
       self."pastedeploy"
       self."plaster"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/
-      sha256 = "1bkggk18f4z2bmsmxyxabvf62znvjwbivzh880419r3ap0616cf2";
+      url = "https://files.pythonhosted.org/packages/99/69/2d3bc33091249266a1bd3cf24499e40ab31d54dffb4a7d76fe647950b98c/plaster_pastedeploy-0.7.tar.gz";
+      sha256 = "1zg7gcsvc1kzay1ry5p699rg2qavfsxqwl17mqxzr0gzw6j9679r";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];

@@ -1186,15 +1186,15 @@ self: super: {
     };
   };
   "prompt-toolkit" = super.buildPythonPackage {
-    name = "prompt-toolkit-1.0.1
+    name = "prompt-toolkit-1.0.16";
     doCheck = false;
     propagatedBuildInputs = [
       self."six"
       self."wcwidth"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/
-      sha256 = "05v9h5nydljwpj5nm8n804ms0glajwfy1zagrzqrg91wk3qqi1c5";
+      url = "https://files.pythonhosted.org/packages/f1/03/bb36771dc9fa7553ac4bdc639a9ecdf6fda0ff4176faf940d97e3c16e41d/prompt_toolkit-1.0.16.tar.gz";
+      sha256 = "1d65hm6nf0cbq0q0121m60zzy4s1fpg9fn761s1yxf08dridvkn1";
     };
     meta = {
       license = [ pkgs.lib.licenses.bsdOriginal ];

@@ -1212,11 +1212,11 @@ self: super: {
     };
   };
   "psycopg2" = super.buildPythonPackage {
-    name = "psycopg2-2.
+    name = "psycopg2-2.8.2";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/
-      sha256 = "0zjbabb4qjx9dm07imhf8y5a9rpa06d5zah80myiimgdi83nslpl";
+      url = "https://files.pythonhosted.org/packages/23/7e/93c325482c328619870b6cd09370f6dbe1148283daca65115cd63642e60f/psycopg2-2.8.2.tar.gz";
+      sha256 = "122mn2z3r0zgs8jyq682jjjr6vq5690qmxqf22gj6g41dwdz5b2w";
     };
     meta = {
       license = [ pkgs.lib.licenses.zpl21 { fullName = "GNU Library or Lesser General Public License (LGPL)"; } { fullName = "LGPL with exceptions or ZPL"; } ];

@@ -1373,7 +1373,7 @@ self: super: {
     };
   };
   "pyramid" = super.buildPythonPackage {
-    name = "pyramid-1.10.
+    name = "pyramid-1.10.4";
     doCheck = false;
     propagatedBuildInputs = [
       self."hupper"

@@ -1388,8 +1388,8 @@ self: super: {
       self."repoze.lru"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/bc
-      sha256 = "0gn6sw6ml67ir150ffivc0ad5hd448p43p9z2bkyp12jh2n9n2p7";
+      url = "https://files.pythonhosted.org/packages/c2/43/1ae701c9c6bb3a434358e678a5e72c96e8aa55cf4cb1d2fa2041b5dd38b7/pyramid-1.10.4.tar.gz";
+      sha256 = "0rkxs1ajycg2zh1c94xlmls56mx5m161sn8112skj0amza6cn36q";
     };
     meta = {
       license = [ { fullName = "Repoze Public License"; } { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];

@@ -1931,11 +1931,11 @@ self: super: {
     };
   };
   "setuptools" = super.buildPythonPackage {
-    name = "setuptools-40
+    name = "setuptools-41.0.0";
     doCheck = false;
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/
-      sha256 = "0k9hifpgahnw2a26w3cr346iy733k6d3nwh3f7g9m13y6f8fqkkf";
+      url = "https://files.pythonhosted.org/packages/ed/69/c805067de1feedbb98c53174b0f2df44cc05e0e9ee73bb85eebc59e508c6/setuptools-41.0.0.zip";
+      sha256 = "1cfwy2g23qj3262ivj0b1182lgwz7bqqbka35rkqwypynra05lvr";
     };
     meta = {
       license = [ pkgs.lib.licenses.mit ];

@@ -2012,14 +2012,14 @@ self: super: {
     };
   };
   "supervisor" = super.buildPythonPackage {
-    name = "supervisor-
+    name = "supervisor-4.0.1";
     doCheck = false;
     propagatedBuildInputs = [
       self."meld3"
     ];
     src = fetchurl {
-      url = "https://files.pythonhosted.org/packages/
-      sha256 = "1w3ahridzbc6rxfpbyx8lij6pjlcgf2ymzyg53llkjqxalp6sk8v";
+      url = "https://files.pythonhosted.org/packages/96/ec/f8190beeb0c6d29a30aea10389c11d0164b6ff221931ee84093315ecde6a/supervisor-4.0.1.tar.gz";
+      sha256 = "10l3z7v6v1fyv7m5zbazzxciwvli2n9a41pxi27p4kixgsfp0s1j";
     };
     meta = {
       license = [ { fullName = "BSD-derived (http://www.repoze.org/LICENSE.txt)"; } ];

@@ -47,7 +47,7 @@ pyparsing==2.3.0
 pyramid-beaker==0.8
 pyramid-debugtoolbar==4.5.0
 pyramid-mako==1.0.2
-pyramid==1.10.
+pyramid==1.10.4
 pyramid_mailer==0.15.1
 python-dateutil
 python-ldap==3.1.0

@@ -67,7 +67,7 @@ six==1.11.0
 sqlalchemy==1.1.18
 sshpubkeys==3.1.0
 subprocess32==3.5.3
-supervisor==
+supervisor==4.0.1
 translationstring==1.3
 urllib3==1.24.1
 urlobject==2.4.3

@@ -87,7 +87,7 @@ zope.interface==4.6.0
 mysql-python==1.2.5
 pymysql==0.8.1
 pysqlite==2.8.3
-psycopg2==2.
+psycopg2==2.8.2

 # IPYTHON RENDERING
 # entrypoints backport, pypi version doesn't support egg installs

@@ -97,7 +97,7 @@ nbformat==4.4.0
 jupyter_client==5.0.0

 ## cli tools
-alembic==1.0.
+alembic==1.0.9
 invoke==0.13.0
 bumpversion==0.5.3


@@ -10,7 +10,7 @@ gprof2dot==2017.9.19

 mock==1.0.1
 cov-core==1.15.0
-coverage==4.5.
+coverage==4.5.3

 webtest==2.0.33
 beautifulsoup4==4.6.3

@@ -45,7 +45,7 @@ PYRAMID_SETTINGS = {}
 EXTENSIONS = {}

 __version__ = ('.'.join((str(each) for each in VERSION[:3])))
-__dbversion__ = 9
+__dbversion__ = 97 # defines current db version for migrations
 __platform__ = platform.system()
 __license__ = 'AGPLv3, and Commercial License'
 __author__ = 'RhodeCode GmbH'

@@ -424,6 +424,10 @@ def admin_routes(config):
         pattern='/repo_groups')

     config.add_route(
+        name='repo_groups_data',
+        pattern='/repo_groups_data')
+
+    config.add_route(
         name='repo_group_new',
         pattern='/repo_group/new')


@@ -23,11 +23,11 @@ import pytest

 from rhodecode.apps._base import ADMIN_PREFIX
 from rhodecode.lib import helpers as h
-from rhodecode.model.db import Repository, UserRepoToPerm, User
+from rhodecode.model.db import Repository, UserRepoToPerm, User, RepoGroup
 from rhodecode.model.meta import Session
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.tests import (
-    assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH
+    assert_session_flash, TEST_USER_REGULAR_LOGIN, TESTS_TMP_PATH)
 from rhodecode.tests.fixture import Fixture

 fixture = Fixture()

@@ -38,6 +38,7 @@ def route_path(name, params=None, **kwar

 base_url = {
     'repo_groups': ADMIN_PREFIX + '/repo_groups',
+    'repo_groups_data': ADMIN_PREFIX + '/repo_groups_data',
     'repo_group_new': ADMIN_PREFIX + '/repo_group/new',
     'repo_group_create': ADMIN_PREFIX + '/repo_group/create',


@@ -59,13 +60,30 @@ def _get_permission_for_user(user, repo)

 @pytest.mark.usefixtures("app")
 class TestAdminRepositoryGroups(object):
+
     def test_show_repo_groups(self, autologin_user):
-        response.mustcontain('data: []')
+        self.app.get(route_path('repo_groups'))
+
+    def test_show_repo_groups_data(self, autologin_user, xhr_header):
+        response = self.app.get(route_path(
+            'repo_groups_data'), extra_environ=xhr_header)
+
+        all_repo_groups = RepoGroup.query().count()
+        assert response.json['recordsTotal'] == all_repo_groups

-    def test_show_repo_groups_
+    def test_show_repo_groups_data_filtered(self, autologin_user, xhr_header):
+        response = self.app.get(route_path(
+            'repo_groups_data', params={'search[value]': 'empty_search'}),
+            extra_environ=xhr_header)
+
+        all_repo_groups = RepoGroup.query().count()
+        assert response.json['recordsTotal'] == all_repo_groups
+        assert response.json['recordsFiltered'] == 0
+
+    def test_show_repo_groups_after_creating_group(self, autologin_user, xhr_header):
         fixture.create_repo_group('test_repo_group')
-        response = self.app.get(route_path(
+        response = self.app.get(route_path(
+            'repo_groups_data'), extra_environ=xhr_header)
         response.mustcontain('"name_raw": "test_repo_group"')
         fixture.destroy_repo_group('test_repo_group')


@@ -17,7 +17,7 @@
 # This program is dual-licensed. If you wish to learn more about the
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/
-
+import datetime
 import logging
 import formencode
 import formencode.htmlfill

@@ -30,16 +30,16 @@ from pyramid.response import Response
 from rhodecode import events
 from rhodecode.apps._base import BaseAppView, DataGridAppView

-from rhodecode.lib.ext_json import json
 from rhodecode.lib.auth import (
     LoginRequired, CSRFRequired, NotAnonymous,
     HasPermissionAny, HasRepoGroupPermissionAny)
 from rhodecode.lib import helpers as h, audit_logger
-from rhodecode.lib.utils2 import safe_int, safe_unicode
+from rhodecode.lib.utils2 import safe_int, safe_unicode, datetime_to_time
 from rhodecode.model.forms import RepoGroupForm
 from rhodecode.model.repo_group import RepoGroupModel
 from rhodecode.model.scm import RepoGroupList
-from rhodecode.model.db import
+from rhodecode.model.db import (
+    or_, count, func, in_filter_generator, Session, RepoGroup, User, Repository)

 log = logging.getLogger(__name__)


@@ -88,22 +88,168 @@ class AdminRepoGroupsView(BaseAppView, D
                 return False
         return False

+    # permission check in data loading of
+    # `repo_group_list_data` via RepoGroupList
     @LoginRequired()
     @NotAnonymous()
-    # perms check inside
     @view_config(
         route_name='repo_groups', request_method='GET',
         renderer='rhodecode:templates/admin/repo_groups/repo_groups.mako')
     def repo_group_list(self):
         c = self.load_default_context()
+        return self._get_template_context(c)

-        repo_group_list = RepoGroup.get_all_repo_groups()
-        repo_group_list_acl = RepoGroupList(
-            repo_group_list, perm_set=['group.admin'])
-        repo_group_data = RepoGroupModel().get_repo_groups_as_dict(
-            repo_group_list=repo_group_list_acl, admin=True)
-        c.data = json.dumps(repo_group_data)
-        return self._get_template_context(c)
+    # permission check inside
+    @LoginRequired()
+    @NotAnonymous()
+    @view_config(
+        route_name='repo_groups_data', request_method='GET',
+        renderer='json_ext', xhr=True)
+    def repo_group_list_data(self):
+        self.load_default_context()
+        column_map = {
+            'name_raw': 'group_name_hash',
+            'desc': 'group_description',
+            'last_change_raw': 'updated_on',
+            'top_level_repos': 'repos_total',
+            'owner': 'user_username',
+        }
+        draw, start, limit = self._extract_chunk(self.request)
+        search_q, order_by, order_dir = self._extract_ordering(
+            self.request, column_map=column_map)
+
+        _render = self.request.get_partial_renderer(
+            'rhodecode:templates/data_table/_dt_elements.mako')
+        c = _render.get_call_context()
+
+        def quick_menu(repo_group_name):
+            return _render('quick_repo_group_menu', repo_group_name)
+
+        def repo_group_lnk(repo_group_name):
+            return _render('repo_group_name', repo_group_name)
+
+        def last_change(last_change):
+            if isinstance(last_change, datetime.datetime) and not last_change.tzinfo:
+                delta = datetime.timedelta(
+                    seconds=(datetime.datetime.now() - datetime.datetime.utcnow()).seconds)
+                last_change = last_change + delta
+            return _render("last_change", last_change)
+
+        def desc(desc, personal):
+            return _render(
+                'repo_group_desc', desc, personal, c.visual.stylify_metatags)
+
+        def repo_group_actions(repo_group_id, repo_group_name, gr_count):
+            return _render(
+                'repo_group_actions', repo_group_id, repo_group_name, gr_count)
+
+        def user_profile(username):
+            return _render('user_profile', username)
+
+        auth_repo_group_list = RepoGroupList(
+            RepoGroup.query().all(), perm_set=['group.admin'])
+
+        allowed_ids = [-1]
+        for repo_group in auth_repo_group_list:
+            allowed_ids.append(repo_group.group_id)
+
+        repo_groups_data_total_count = RepoGroup.query()\
+            .filter(or_(
+                # generate multiple IN to fix limitation problems
+                *in_filter_generator(RepoGroup.group_id, allowed_ids)
+            )) \
+            .count()
+
+        repo_groups_data_total_inactive_count = RepoGroup.query()\
+            .filter(RepoGroup.group_id.in_(allowed_ids))\
+            .count()
+
+        repo_count = count(Repository.repo_id)
+        base_q = Session.query(
+            RepoGroup.group_name,
+            RepoGroup.group_name_hash,
+            RepoGroup.group_description,
+            RepoGroup.group_id,
+            RepoGroup.personal,
+            RepoGroup.updated_on,
+            User,
+            repo_count.label('repos_count')
+        ) \
+            .filter(or_(
+                # generate multiple IN to fix limitation problems
+                *in_filter_generator(RepoGroup.group_id, allowed_ids)
+            )) \
+            .outerjoin(Repository) \
+            .join(User, User.user_id == RepoGroup.user_id) \
+            .group_by(RepoGroup, User)
+
+        if search_q:
+            like_expression = u'%{}%'.format(safe_unicode(search_q))
+            base_q = base_q.filter(or_(
+                RepoGroup.group_name.ilike(like_expression),
+            ))
+
+        repo_groups_data_total_filtered_count = base_q.count()
+        # the inactive isn't really used, but we still make it same as other data grids
+        # which use inactive (users,user groups)
+        repo_groups_data_total_filtered_inactive_count = repo_groups_data_total_filtered_count
+
+        sort_defined = False
+        if order_by == 'group_name':
+            sort_col = func.lower(RepoGroup.group_name)
+            sort_defined = True
+        elif order_by == 'repos_total':
+            sort_col = repo_count
+            sort_defined = True
+        elif order_by == 'user_username':
+            sort_col = User.username
+        else:
+            sort_col = getattr(RepoGroup, order_by, None)
+
+        if sort_defined or sort_col:
+            if order_dir == 'asc':
+                sort_col = sort_col.asc()
+            else:
+                sort_col = sort_col.desc()
+
+        base_q = base_q.order_by(sort_col)
+        base_q = base_q.offset(start).limit(limit)
+
+        # authenticated access to user groups
+        auth_repo_group_list = base_q.all()
+
+        repo_groups_data = []
+        for repo_gr in auth_repo_group_list:
+            row = {
+                "menu": quick_menu(repo_gr.group_name),
+                "name": repo_group_lnk(repo_gr.group_name),
+                "name_raw": repo_gr.group_name,
+                "last_change": last_change(repo_gr.updated_on),
+                "last_change_raw": datetime_to_time(repo_gr.updated_on),
+
+                "last_changeset": "",
+                "last_changeset_raw": "",
+
+                "desc": desc(repo_gr.group_description, repo_gr.personal),
+                "owner": user_profile(repo_gr.User.username),
+                "top_level_repos": repo_gr.repos_count,
+                "action": repo_group_actions(
+                    repo_gr.group_id, repo_gr.group_name, repo_gr.repos_count),

+            }
+
+            repo_groups_data.append(row)
+
+        data = ({
+            'draw': draw,
+            'data': repo_groups_data,
+            'recordsTotal': repo_groups_data_total_count,
+            'recordsTotalInactive': repo_groups_data_total_inactive_count,
+            'recordsFiltered': repo_groups_data_total_filtered_count,
+            'recordsFilteredInactive': repo_groups_data_total_filtered_inactive_count,
+        })
+
+        return data

     @LoginRequired()
     @NotAnonymous()

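For reference, `repo_group_list_data` emits the same DataTables-style envelope as the other admin grids, so the front-end only needs the route name to change. The shape below is illustrative only; every value is made up, and the HTML fragments stand in for the partials rendered from `_dt_elements.mako`.

```python
# Illustrative /repo_groups_data payload (values are invented):
example_payload = {
    'draw': 1,                          # echoed DataTables draw counter
    'data': [{
        'menu': '<div class="grid_quick">...</div>',   # quick_repo_group_menu partial
        'name': '<a href="/docs">docs</a>',            # repo_group_name partial
        'name_raw': 'docs',
        'last_change': '<span class="tooltip">...</span>',
        'last_change_raw': 1554196322.0,               # via datetime_to_time()
        'last_changeset': '',
        'last_changeset_raw': '',
        'desc': 'Documentation repositories',
        'owner': '<a href="...">admin</a>',
        'top_level_repos': 3,
        'action': '<div class="grid_edit">...</div>',
    }],
    'recordsTotal': 12,                 # groups visible to this admin
    'recordsTotalInactive': 12,         # kept only for parity with other grids
    'recordsFiltered': 1,               # rows matching search[value]
    'recordsFilteredInactive': 1,
}
```
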
@@ -39,7 +39,7 @@ from rhodecode.model.forms import UserGr
 from rhodecode.model.permission import PermissionModel
 from rhodecode.model.scm import UserGroupList
 from rhodecode.model.db import (
-    or_, count, User, UserGroup, UserGroupMember)
+    or_, count, User, UserGroup, UserGroupMember, in_filter_generator)
 from rhodecode.model.meta import Session
 from rhodecode.model.user_group import UserGroupModel
 from rhodecode.model.db import true

@@ -107,11 +107,17 @@ class AdminUserGroupsView(BaseAppView, D
             allowed_ids.append(user_group.users_group_id)

         user_groups_data_total_count = UserGroup.query()\
-            .filter(UserGroup.users_group_id.in_(allowed_ids))\
+            .filter(or_(
+                # generate multiple IN to fix limitation problems
+                *in_filter_generator(UserGroup.users_group_id, allowed_ids)
+            ))\
             .count()

         user_groups_data_total_inactive_count = UserGroup.query()\
-            .filter(UserGroup.users_group_id.in_(allowed_ids))\
+            .filter(or_(
+                # generate multiple IN to fix limitation problems
+                *in_filter_generator(UserGroup.users_group_id, allowed_ids)
+            ))\
             .filter(UserGroup.users_group_active != true()).count()

         member_count = count(UserGroupMember.user_id)

@@ -123,11 +129,14 @@ class AdminUserGroupsView(BaseAppView, D
             UserGroup.group_data,
             User,
             member_count.label('member_count')
         ) \
-            .filter(UserGroup.users_group_id.in_(allowed_ids)) \
+            .filter(or_(
+                # generate multiple IN to fix limitation problems
+                *in_filter_generator(UserGroup.users_group_id, allowed_ids)
+            )) \
             .outerjoin(UserGroupMember) \
             .join(User, User.user_id == UserGroup.user_id) \
             .group_by(UserGroup, User)

         base_q_inactive = base_q.filter(UserGroup.users_group_active != true())


@@ -141,14 +150,16 @@ class AdminUserGroupsView(BaseAppView, D
         user_groups_data_total_filtered_count = base_q.count()
         user_groups_data_total_filtered_inactive_count = base_q_inactive.count()

+        sort_defined = False
         if order_by == 'members_total':
             sort_col = member_count
+            sort_defined = True
         elif order_by == 'user_username':
             sort_col = User.username
         else:
             sort_col = getattr(UserGroup, order_by, None)

-        if
+        if sort_defined or sort_col:
             if order_dir == 'asc':
                 sort_col = sort_col.asc()
             else:

@@ -162,7 +173,7 @@ class AdminUserGroupsView(BaseAppView, D

         user_groups_data = []
         for user_gr in auth_user_group_list:
-            user_groups_data.append({
+            row = {
                 "users_group_name": user_group_name(user_gr.users_group_name),
                 "name_raw": h.escape(user_gr.users_group_name),
                 "description": h.escape(user_gr.user_group_description),

@@ -175,7 +186,8 @@ class AdminUserGroupsView(BaseAppView, D
                 "owner": user_profile(user_gr.User.username),
                 "action": user_group_actions(
                     user_gr.users_group_id, user_gr.users_group_name)
-                })
+            }
+            user_groups_data.append(row)

         data = ({
             'draw': draw,

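The switch from a single `.in_(allowed_ids)` to `or_(*in_filter_generator(...))` works around backend limits on how many values one IN expression may carry. The helper itself is imported from `rhodecode.model.db` and its body is not part of this diff; the sketch below only illustrates the chunking idea, and its `limit` argument is an assumption.

```python
# Sketch of the chunking idea behind in_filter_generator (not the actual
# implementation): yield one column.in_(chunk) clause per slice of ids, so
# or_(*in_filter_generator(col, ids)) stays under per-query parameter limits.
def in_filter_generator(column, items, limit=500):
    items = items or [-1]  # avoid an empty, invalid "IN ()" expression
    for start in range(0, len(items), limit):
        yield column.in_(items[start:start + limit])
```
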
@@ -67,12 +67,6 @@ class RepoSettingsView(RepoAppView):
             .filter(UserFollowing.user_id == c.default_user_id) \
             .filter(UserFollowing.follows_repository == self.db_repo).scalar()

-        c.has_origin_repo_read_perm = False
-        if self.db_repo.fork:
-            c.has_origin_repo_read_perm = h.HasRepoPermissionAny(
-                'repository.write', 'repository.read', 'repository.admin')(
-                    self.db_repo.fork.repo_name, 'repo set as fork page')
-
         c.ver_info_dict = self.rhodecode_vcs_repo.get_hooks_info()

         return self._get_template_context(c)

@@ -20,12 +20,13 @@

 import os
 import sys
-import shutil
 import logging
 import tempfile
 import textwrap
-
+import collections
 from .base import VcsServer
+from rhodecode.model.db import RhodeCodeUi
+from rhodecode.model.settings import VcsSettingsModel

 log = logging.getLogger(__name__)


@@ -37,62 +38,46 @@ class MercurialTunnelWrapper(object):
         self.server = server
         self.stdin = sys.stdin
         self.stdout = sys.stdout
-        self.
-        self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp()
+        self.hooks_env_fd, self.hooks_env_path = tempfile.mkstemp(prefix='hgrc_rhodecode_')

     def create_hooks_env(self):
+        repo_name = self.server.repo_name
+        hg_flags = self.config_to_hgrc(repo_name)

         content = textwrap.dedent(
             '''
-            # SSH hooks version=
-            pretxnchangegroup.ssh_auth=python:vcsserver.hooks.pre_push_ssh_auth
-            pretxnchangegroup.ssh=python:vcsserver.hooks.pre_push_ssh
-            changegroup.ssh=python:vcsserver.hooks.post_push_ssh
-
-            preoutgoing.ssh=python:vcsserver.hooks.pre_pull_ssh
-            outgoing.ssh=python:vcsserver.hooks.post_pull_ssh
-
-            '''
-        )
+            # RhodeCode SSH hooks version=2.0.0
+            {custom}
+            '''
+        ).format(custom='\n'.join(hg_flags))

+        root = self.server.get_root_store()
+        hgrc_custom = os.path.join(root, repo_name, '.hg', 'hgrc_rhodecode')
+        hgrc_main = os.path.join(root, repo_name, '.hg', 'hgrc')
+
+        # cleanup custom hgrc file
+        if os.path.isfile(hgrc_custom):
+            with open(hgrc_custom, 'wb') as f:
+                f.write('')
+            log.debug('Cleanup custom hgrc file under %s', hgrc_custom)
+
+        # write temp
         with os.fdopen(self.hooks_env_fd, 'w') as hooks_env_file:
             hooks_env_file.write(content)
-        root = self.server.get_root_store()

-        hgrc_custom = os.path.join(
-            root, self.server.repo_name, '.hg', 'hgrc_rhodecode')
-        log.debug('Wrote custom hgrc file under %s', hgrc_custom)
-        shutil.move(
-            self.hooks_env_path, hgrc_custom)
-
-        hgrc_main = os.path.join(
-            root, self.server.repo_name, '.hg', 'hgrc')
-        include_marker = '%include hgrc_rhodecode'
+        return self.hooks_env_path

-        if not os.path.isfile(hgrc_main):
-            os.mknod(hgrc_main)
-
-        with open(hgrc_main, 'rb') as f:
-            data = f.read()
-            has_marker = include_marker in data
+    def remove_configs(self):
+        os.remove(self.hooks_env_path)

-        if not has_marker:
-            log.debug('Adding include marker for hooks')
-            with open(hgrc_main, 'wa') as f:
-                f.write(textwrap.dedent('''
-                # added by RhodeCode
-                {}
-                '''.format(include_marker)))
-
-    def command(self):
+    def command(self, hgrc_path):
         root = self.server.get_root_store()

         command = (
-            "cd {root}; {hg_path} -R {root}{repo_name} "
+            "cd {root}; HGRCPATH={hgrc} {hg_path} -R {root}{repo_name} "
             "serve --stdio".format(
                 root=root, hg_path=self.server.hg_path,
-                repo_name=self.server.repo_name))
+                repo_name=self.server.repo_name, hgrc=hgrc_path))
         log.debug("Final CMD: %s", command)
         return command


@@ -102,22 +87,61 @@ class MercurialTunnelWrapper(object):
         action = '?'
         # permissions are check via `pre_push_ssh_auth` hook
         self.server.update_environment(action=action, extras=extras)
-        self.create_hooks_env()
-        return os.system(self.command())
+        custom_hgrc_file = self.create_hooks_env()
+
+        try:
+            return os.system(self.command(custom_hgrc_file))
+        finally:
+            self.remove_configs()


 class MercurialServer(VcsServer):
     backend = 'hg'
+    cli_flags = ['phases', 'largefiles', 'extensions', 'experimental', 'hooks']

-    def __init__(self, store, ini_path, repo_name,
-        super(MercurialServer, self).\
-            __init__(user, user_permissions, config, env)
+    def __init__(self, store, ini_path, repo_name, user, user_permissions, config, env):
+        super(MercurialServer, self).__init__(user, user_permissions, config, env)

         self.store = store
         self.ini_path = ini_path
         self.repo_name = repo_name
-        self._path = self.hg_path = config.get(
-            'app:main', 'ssh.executable.hg')
+        self._path = self.hg_path = config.get('app:main', 'ssh.executable.hg')
+        self.tunnel = MercurialTunnelWrapper(server=self)
+
+    def config_to_hgrc(self, repo_name):
+        ui_sections = collections.defaultdict(list)
+        ui = VcsSettingsModel(repo=repo_name).get_ui_settings(section=None, key=None)
+
+        # write default hooks
+        default_hooks = [
+            ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
+            ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
+            ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
+
+            ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
+            ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
+        ]
+
+        for k, v in default_hooks:
+            ui_sections['hooks'].append((k, v))

-        self.tunnel = MercurialTunnelWrapper(server=self)
+        for entry in ui:
+            if not entry.active:
+                continue
+            sec = entry.section
+            key = entry.key
+
+            if sec in self.cli_flags:
+                # we want only custom hooks, so we skip builtins
+                if sec == 'hooks' and key in RhodeCodeUi.HOOKS_BUILTIN:
+                    continue
+
+                ui_sections[sec].append([key, entry.value])
+
+        flags = []
+        for _sec, key_val in ui_sections.items():
+            flags.append(' ')
+            flags.append('[{}]'.format(_sec))
+            for key, val in key_val:
+                flags.append('{}= {}'.format(key, val))
+        return flags

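With this change every SSH `hg serve --stdio` invocation gets a throw-away hgrc written to a temp file and passed through `HGRCPATH`, instead of editing the repository's own `.hg/hgrc` and moving files around. The snippet below is a standalone re-creation of what `create_hooks_env()` writes for a repository with no active custom ui entries; it only exercises logic visible above.

```python
# Standalone sketch: the hgrc content produced when only the built-in SSH
# hooks are emitted (no custom [phases]/[largefiles]/... entries are active).
import textwrap

hg_flags = [' ', '[hooks]']
hg_flags += ['{}= {}'.format(k, v) for k, v in [
    ('pretxnchangegroup.ssh_auth', 'python:vcsserver.hooks.pre_push_ssh_auth'),
    ('pretxnchangegroup.ssh', 'python:vcsserver.hooks.pre_push_ssh'),
    ('changegroup.ssh', 'python:vcsserver.hooks.post_push_ssh'),
    ('preoutgoing.ssh', 'python:vcsserver.hooks.pre_pull_ssh'),
    ('outgoing.ssh', 'python:vcsserver.hooks.post_pull_ssh'),
]]

content = textwrap.dedent('''
    # RhodeCode SSH hooks version=2.0.0
    {custom}
    ''').format(custom='\n'.join(hg_flags))

print(content)  # this text ends up in the tempfile referenced by HGRCPATH
```
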
@@ -18,6 +18,7 @@
 # RhodeCode Enterprise Edition, including its added features, Support services,
 # and proprietary license terms, please see https://rhodecode.com/licenses/

+import os
 import mock
 import pytest


@@ -68,14 +69,16 @@ def hg_server(app):

 class TestMercurialServer(object):

-    def test_command(self, hg_server):
+    def test_command(self, hg_server, tmpdir):
         server = hg_server.create()
+        custom_hgrc = os.path.join(str(tmpdir), 'hgrc')
         expected_command = (
-            'cd {root}; {hg_path} -R {root}{repo_name} serve --stdio'.format(
-                root=hg_server.root, hg_path=hg_server.hg_path,
+            'cd {root}; HGRCPATH={custom_hgrc} {hg_path} -R {root}{repo_name} serve --stdio'.format(
+                root=hg_server.root, custom_hgrc=custom_hgrc, hg_path=hg_server.hg_path,
                 repo_name=hg_server.repo_name)
         )
+        server_command = server.tunnel.command(custom_hgrc)
+        assert expected_command == server_command

     @pytest.mark.parametrize('permissions, action, code', [
         ({}, 'pull', -2),

114 | _ = self.request.translate |
|
114 | _ = self.request.translate | |
115 | c = super(IntegrationSettingsViewBase, self)._get_local_tmpl_context( |
|
115 | c = super(IntegrationSettingsViewBase, self)._get_local_tmpl_context( | |
116 | include_app_defaults=include_app_defaults) |
|
116 | include_app_defaults=include_app_defaults) | |
117 |
|
||||
118 | c.active = 'integrations' |
|
117 | c.active = 'integrations' | |
119 |
|
118 | |||
120 | return c |
|
119 | return c | |
@@ -404,6 +403,11 b' class RepoIntegrationsView(IntegrationSe' | |||||
404 | c.repo_name = self.db_repo.repo_name |
|
403 | c.repo_name = self.db_repo.repo_name | |
405 | c.repository_pull_requests = ScmModel().get_pull_requests(self.repo) |
|
404 | c.repository_pull_requests = ScmModel().get_pull_requests(self.repo) | |
406 |
|
405 | |||
|
406 | c.has_origin_repo_read_perm = False | |||
|
407 | if self.db_repo.fork: | |||
|
408 | c.has_origin_repo_read_perm = h.HasRepoPermissionAny( | |||
|
409 | 'repository.write', 'repository.read', 'repository.admin')( | |||
|
410 | self.db_repo.fork.repo_name, 'summary fork link') | |||
407 | return c |
|
411 | return c | |
408 |
|
412 | |||
409 | @LoginRequired() |
|
413 | @LoginRequired() |
@@ -375,6 +375,27 @@ class DbManage(object):
         hgevolve.ui_active = False
         self.sa.add(hgevolve)

+        hgevolve = RhodeCodeUi()
+        hgevolve.ui_section = 'experimental'
+        hgevolve.ui_key = 'evolution'
+        hgevolve.ui_value = ''
+        hgevolve.ui_active = False
+        self.sa.add(hgevolve)
+
+        hgevolve = RhodeCodeUi()
+        hgevolve.ui_section = 'experimental'
+        hgevolve.ui_key = 'evolution.exchange'
+        hgevolve.ui_value = ''
+        hgevolve.ui_active = False
+        self.sa.add(hgevolve)
+
+        hgevolve = RhodeCodeUi()
+        hgevolve.ui_section = 'extensions'
+        hgevolve.ui_key = 'topic'
+        hgevolve.ui_value = ''
+        hgevolve.ui_active = False
+        self.sa.add(hgevolve)
+
         # enable hggit disabled by default
         hggit = RhodeCodeUi()
         hggit.ui_section = 'extensions'

@@ -80,7 +80,7 @@ def trigger_log_create_pull_request_hook
     extras = _get_rc_scm_extras(username, repo_name, repo_alias,
                                 'create_pull_request')
     events.trigger(events.PullRequestCreateEvent(pull_request))
-    extras.update(pull_request.get_api_data())
+    extras.update(pull_request.get_api_data(with_merge_state=False))
     hooks_base.log_create_pull_request(**extras)


@@ -371,7 +371,8 @@ def config_data_from_db(clear_session=Tr
         config.append((
             safe_str(setting.section), safe_str(setting.key), False))
     log.debug(
-        'settings ui from db: %s',
+        'settings ui from db@repo[%s]: %s',
+        repo,
         ','.join(map(lambda s: '[{}] {}={}'.format(*s), ui_data)))
     if clear_session:
         meta.Session.remove()

@@ -161,7 +161,7 @@ class MergeResponse(object):
         u'This pull request cannot be merged because the source contains '
         u'more branches than the target.'),
     MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS: lazy_ugettext(
-        u'This pull request cannot be merged because the target '
+        u'This pull request cannot be merged because the target `{target_ref.name}` '
         u'has multiple heads: `{heads}`.'),
     MergeFailureReason.TARGET_IS_LOCKED: lazy_ugettext(
         u'This pull request cannot be merged because the target repository is '

@@ -309,6 +309,9 @@ class BaseRepository(object):
     def _remote(self):
         raise NotImplementedError

+    def _heads(self, branch=None):
+        return []
+
     @LazyProperty
     def EMPTY_COMMIT(self):
         return EmptyCommit(self.EMPTY_COMMIT_ID)

@@ -715,11 +715,16 @@ class MercurialRepository(BaseRepository

         try:
             if target_ref.type == 'branch' and len(self._heads(target_ref.name)) != 1:
-                heads = ','.join(self._heads(target_ref.name))
+                heads = '\n,'.join(self._heads(target_ref.name))
+                metadata = {
+                    'target_ref': target_ref,
+                    'source_ref': source_ref,
+                    'heads': heads
+                }
                 return MergeResponse(
                     False, False, None,
                     MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS,
-                    metadata=
+                    metadata=metadata)
         except CommitDoesNotExistError:
             log.exception('Failure when looking up branch heads on hg target')
             return MergeResponse(

@@ -25,6 +25,7 b' Database Models for RhodeCode Enterprise' | |||||
25 | import re |
|
25 | import re | |
26 | import os |
|
26 | import os | |
27 | import time |
|
27 | import time | |
|
28 | import string | |||
28 | import hashlib |
|
29 | import hashlib | |
29 | import logging |
|
30 | import logging | |
30 | import datetime |
|
31 | import datetime | |
@@ -50,6 +51,7 b' from sqlalchemy.dialects.mysql import LO' | |||||
50 | from zope.cachedescriptors.property import Lazy as LazyProperty |
|
51 | from zope.cachedescriptors.property import Lazy as LazyProperty | |
51 | from pyramid import compat |
|
52 | from pyramid import compat | |
52 | from pyramid.threadlocal import get_current_request |
|
53 | from pyramid.threadlocal import get_current_request | |
|
54 | from webhelpers.text import collapse, remove_formatting | |||
53 |
|
55 | |||
54 | from rhodecode.translation import _ |
|
56 | from rhodecode.translation import _ | |
55 | from rhodecode.lib.vcs import get_vcs_instance |
|
57 | from rhodecode.lib.vcs import get_vcs_instance | |
@@ -409,6 +411,15 b' class RhodeCodeUi(Base, BaseModel):' | |||||
409 | HOOK_PUSH = 'changegroup.push_logger' |
|
411 | HOOK_PUSH = 'changegroup.push_logger' | |
410 | HOOK_PUSH_KEY = 'pushkey.key_push' |
|
412 | HOOK_PUSH_KEY = 'pushkey.key_push' | |
411 |
|
413 | |||
|
414 | HOOKS_BUILTIN = [ | |||
|
415 | HOOK_PRE_PULL, | |||
|
416 | HOOK_PULL, | |||
|
417 | HOOK_PRE_PUSH, | |||
|
418 | HOOK_PRETX_PUSH, | |||
|
419 | HOOK_PUSH, | |||
|
420 | HOOK_PUSH_KEY, | |||
|
421 | ] | |||
|
422 | ||||
412 | # TODO: johbo: Unify way how hooks are configured for git and hg, |
|
423 | # TODO: johbo: Unify way how hooks are configured for git and hg, | |
413 | # git part is currently hardcoded. |
|
424 | # git part is currently hardcoded. | |
414 |
|
425 | |||
@@ -2469,7 +2480,8 b' class RepoGroup(Base, BaseModel):' | |||||
2469 | CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups |
|
2480 | CHOICES_SEPARATOR = '/' # used to generate select2 choices for nested groups | |
2470 |
|
2481 | |||
2471 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) |
|
2482 | group_id = Column("group_id", Integer(), nullable=False, unique=True, default=None, primary_key=True) | |
2472 | group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) |
|
2483 | _group_name = Column("group_name", String(255), nullable=False, unique=True, default=None) | |
|
2484 | group_name_hash = Column("repo_group_name_hash", String(1024), nullable=False, unique=False) | |||
2473 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) |
|
2485 | group_parent_id = Column("group_parent_id", Integer(), ForeignKey('groups.group_id'), nullable=True, unique=None, default=None) | |
2474 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) |
|
2486 | group_description = Column("group_description", String(10000), nullable=True, unique=None, default=None) | |
2475 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) |
|
2487 | enable_locking = Column("enable_locking", Boolean(), nullable=False, unique=None, default=False) | |
@@ -2492,6 +2504,15 b' class RepoGroup(Base, BaseModel):' | |||||
2492 | return u"<%s('id:%s:%s')>" % ( |
|
2504 | return u"<%s('id:%s:%s')>" % ( | |
2493 | self.__class__.__name__, self.group_id, self.group_name) |
|
2505 | self.__class__.__name__, self.group_id, self.group_name) | |
2494 |
|
2506 | |||
|
2507 | @hybrid_property | |||
|
2508 | def group_name(self): | |||
|
2509 | return self._group_name | |||
|
2510 | ||||
|
2511 | @group_name.setter | |||
|
2512 | def group_name(self, value): | |||
|
2513 | self._group_name = value | |||
|
2514 | self.group_name_hash = self.hash_repo_group_name(value) | |||
|
2515 | ||||
2495 | @validates('group_parent_id') |
|
2516 | @validates('group_parent_id') | |
2496 | def validate_group_parent_id(self, key, val): |
|
2517 | def validate_group_parent_id(self, key, val): | |
2497 | """ |
|
2518 | """ | |
@@ -2508,6 +2529,18 b' class RepoGroup(Base, BaseModel):' | |||||
2508 | return h.escape(self.group_description) |
|
2529 | return h.escape(self.group_description) | |
2509 |
|
2530 | |||
2510 | @classmethod |
|
2531 | @classmethod | |
|
2532 | def hash_repo_group_name(cls, repo_group_name): | |||
|
2533 | val = remove_formatting(repo_group_name) | |||
|
2534 | val = safe_str(val).lower() | |||
|
2535 | chars = [] | |||
|
2536 | for c in val: | |||
|
2537 | if c not in string.ascii_letters: | |||
|
2538 | c = str(ord(c)) | |||
|
2539 | chars.append(c) | |||
|
2540 | ||||
|
2541 | return ''.join(chars) | |||
|
2542 | ||||
|
2543 | @classmethod | |||
2511 | def _generate_choice(cls, repo_group): |
|
2544 | def _generate_choice(cls, repo_group): | |
2512 | from webhelpers.html import literal as _literal |
|
2545 | from webhelpers.html import literal as _literal | |
2513 | _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) |
|
2546 | _name = lambda k: _literal(cls.CHOICES_SEPARATOR.join(k)) | |
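
As a standalone illustration of the `hash_repo_group_name` classmethod added above: ASCII letters pass through (lowercased), every other character is replaced by its decimal code point. The `remove_formatting`/`safe_str` normalisation used in the model is omitted here, and the sample name is made up:

import string


def hash_repo_group_name(repo_group_name):
    # lowercase, keep ASCII letters, encode all other characters as code points
    val = repo_group_name.lower()
    chars = []
    for c in val:
        if c not in string.ascii_letters:
            c = str(ord(c))
        chars.append(c)
    return ''.join(chars)


print(hash_repo_group_name('My Group/Sub-1'))  # -> 'my32group47sub4549'
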
@@ -2770,6 +2803,13 b' class RepoGroup(Base, BaseModel):' | |||||
2770 | } |
|
2803 | } | |
2771 | return data |
|
2804 | return data | |
2772 |
|
2805 | |||
|
2806 | def get_dict(self): | |||
|
2807 | # Since we transformed `group_name` to a hybrid property, we need to | |||
|
2808 | # keep compatibility with the code which uses `group_name` field. | |||
|
2809 | result = super(RepoGroup, self).get_dict() | |||
|
2810 | result['group_name'] = result.pop('_group_name', None) | |||
|
2811 | return result | |||
|
2812 | ||||
2773 |
|
2813 | |||
2774 | class Permission(Base, BaseModel): |
|
2814 | class Permission(Base, BaseModel): | |
2775 | __tablename__ = 'permissions' |
|
2815 | __tablename__ = 'permissions' |
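
A plain-Python sketch of the behaviour introduced above: assigning `group_name` transparently refreshes the stored hash, and `get_dict()` maps the private `_group_name` key back to its public name for compatibility. The class and helper below are illustrative stand-ins, not the SQLAlchemy model:

import string


def _name_hash(value):
    # same scheme as RepoGroup.hash_repo_group_name (normalisation omitted)
    return ''.join(
        c if c in string.ascii_letters else str(ord(c))
        for c in value.lower())


class RepoGroupSketch(object):
    def __init__(self, name):
        self.group_name = name  # routed through the setter below

    @property
    def group_name(self):
        return self._group_name

    @group_name.setter
    def group_name(self, value):
        self._group_name = value
        self.group_name_hash = _name_hash(value)

    def get_dict(self):
        data = dict(self.__dict__)
        # expose the private attribute under its public name, as in the diff
        data['group_name'] = data.pop('_group_name', None)
        return data


group = RepoGroupSketch('Docs/Internal')
print(group.group_name_hash)      # -> 'docs47internal'
print(sorted(group.get_dict()))   # -> ['group_name', 'group_name_hash']
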
@@ -1020,6 +1020,9 b' class PullRequestModel(BaseModel):' | |||||
1020 | log.debug("Adding %s reviewers", ids_to_add) |
|
1020 | log.debug("Adding %s reviewers", ids_to_add) | |
1021 | log.debug("Removing %s reviewers", ids_to_remove) |
|
1021 | log.debug("Removing %s reviewers", ids_to_remove) | |
1022 | changed = False |
|
1022 | changed = False | |
|
1023 | added_audit_reviewers = [] | |||
|
1024 | removed_audit_reviewers = [] | |||
|
1025 | ||||
1023 | for uid in ids_to_add: |
|
1026 | for uid in ids_to_add: | |
1024 | changed = True |
|
1027 | changed = True | |
1025 | _usr = self._get_user(uid) |
|
1028 | _usr = self._get_user(uid) | |
@@ -1030,29 +1033,37 b' class PullRequestModel(BaseModel):' | |||||
1030 | # NOTE(marcink): mandatory shouldn't be changed now |
|
1033 | # NOTE(marcink): mandatory shouldn't be changed now | |
1031 | # reviewer.mandatory = reviewers[uid]['reasons'] |
|
1034 | # reviewer.mandatory = reviewers[uid]['reasons'] | |
1032 | Session().add(reviewer) |
|
1035 | Session().add(reviewer) | |
1033 | self._log_audit_action( |
|
1036 | added_audit_reviewers.append(reviewer.get_dict()) | |
1034 | 'repo.pull_request.reviewer.add', {'data': reviewer.get_dict()}, |
|
|||
1035 | user, pull_request) |
|
|||
1036 |
|
1037 | |||
1037 | for uid in ids_to_remove: |
|
1038 | for uid in ids_to_remove: | |
1038 | changed = True |
|
1039 | changed = True | |
|
1040 | # NOTE(marcink): we fetch "ALL" reviewers using .all(). This handles an edge |||
|
1041 | # case where the same reviewer was added twice, which CAN happen |||
|
1042 | # due to the lack of DB checks |||
1039 | reviewers = PullRequestReviewers.query()\ |
|
1043 | reviewers = PullRequestReviewers.query()\ | |
1040 | .filter(PullRequestReviewers.user_id == uid, |
|
1044 | .filter(PullRequestReviewers.user_id == uid, | |
1041 | PullRequestReviewers.pull_request == pull_request)\ |
|
1045 | PullRequestReviewers.pull_request == pull_request)\ | |
1042 | .all() |
|
1046 | .all() | |
1043 | # use .all() in case we accidentally added the same person twice |
|
1047 | ||
1044 | # this CAN happen due to the lack of DB checks |
|
|||
1045 | for obj in reviewers: |
|
1048 | for obj in reviewers: | |
1046 |
|
|
1049 | removed_audit_reviewers.append(obj.get_dict()) |
1047 | Session().delete(obj) |
|
1050 | Session().delete(obj) | |
1048 | self._log_audit_action( |
|
|||
1049 | 'repo.pull_request.reviewer.delete', |
|
|||
1050 | {'old_data': old_data}, user, pull_request) |
|
|||
1051 |
|
1051 | |||
1052 | if changed: |
|
1052 | if changed: | |
|
1053 | Session().expire_all() | |||
1053 | pull_request.updated_on = datetime.datetime.now() |
|
1054 | pull_request.updated_on = datetime.datetime.now() | |
1054 | Session().add(pull_request) |
|
1055 | Session().add(pull_request) | |
1055 |
|
1056 | |||
|
1057 | # finally store audit logs | |||
|
1058 | for user_data in added_audit_reviewers: | |||
|
1059 | self._log_audit_action( | |||
|
1060 | 'repo.pull_request.reviewer.add', {'data': user_data}, | |||
|
1061 | user, pull_request) | |||
|
1062 | for user_data in removed_audit_reviewers: | |||
|
1063 | self._log_audit_action( | |||
|
1064 | 'repo.pull_request.reviewer.delete', {'old_data': user_data}, | |||
|
1065 | user, pull_request) | |||
|
1066 | ||||
1056 | self.notify_reviewers(pull_request, ids_to_add) |
|
1067 | self.notify_reviewers(pull_request, ids_to_add) | |
1057 | return ids_to_add, ids_to_remove |
|
1068 | return ids_to_add, ids_to_remove | |
1058 |
|
1069 | |||
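
The reworked reviewer update above collects audit payloads while the session changes are queued and only emits the log entries at the end. A rough, self-contained sketch of that ordering (all names and the print-based logger are illustrative, not RhodeCode's API):

def log_audit(action, payload):
    print(action, payload)


to_add = [{'user_id': 1}, {'user_id': 2}]
to_remove = [{'user_id': 3}]

added_audit_reviewers = []
removed_audit_reviewers = []
session_ops = []  # stand-in for Session().add() / Session().delete()

for reviewer in to_add:
    session_ops.append(('add', reviewer))
    added_audit_reviewers.append(dict(reviewer))

for reviewer in to_remove:
    removed_audit_reviewers.append(dict(reviewer))
    session_ops.append(('delete', reviewer))

# audit entries are written only after all add/delete operations were queued
for data in added_audit_reviewers:
    log_audit('repo.pull_request.reviewer.add', {'data': data})
for data in removed_audit_reviewers:
    log_audit('repo.pull_request.reviewer.delete', {'old_data': data})
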
@@ -1306,8 +1317,16 b' class PullRequestModel(BaseModel):' | |||||
1306 | possible = pull_request.last_merge_status == MergeFailureReason.NONE |
|
1317 | possible = pull_request.last_merge_status == MergeFailureReason.NONE | |
1307 | metadata = { |
|
1318 | metadata = { | |
1308 | 'target_ref': pull_request.target_ref_parts, |
|
1319 | 'target_ref': pull_request.target_ref_parts, | |
1309 | 'source_ref': pull_request.source_ref_parts |
|
1320 | 'source_ref': pull_request.source_ref_parts, | |
1310 | } |
|
1321 | } | |
|
1322 | if not possible and target_ref.type == 'branch': | |||
|
1323 | # NOTE(marcink): case for mercurial multiple heads on branch | |||
|
1324 | heads = target_vcs._heads(target_ref.name) | |||
|
1325 | if len(heads) != 1: | |||
|
1326 | heads = '\n,'.join(target_vcs._heads(target_ref.name)) | |||
|
1327 | metadata.update({ | |||
|
1328 | 'heads': heads | |||
|
1329 | }) | |||
1311 | merge_state = MergeResponse( |
|
1330 | merge_state = MergeResponse( | |
1312 | possible, False, None, pull_request.last_merge_status, metadata=metadata) |
|
1331 | possible, False, None, pull_request.last_merge_status, metadata=metadata) | |
1313 |
|
1332 |
@@ -119,6 +119,7 b' class SettingsModel(BaseModel):' | |||||
119 | new_ui.ui_value = val |
|
119 | new_ui.ui_value = val | |
120 | new_ui.ui_active = active |
|
120 | new_ui.ui_active = active | |
121 |
|
121 | |||
|
122 | repository_id = '' | |||
122 | if self.repo: |
|
123 | if self.repo: | |
123 | repo = self._get_repo(self.repo) |
|
124 | repo = self._get_repo(self.repo) | |
124 | repository_id = repo.repo_id |
|
125 | repository_id = repo.repo_id | |
@@ -440,26 +441,39 b' class VcsSettingsModel(object):' | |||||
440 | HOOKS_SETTINGS = ( |
|
441 | HOOKS_SETTINGS = ( | |
441 | ('hooks', 'changegroup.repo_size'), |
|
442 | ('hooks', 'changegroup.repo_size'), | |
442 | ('hooks', 'changegroup.push_logger'), |
|
443 | ('hooks', 'changegroup.push_logger'), | |
443 | ('hooks', 'outgoing.pull_logger'),
|
444 | ('hooks', 'outgoing.pull_logger'), | |
|
445 | ) | |||
444 | HG_SETTINGS = ( |
|
446 | HG_SETTINGS = ( | |
445 | ('extensions', 'largefiles'), |
|
447 | ('extensions', 'largefiles'), | |
446 | ('phases', 'publish'), |
|
448 | ('phases', 'publish'), | |
447 | ('extensions', 'evolve'),
|
449 | ('extensions', 'evolve'), | |
|
450 | ('extensions', 'topic'), | |||
|
451 | ('experimental', 'evolution'), | |||
|
452 | ('experimental', 'evolution.exchange'), | |||
|
453 | ) | |||
448 | GIT_SETTINGS = ( |
|
454 | GIT_SETTINGS = ( | |
449 | ('vcs_git_lfs', 'enabled'),
|
455 | ('vcs_git_lfs', 'enabled'), | |
|
456 | ) | |||
450 | GLOBAL_HG_SETTINGS = ( |
|
457 | GLOBAL_HG_SETTINGS = ( | |
451 | ('extensions', 'largefiles'), |
|
458 | ('extensions', 'largefiles'), | |
452 | ('largefiles', 'usercache'), |
|
459 | ('largefiles', 'usercache'), | |
453 | ('phases', 'publish'), |
|
460 | ('phases', 'publish'), | |
454 | ('extensions', 'hgsubversion'), |
|
461 | ('extensions', 'hgsubversion'), | |
455 | ('extensions', 'evolve'),
|
462 | ('extensions', 'evolve'), | |
|
463 | ('extensions', 'topic'), | |||
|
464 | ('experimental', 'evolution'), | |||
|
465 | ('experimental', 'evolution.exchange'), | |||
|
466 | ) | |||
|
467 | ||||
456 | GLOBAL_GIT_SETTINGS = ( |
|
468 | GLOBAL_GIT_SETTINGS = ( | |
457 | ('vcs_git_lfs', 'enabled'), |
|
469 | ('vcs_git_lfs', 'enabled'), | |
458 | ('vcs_git_lfs', 'store_location')
|
470 | ('vcs_git_lfs', 'store_location') | |
|
471 | ) | |||
459 |
|
472 | |||
460 | GLOBAL_SVN_SETTINGS = ( |
|
473 | GLOBAL_SVN_SETTINGS = ( | |
461 | ('vcs_svn_proxy', 'http_requests_enabled'), |
|
474 | ('vcs_svn_proxy', 'http_requests_enabled'), | |
462 | ('vcs_svn_proxy', 'http_server_url')
|
475 | ('vcs_svn_proxy', 'http_server_url') | |
|
476 | ) | |||
463 |
|
477 | |||
464 | SVN_BRANCH_SECTION = 'vcs_svn_branch' |
|
478 | SVN_BRANCH_SECTION = 'vcs_svn_branch' | |
465 | SVN_TAG_SECTION = 'vcs_svn_tag' |
|
479 | SVN_TAG_SECTION = 'vcs_svn_tag' | |
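
Because the Mercurial settings tuples above gained the topic and evolution entries, the update methods later in this diff unpack only their leading elements via `[:3]` and `[:5]`. A small illustration of that slicing, with the tuple copied from the change:

HG_SETTINGS = (
    ('extensions', 'largefiles'),
    ('phases', 'publish'),
    ('extensions', 'evolve'),
    ('extensions', 'topic'),
    ('experimental', 'evolution'),
    ('experimental', 'evolution.exchange'),
)

# only the first three entries map directly to form fields, hence the slice
largefiles, phases, evolve = HG_SETTINGS[:3]
print(largefiles, phases, evolve)
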
@@ -574,12 +588,38 b' class VcsSettingsModel(object):' | |||||
574 | def create_repo_svn_settings(self, data): |
|
588 | def create_repo_svn_settings(self, data): | |
575 | return self._create_svn_settings(self.repo_settings, data) |
|
589 | return self._create_svn_settings(self.repo_settings, data) | |
576 |
|
590 | |||
|
591 | def _set_evolution(self, settings, is_enabled): | |||
|
592 | if is_enabled: | |||
|
593 | # if evolve is active set evolution=all | |||
|
594 | ||||
|
595 | self._create_or_update_ui( | |||
|
596 | settings, *('experimental', 'evolution'), value='all', | |||
|
597 | active=True) | |||
|
598 | self._create_or_update_ui( | |||
|
599 | settings, *('experimental', 'evolution.exchange'), value='yes', | |||
|
600 | active=True) | |||
|
601 | # if evolve is active set topics server support | |||
|
602 | self._create_or_update_ui( | |||
|
603 | settings, *('extensions', 'topic'), value='', | |||
|
604 | active=True) | |||
|
605 | ||||
|
606 | else: | |||
|
607 | self._create_or_update_ui( | |||
|
608 | settings, *('experimental', 'evolution'), value='', | |||
|
609 | active=False) | |||
|
610 | self._create_or_update_ui( | |||
|
611 | settings, *('experimental', 'evolution.exchange'), value='no', | |||
|
612 | active=False) | |||
|
613 | self._create_or_update_ui( | |||
|
614 | settings, *('extensions', 'topic'), value='', | |||
|
615 | active=False) | |||
|
616 | ||||
577 | @assert_repo_settings |
|
617 | @assert_repo_settings | |
578 | def create_or_update_repo_hg_settings(self, data): |
|
618 | def create_or_update_repo_hg_settings(self, data): | |
579 | largefiles, phases, evolve = \ |
|
619 | largefiles, phases, evolve = \ | |
580 | self.HG_SETTINGS |
|
620 | self.HG_SETTINGS[:3] | |
581 | largefiles_key, phases_key, evolve_key = \ |
|
621 | largefiles_key, phases_key, evolve_key = \ | |
582 | self._get_settings_keys(self.HG_SETTINGS, data) |
|
622 | self._get_settings_keys(self.HG_SETTINGS[:3], data) | |
583 |
|
623 | |||
584 | self._create_or_update_ui( |
|
624 | self._create_or_update_ui( | |
585 | self.repo_settings, *largefiles, value='', |
|
625 | self.repo_settings, *largefiles, value='', | |
@@ -587,21 +627,22 b' class VcsSettingsModel(object):' | |||||
587 | self._create_or_update_ui( |
|
627 | self._create_or_update_ui( | |
588 | self.repo_settings, *evolve, value='', |
|
628 | self.repo_settings, *evolve, value='', | |
589 | active=data[evolve_key]) |
|
629 | active=data[evolve_key]) | |
|
630 | self._set_evolution(self.repo_settings, is_enabled=data[evolve_key]) | |||
|
631 | ||||
590 | self._create_or_update_ui( |
|
632 | self._create_or_update_ui( | |
591 | self.repo_settings, *phases, value=safe_str(data[phases_key])) |
|
633 | self.repo_settings, *phases, value=safe_str(data[phases_key])) | |
592 |
|
634 | |||
593 | def create_or_update_global_hg_settings(self, data): |
|
635 | def create_or_update_global_hg_settings(self, data): | |
594 | largefiles, largefiles_store, phases, hgsubversion, evolve \ |
|
636 | largefiles, largefiles_store, phases, hgsubversion, evolve \ | |
595 | = self.GLOBAL_HG_SETTINGS |
|
637 | = self.GLOBAL_HG_SETTINGS[:5] | |
596 | largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \ |
|
638 | largefiles_key, largefiles_store_key, phases_key, subversion_key, evolve_key \ | |
597 | = self._get_settings_keys(self.GLOBAL_HG_SETTINGS, data) |
|
639 | = self._get_settings_keys(self.GLOBAL_HG_SETTINGS[:5], data) | |
598 |
|
640 | |||
599 | self._create_or_update_ui( |
|
641 | self._create_or_update_ui( | |
600 | self.global_settings, *largefiles, value='', |
|
642 | self.global_settings, *largefiles, value='', | |
601 | active=data[largefiles_key]) |
|
643 | active=data[largefiles_key]) | |
602 | self._create_or_update_ui( |
|
644 | self._create_or_update_ui( | |
603 | self.global_settings, *largefiles_store, |
|
645 | self.global_settings, *largefiles_store, value=data[largefiles_store_key]) | |
604 | value=data[largefiles_store_key]) |
|
|||
605 | self._create_or_update_ui( |
|
646 | self._create_or_update_ui( | |
606 | self.global_settings, *phases, value=safe_str(data[phases_key])) |
|
647 | self.global_settings, *phases, value=safe_str(data[phases_key])) | |
607 | self._create_or_update_ui( |
|
648 | self._create_or_update_ui( | |
@@ -609,9 +650,10 b' class VcsSettingsModel(object):' | |||||
609 | self._create_or_update_ui( |
|
650 | self._create_or_update_ui( | |
610 | self.global_settings, *evolve, value='', |
|
651 | self.global_settings, *evolve, value='', | |
611 | active=data[evolve_key]) |
|
652 | active=data[evolve_key]) | |
|
653 | self._set_evolution(self.global_settings, is_enabled=data[evolve_key]) | |||
612 |
|
654 | |||
613 | def create_or_update_repo_git_settings(self, data): |
|
655 | def create_or_update_repo_git_settings(self, data): | |
614 | # NOTE(marcink): # comma make unpack work properly |
|
656 | # NOTE(marcink): # comma makes unpack work properly | |
615 | lfs_enabled, \ |
|
657 | lfs_enabled, \ | |
616 | = self.GIT_SETTINGS |
|
658 | = self.GIT_SETTINGS | |
617 |
|
659 | |||
@@ -675,6 +717,7 b' class VcsSettingsModel(object):' | |||||
675 | def get_repo_ui_settings(self, section=None, key=None): |
|
717 | def get_repo_ui_settings(self, section=None, key=None): | |
676 | global_uis = self.global_settings.get_ui(section, key) |
|
718 | global_uis = self.global_settings.get_ui(section, key) | |
677 | repo_uis = self.repo_settings.get_ui(section, key) |
|
719 | repo_uis = self.repo_settings.get_ui(section, key) | |
|
720 | ||||
678 | filtered_repo_uis = self._filter_ui_settings(repo_uis) |
|
721 | filtered_repo_uis = self._filter_ui_settings(repo_uis) | |
679 | filtered_repo_uis_keys = [ |
|
722 | filtered_repo_uis_keys = [ | |
680 | (s.section, s.key) for s in filtered_repo_uis] |
|
723 | (s.section, s.key) for s in filtered_repo_uis] |
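
To summarise the `_set_evolution` helper added earlier in this file, the sketch below lists the ui entries it writes for the enabled and disabled cases. The (section, key, value, active) tuple layout is only an illustration of the `_create_or_update_ui` calls shown in the diff:

def evolution_entries(is_enabled):
    # mirrors the values _set_evolution passes to _create_or_update_ui
    if is_enabled:
        return [
            ('experimental', 'evolution', 'all', True),
            ('experimental', 'evolution.exchange', 'yes', True),
            ('extensions', 'topic', '', True),
        ]
    return [
        ('experimental', 'evolution', '', False),
        ('experimental', 'evolution.exchange', 'no', False),
        ('extensions', 'topic', '', False),
    ]


for entry in evolution_entries(True):
    print(entry)
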
@@ -40,15 +40,32 b'' | |||||
40 |
|
40 | |||
41 | <script> |
|
41 | <script> | |
42 | $(document).ready(function() { |
|
42 | $(document).ready(function() { | |
43 |
|
43 | var $repoGroupsListTable = $('#group_list_table'); | ||
44 | var get_datatable_count = function(){ |
|
|||
45 | var api = $('#group_list_table').dataTable().api(); |
|
|||
46 | $('#repo_group_count').text(api.page.info().recordsDisplay); |
|
|||
47 | }; |
|
|||
48 |
|
44 | |||
49 | // repo group list |
|
45 | // repo group list | |
50 | $
|
46 | $repoGroupsListTable.DataTable({ | |
51 | data: ${c.data|n}, |
|
47 | processing: true, | |
|
48 | serverSide: true, | |||
|
49 | ajax: { | |||
|
50 | "url": "${h.route_path('repo_groups_data')}", | |||
|
51 | "dataSrc": function (json) { | |||
|
52 | var filteredCount = json.recordsFiltered; | |||
|
53 | var filteredInactiveCount = json.recordsFilteredInactive; | |||
|
54 | var totalInactive = json.recordsTotalInactive; | |||
|
55 | var total = json.recordsTotal; | |||
|
56 | ||||
|
57 | var _text = _gettext( | |||
|
58 | "{0} of {1} repository groups").format( | |||
|
59 | filteredCount, total); | |||
|
60 | ||||
|
61 | if (total === filteredCount) { | |||
|
62 | _text = _gettext("{0} repository groups").format(total); | |||
|
63 | } | |||
|
64 | $('#repo_group_count').text(_text); | |||
|
65 | return json.data; | |||
|
66 | }, | |||
|
67 | }, | |||
|
68 | ||||
52 | dom: 'rtp', |
|
69 | dom: 'rtp', | |
53 | pageLength: ${c.visual.admin_grid_items}, |
|
70 | pageLength: ${c.visual.admin_grid_items}, | |
54 | order: [[ 0, "asc" ]], |
|
71 | order: [[ 0, "asc" ]], | |
@@ -66,36 +83,34 b'' | |||||
66 | { data: {"_": "owner", |
|
83 | { data: {"_": "owner", | |
67 | "sort": "owner"}, title: "${_('Owner')}", className: "td-user" }, |
|
84 | "sort": "owner"}, title: "${_('Owner')}", className: "td-user" }, | |
68 | { data: {"_": "action", |
|
85 | { data: {"_": "action", | |
69 | "sort": "action"}, title: "${_('Action')}", className: "td-action" } |
|
86 | "sort": "action"}, title: "${_('Action')}", className: "td-action", orderable: false } | |
70 | ], |
|
87 | ], | |
71 | language: { |
|
88 | language: { | |
72 | paginate: DEFAULT_GRID_PAGINATION, |
|
89 | paginate: DEFAULT_GRID_PAGINATION, | |
|
90 | sProcessing: _gettext('loading...'), | |||
73 | emptyTable: _gettext("No repository groups available yet.") |
|
91 | emptyTable: _gettext("No repository groups available yet.") | |
74 | }, |
|
92 | }, | |
75 | "initComplete": function( settings, json ) { |
|
|||
76 | get_datatable_count(); |
|
|||
77 | quick_repo_menu(); |
|
|||
78 | } |
|
|||
79 | }); |
|
93 | }); | |
80 |
|
94 | |||
81 | // update the counter when doing search |
|
95 | $repoGroupsListTable.on('xhr.dt', function(e, settings, json, xhr){ | |
82 | $('#group_list_table').on( 'search.dt', function (e,settings) { |
|
96 | $repoGroupsListTable.css('opacity', 1); | |
83 | get_datatable_count(); |
|
97 | }); | |
|
98 | ||||
|
99 | $repoGroupsListTable.on('preXhr.dt', function(e, settings, data){ | |||
|
100 | $repoGroupsListTable.css('opacity', 0.3); | |||
84 | }); |
|
101 | }); | |
85 |
|
102 | |||
86 | // filter, filter both grids |
|
103 | // filter | |
87 | $('#q_filter').on(
|
104 | $('#q_filter').on('keyup', | |
|
105 | $.debounce(250, function() { | |||
|
106 | $repoGroupsListTable.DataTable().search( | |||
|
107 | $('#q_filter').val() | |||
|
108 | ).draw(); | |||
|
109 | }) | |||
|
110 | ); | |||
|
111 | }); | |||
88 |
|
112 | |||
89 | var repo_group_api = $('#group_list_table').dataTable().api(); |
|
113 | </script> | |
90 | repo_group_api |
|
|||
91 | .columns(0) |
|
|||
92 | .search(this.value) |
|
|||
93 | .draw(); |
|
|||
94 | }); |
|
|||
95 |
|
114 | |||
96 | // refilter table if page load via back button |
|
|||
97 | $("#q_filter").trigger('keyup'); |
|
|||
98 | }); |
|
|||
99 | </script> |
|
|||
100 | </%def> |
|
115 | </%def> | |
101 |
|
116 |
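
The grid above now uses DataTables server-side processing, so the new `repo_groups_data` endpoint has to return a payload carrying the fields the `dataSrc` callback reads. A hedged sketch of that shape, with invented counts and row data:

import json

payload = {
    'recordsTotal': 20,
    'recordsFiltered': 3,
    'recordsTotalInactive': 0,         # extra counters read by the dataSrc callback
    'recordsFilteredInactive': 0,
    'data': [
        {'name': 'docs', 'desc': 'Documentation group', 'owner': 'admin'},
    ],
}
print(json.dumps(payload, indent=2))
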
@@ -70,7 +70,7 b'' | |||||
70 | { data: {"_": "state", |
|
70 | { data: {"_": "state", | |
71 | "sort": "state"}, title: "${_('State')}", className: "td-tags td-state" }, |
|
71 | "sort": "state"}, title: "${_('State')}", className: "td-tags td-state" }, | |
72 | { data: {"_": "action", |
|
72 | { data: {"_": "action", | |
73 | "sort": "action"}, title: "${_('Action')}", className: "td-action" } |
|
73 | "sort": "action"}, title: "${_('Action')}", className: "td-action", orderable: false } | |
74 | ], |
|
74 | ], | |
75 | language: { |
|
75 | language: { | |
76 | paginate: DEFAULT_GRID_PAGINATION, |
|
76 | paginate: DEFAULT_GRID_PAGINATION, |
@@ -105,7 +105,7 b'' | |||||
105 | // filter |
|
105 | // filter | |
106 | $('#q_filter').on('keyup', |
|
106 | $('#q_filter').on('keyup', | |
107 | $.debounce(250, function() { |
|
107 | $.debounce(250, function() { | |
108 | $
|
108 | $userGroupsListTable.DataTable().search( | |
109 | $('#q_filter').val() |
|
109 | $('#q_filter').val() | |
110 | ).draw(); |
|
110 | ).draw(); | |
111 | }) |
|
111 | }) |
@@ -133,13 +133,13 b'' | |||||
133 |
|
133 | |||
134 | <div class="checkbox"> |
|
134 | <div class="checkbox"> | |
135 | ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)} |
|
135 | ${h.checkbox('extensions_evolve' + suffix, 'True', **kwargs)} | |
136 | <label for="extensions_evolve${suffix}">${_('Enable
|
136 | <label for="extensions_evolve${suffix}">${_('Enable Evolve and Topic extension')}</label> | |
137 | </div> |
|
137 | </div> | |
138 | <div class="label"> |
|
138 | <div class="label"> | |
139 | % if display_globals: |
|
139 | % if display_globals: | |
140 | <span class="help-block">${_('Enable
|
140 | <span class="help-block">${_('Enable Evolve and Topic extensions for all repositories.')}</span> | |
141 | % else: |
|
141 | % else: | |
142 | <span class="help-block">${_('Enable
|
142 | <span class="help-block">${_('Enable Evolve and Topic extensions for this repository.')}</span> | |
143 | % endif |
|
143 | % endif | |
144 | </div> |
|
144 | </div> | |
145 |
|
145 |
@@ -35,6 +35,8 b' def route_path(name, params=None, **kwar' | |||||
35 | ADMIN_PREFIX + '/repos', |
|
35 | ADMIN_PREFIX + '/repos', | |
36 | 'repo_groups': |
|
36 | 'repo_groups': | |
37 | ADMIN_PREFIX + '/repo_groups', |
|
37 | ADMIN_PREFIX + '/repo_groups', | |
|
38 | 'repo_groups_data': | |||
|
39 | ADMIN_PREFIX + '/repo_groups_data', | |||
38 | 'user_groups': |
|
40 | 'user_groups': | |
39 | ADMIN_PREFIX + '/user_groups', |
|
41 | ADMIN_PREFIX + '/user_groups', | |
40 | 'user_groups_data': |
|
42 | 'user_groups_data': | |
@@ -63,8 +65,9 b' class TestAdminDelegatedUser(TestControl' | |||||
63 | response = self.app.get(route_path('repos'), status=200) |
|
65 | response = self.app.get(route_path('repos'), status=200) | |
64 | response.mustcontain('data: []') |
|
66 | response.mustcontain('data: []') | |
65 |
|
67 | |||
66 | response = self.app.get(route_path('repo_groups'),
|
68 | response = self.app.get(route_path('repo_groups_data'), | |
67 | response.mustcontain('data: []') |
|
69 | status=200, extra_environ=xhr_header) | |
|
70 | assert response.json['data'] == [] | |||
68 |
|
71 | |||
69 | response = self.app.get(route_path('user_groups_data'), |
|
72 | response = self.app.get(route_path('user_groups_data'), | |
70 | status=200, extra_environ=xhr_header) |
|
73 | status=200, extra_environ=xhr_header) | |
@@ -97,7 +100,8 b' class TestAdminDelegatedUser(TestControl' | |||||
97 | response = self.app.get(route_path('repos'), status=200) |
|
100 | response = self.app.get(route_path('repos'), status=200) | |
98 | response.mustcontain('"name_raw": "{}"'.format(repo_name)) |
|
101 | response.mustcontain('"name_raw": "{}"'.format(repo_name)) | |
99 |
|
102 | |||
100 | response = self.app.get(route_path('repo_groups'),
|
103 | response = self.app.get(route_path('repo_groups_data'), | |
|
104 | extra_environ=xhr_header, status=200) | |||
101 | response.mustcontain('"name_raw": "{}"'.format(repo_group_name)) |
|
105 | response.mustcontain('"name_raw": "{}"'.format(repo_group_name)) | |
102 |
|
106 | |||
103 | response = self.app.get(route_path('user_groups_data'), |
|
107 | response = self.app.get(route_path('user_groups_data'), | |
@@ -139,7 +143,8 b' class TestAdminDelegatedUser(TestControl' | |||||
139 | response = self.app.get(route_path('repos'), status=200) |
|
143 | response = self.app.get(route_path('repos'), status=200) | |
140 | response.mustcontain('"name_raw": "{}"'.format(repo_name)) |
|
144 | response.mustcontain('"name_raw": "{}"'.format(repo_name)) | |
141 |
|
145 | |||
142 | response = self.app.get(route_path('repo_groups'),
|
146 | response = self.app.get(route_path('repo_groups_data'), | |
|
147 | extra_environ=xhr_header, status=200) | |||
143 | response.mustcontain('"name_raw": "{}"'.format(repo_group_name)) |
|
148 | response.mustcontain('"name_raw": "{}"'.format(repo_group_name)) | |
144 |
|
149 | |||
145 | response = self.app.get(route_path('user_groups_data'), |
|
150 | response = self.app.get(route_path('user_groups_data'), |
@@ -501,8 +501,8 b' class TestCreateOrUpdateUi(object):' | |||||
501 |
|
501 | |||
502 | def test_update(self, repo_stub, settings_util): |
|
502 | def test_update(self, repo_stub, settings_util): | |
503 | model = VcsSettingsModel(repo=repo_stub.repo_name) |
|
503 | model = VcsSettingsModel(repo=repo_stub.repo_name) | |
504 |
|
504 | # we only care about the first 3 settings |
505 | largefiles, phases, evolve = model.HG_SETTINGS |
|
505 | largefiles, phases, evolve = model.HG_SETTINGS[:3] | |
506 |
|
506 | |||
507 | section = 'test-section' |
|
507 | section = 'test-section' | |
508 | key = 'test-key' |
|
508 | key = 'test-key' | |
@@ -531,10 +531,11 b' class TestCreateOrUpdateRepoHgSettings(o' | |||||
531 | with mock.patch.object(model, '_create_or_update_ui') as create_mock: |
|
531 | with mock.patch.object(model, '_create_or_update_ui') as create_mock: | |
532 | model.create_or_update_repo_hg_settings(self.FORM_DATA) |
|
532 | model.create_or_update_repo_hg_settings(self.FORM_DATA) | |
533 | expected_calls = [ |
|
533 | expected_calls = [ | |
534 | mock.call(model.repo_settings, 'extensions', 'largefiles', |
|
534 | mock.call(model.repo_settings, 'extensions', 'largefiles', active=False, value=''), | |
535 | active=False, value=''), |
|
535 | mock.call(model.repo_settings, 'extensions', 'evolve', active=False, value=''), | |
536 |
mock.call(model.repo_settings, 'ex |
|
536 | mock.call(model.repo_settings, 'experimental', 'evolution', active=False, value=''), | |
537 | active=False, value=''), |
|
537 | mock.call(model.repo_settings, 'experimental', 'evolution.exchange', active=False, value='no'), | |
|
538 | mock.call(model.repo_settings, 'extensions', 'topic', active=False, value=''), | |||
538 | mock.call(model.repo_settings, 'phases', 'publish', value='False'), |
|
539 | mock.call(model.repo_settings, 'phases', 'publish', value='False'), | |
539 | ] |
|
540 | ] | |
540 | assert expected_calls == create_mock.call_args_list |
|
541 | assert expected_calls == create_mock.call_args_list | |
@@ -589,17 +590,16 b' class TestCreateOrUpdateGlobalHgSettings' | |||||
589 | with mock.patch.object(model, '_create_or_update_ui') as create_mock: |
|
590 | with mock.patch.object(model, '_create_or_update_ui') as create_mock: | |
590 | model.create_or_update_global_hg_settings(self.FORM_DATA) |
|
591 | model.create_or_update_global_hg_settings(self.FORM_DATA) | |
591 | expected_calls = [ |
|
592 | expected_calls = [ | |
592 | mock.call(model.global_settings, 'extensions', 'largefiles', |
|
593 | mock.call(model.global_settings, 'extensions', 'largefiles', active=False, value=''), | |
593 | active=False, value=''), |
|
594 | mock.call(model.global_settings, 'largefiles', 'usercache', value='/example/largefiles-store'), | |
594 | mock.call(model.global_settings, '
|
595 | mock.call(model.global_settings, 'phases', 'publish', value='False'), | |
595 | value='/example/largefiles-store'), |
|
596 | mock.call(model.global_settings, 'extensions', 'hgsubversion', active=False), | |
596 | mock.call(model.global_settings, '
|
597 | mock.call(model.global_settings, 'extensions', 'evolve', active=False, value=''), | |
597 | value='False'), |
|
598 | mock.call(model.global_settings, 'experimental', 'evolution', active=False, value=''), | |
598 | mock.call(model.global_settings, 'ex
|
599 | mock.call(model.global_settings, 'experimental', 'evolution.exchange', active=False, value='no'), | |
599 | active=False), |
|
600 | mock.call(model.global_settings, 'extensions', 'topic', active=False, value=''), | |
600 | mock.call(model.global_settings, 'extensions', 'evolve', |
|
|||
601 | active=False, value='') |
|
|||
602 | ] |
|
601 | ] | |
|
602 | ||||
603 | assert expected_calls == create_mock.call_args_list |
|
603 | assert expected_calls == create_mock.call_args_list | |
604 |
|
604 | |||
605 | @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys()) |
|
605 | @pytest.mark.parametrize('field_to_remove', FORM_DATA.keys()) | |
@@ -625,10 +625,8 b' class TestCreateOrUpdateGlobalGitSetting' | |||||
625 | with mock.patch.object(model, '_create_or_update_ui') as create_mock: |
|
625 | with mock.patch.object(model, '_create_or_update_ui') as create_mock: | |
626 | model.create_or_update_global_git_settings(self.FORM_DATA) |
|
626 | model.create_or_update_global_git_settings(self.FORM_DATA) | |
627 | expected_calls = [ |
|
627 | expected_calls = [ | |
628 | mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', |
|
628 | mock.call(model.global_settings, 'vcs_git_lfs', 'enabled', active=False, value=False), | |
629 | active=False, value=False), |
|
629 | mock.call(model.global_settings, 'vcs_git_lfs', 'store_location', value='/example/lfs-store'), | |
630 | mock.call(model.global_settings, 'vcs_git_lfs', 'store_location', |
|
|||
631 | value='/example/lfs-store'), |
|
|||
632 | ] |
|
630 | ] | |
633 | assert expected_calls == create_mock.call_args_list |
|
631 | assert expected_calls == create_mock.call_args_list | |
634 |
|
632 |
@@ -512,7 +512,7 b' def test_outdated_comments(' | |||||
512 | (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES, |
|
512 | (MergeFailureReason.HG_SOURCE_HAS_MORE_BRANCHES, | |
513 | 'This pull request cannot be merged because the source contains more branches than the target.'), |
|
513 | 'This pull request cannot be merged because the source contains more branches than the target.'), | |
514 | (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, |
|
514 | (MergeFailureReason.HG_TARGET_HAS_MULTIPLE_HEADS, | |
515 | 'This pull request cannot be merged because the target has multiple heads: `a,b,c`.'), |
|
515 | 'This pull request cannot be merged because the target `ref_name` has multiple heads: `a,b,c`.'), | |
516 | (MergeFailureReason.TARGET_IS_LOCKED, |
|
516 | (MergeFailureReason.TARGET_IS_LOCKED, | |
517 | 'This pull request cannot be merged because the target repository is locked by user:123.'), |
|
517 | 'This pull request cannot be merged because the target repository is locked by user:123.'), | |
518 | (MergeFailureReason.MISSING_TARGET_REF, |
|
518 | (MergeFailureReason.MISSING_TARGET_REF, |